I'm seeing performance issue while processing the xml file which has more then 10K records, it's working fine if the file has 100 records. Below is the sample procedure I'm using..
++++++++++++++++
CREATE OR REPLACE PROCEDURE process_xml_file(errbuf OUT varchar2
,retcode OUT number
,b_file IN varchar2 ) IS
l_stmt number; --use FOR error tracking
l_program_name varchar2(200) := 'MDM_EXTRACT_DATA';
l_user fnd_user.user_id%TYPE;
l_err_msg varchar2(2000);
l_err_code varchar2(200);
l_request_id number := 0;
l_err varchar2(2000);
l_sqlcode varchar2(40);
l_sqlerrm varchar2(4000);
CURSOR cur_product IS
SELECT ID,
Product_Name,
Product_Long_Description,
Product_Status,
PID_ID,
Primary_Flag,
Available_Kit_Version,
Product_Category_Code,
SGID
FROM (WITH t AS (SELECT XMLTYPE(bfilename('NCR_NCRX_BFILES_DIR', b_file), nls_charset_id('UTF8')) xmlcol
FROM dual)
SELECT ExtractValue(value(Product),'Product/ID') ID,
ExtractValue(value(Product),'Product/Product_Name') Product_Name,
ExtractValue(value(Product),'Product/Product_Long_Description') Product_Long_Description,
ExtractValue(value(Product),'Product/Product_Status') Product_Status,
ExtractValue(value(PID),'/PID/PID_ID') PID_ID,
ExtractValue(value(PID),'/PID/Primary_Flag') Primary_Flag,
ExtractValue(value(PID),'/PID/Available_Kit_Version') Available_Kit_Version,
ExtractValue(value(PID),'/PID/Product_Category_Code') Product_Category_Code,
ExtractValue(value(Assignments),'/Assignments/SGID') SGID
FROM t,
TABLE(XMLSequence(extract(t.xmlcol,'/Products/Product'))) Product,
TABLE(XMLSequence(extract(value(Product),'/Product/PIDS/PID'))) (+) PID,
TABLE(XMLSequence(extract(value(Product),'/Product/Assigned_Solution_Group/Assignments'))) (+) Assignments
);
BEGIN
NCR_APPL_INFO_PKG.SET_MODULE('INFX', l_program_name);
--l_request_id := p_request_id;
l_user := 80660;
l_stmt := 10;
IF b_file LIKE 'PMDM_Product%' THEN
FOR product_rec IN cur_product LOOP
BEGIN
l_stmt := 20;
INSERT
INTO infx.ncr_mdm_item_initial
(item,
description,
creation_date,
created_by,
last_update_login,
last_update_date,
last_updated_by,
long_description,
item_status,
product_id,
primary_flag,
solution_assignment,
product_flag,
attribute1,
attribute2)
SELECT UPPER(product_rec.id),
product_rec.product_name,
sysdate,
l_user,
null,
sysdate,
l_user,
product_rec.product_long_description,
product_rec.product_status,
UPPER(product_rec.pid_id),
product_rec.Primary_Flag,
UPPER(product_rec.SGID),
'Y',
product_rec.Available_Kit_Version,
product_rec.Product_Category_Code
FROM dual;
EXCEPTION when others THEN
l_sqlcode := NULL;
l_sqlerrm := NULL;
l_sqlcode := sqlcode;
l_sqlerrm := sqlerrm;
dbms_output.put_line (' Error While Processing the file : '||l_sqlcode || ' :: '||l_sqlerrm );
END;
END LOOP;
COMMIt;
END IF;
END;
/
++++++++++++++++++++++
DECLARE
l_errbuf VARCHAR2(100);
l_retcode NUMBER;
BEGIN
PROCESS_XML_FILE (l_errbuf,l_retcode,'PMDM_Product_4500817163558623_10292017040025.xml');
END;
/
++++++++++++++++++
If by processing 10k records in your file, you mean you're inserting 10,000+ rows, then the problem almost certainly comes here:
FOR product_rec IN cur_product LOOP
Looping is slow, mmmmkay!
What's more, I don't see any particular reason to use a loop here. Insert the output of the select statement in your cursor, uppering as needed. e.g.:
INSERT INTO infx.ncr_mdm_item_initial...
SELECT ID,
Product_Name,
Product_Long_Description,
Product_Status,
PID_ID,
Primary_Flag,
Available_Kit_Version,
Product_Category_Code,
SGID
FROM (WITH t AS (SELECT XMLTYPE(...)) ...
If, after replacing this with a single select, you find it's still slow trace your code to see what it's doing!
You can find instructions on this at:
https://blogs.oracle.com/sql/how-to-create-an-execution-plan#tkprof If you're still stuck at this point, let us know the results of your findings and we'll see what how we can help.