Skip to Main Content
  • Questions
  • Parsing a CLOB field with CSV data and putting the contents into its proper fields

Breadcrumb

Question and Answer

Connor McDonald

Thanks for the question, Robert.

Asked: June 01, 2018 - 11:10 am UTC

Last updated: January 19, 2023 - 12:02 am UTC

Version: 12.X

Viewed 10K+ times! This question is

You Asked

My question is a variation of the one originally posted on 11/9/2015: parsing a CLOB field which contains CSV data.

My delimited data that is loaded into a CLOB field contains output in the form attribute=data~. The CLOB field can contain up to 60 attributes (short list below) that all need to be analyzed to extract the data into a table with matching attribute fields. Here is an example of the CLOB data:

pid=29942~uid=0~old-auid=4294967295~auid=0~tty=(none)~old-ses=4294967295~ses=5020~res=1~

My output table will have all the attributes listed below. Goal is to extract the CLOB field for the given record into the proper output table fields.

I already completed this parsing application in linux shell script, but performance is very slow.
Any guidance that can be provided is appreciated.

Regards,
Robert

pid=
arch=
cwd=
item=
proctitle=
uid=
name=
syscall=
auid=
old-auid=
inode=
per=
ses=
success=
tty=



and Connor said...

Something like this should get you started

SQL> create table T ( c clob );

Table created.

SQL>
SQL> declare
  2     v varchar2(32000) := 'pid=29942~uid=0~old-auid=4294967295~auid=0~tty=(none)~old-ses=4294967295~ses=5020~res=1~';
  3     ctemp clob;
  4   begin
  5     for i in 1 .. 8 loop
  6       v := v ||v;
  7     end loop;
  8
  9     dbms_lob.createtemporary(ctemp,true);
 10     ctemp := v;
 11
 12     for i in 1 .. 20 loop
 13       dbms_lob.writeappend(ctemp,length(v),v);
 14     end loop;
 15
 16     insert into T values (ctemp);
 17     commit;
 18   end;
 19   /

PL/SQL procedure successfully completed.

SQL>
SQL> select dbms_lob.getlength(c) from t;

DBMS_LOB.GETLENGTH(C)
---------------------
               473088

1 row selected.

SQL>
SQL>
SQL> drop table GTT;

Table dropped.

SQL>
SQL> create global temporary table GTT ( id number,
  2  c_pid varchar2(20),
  3  c_uid varchar2(20),
  4  c_old_auid varchar2(20),
  5  c_auid varchar2(20),
  6  c_tty varchar2(20),
  7  c_old_ses varchar2(20),
  8  c_ses varchar2(20),
  9  c_res varchar2(20)
 10  )
 11  on commit preserve rows;

Table created.

SQL>
SQL>
SQL>  set serverout on
SQL>  declare
  2     l_big_chunk varchar2(32767);
  3     l_leftover varchar2(200);
  4     l_offset pls_integer;
  5     l_len pls_integer;
  6
  7     type t_rows is table of gtt%rowtype index by pls_integer;
  8     l_rows t_rows;
  9
 10     l_delim pls_integer;
 11     l_eq    pls_integer;
 12     l_elem  varchar2(100);
 13     l_col_name varchar2(30);
 14     l_row_idx pls_integer := 0;
 15   begin
 16   for i in ( select rownum r, c from t ) loop
 17     l_offset := 1;
 18
 19     loop
 20       l_big_chunk := dbms_lob.substr(i.c,32700,l_offset);
 21       l_len := length(l_big_chunk);
 22       l_offset := l_offset + l_len;
 23       l_big_chunk := l_leftover || l_big_chunk;
 24
 25       dbms_application_info.set_client_info(i.r||'-'||l_offset);
 26
 27       loop
 28         l_delim := instr(l_big_chunk,'~');
 29         exit when l_delim = 0 or l_big_chunk is null;
 30
 31  --dbms_output.put_line('l_big_chunk='||l_big_chunk);
 32  --dbms_output.put_line('l_delim='||l_delim);
 33
 34         l_elem := substr(l_big_chunk,1,l_delim-1);
 35         l_eq := instr(l_elem,'=');
 36         l_col_name := substr(l_elem,1,l_eq-1);
 37
 38  --dbms_output.put_line('l_elem='||l_elem);
 39  --dbms_output.put_line('l_eq='||l_eq);
 40  --dbms_output.put_line('l_col_name='||l_col_name);
 41
 42         if l_col_name = 'pid' then
 43           l_row_idx := l_row_idx + 1;
 44           l_rows(l_row_idx).c_pid := substr(l_elem,l_eq+1);
 45         elsif l_col_name = 'uid' then
 46           l_rows(l_row_idx).c_uid := substr(l_elem,l_eq+1);
 47         elsif l_col_name = 'old-auid' then
 48           l_rows(l_row_idx).c_old_auid := substr(l_elem,l_eq+1);
 49         elsif l_col_name = 'auid' then
 50           l_rows(l_row_idx).c_auid := substr(l_elem,l_eq+1);
 51         elsif l_col_name = 'tty' then
 52           l_rows(l_row_idx).c_tty := substr(l_elem,l_eq+1);
 53         elsif l_col_name = 'old-ses' then
 54           l_rows(l_row_idx).c_old_ses := substr(l_elem,l_eq+1);
 55         elsif l_col_name = 'ses' then
 56           l_rows(l_row_idx).c_ses := substr(l_elem,l_eq+1);
 57         elsif l_col_name = 'res' then
 58           l_rows(l_row_idx).c_res := substr(l_elem,l_eq+1);
 59         end if;
 60
 61         l_big_chunk := substr(l_big_chunk,l_delim+1);
 62       end loop;
 63       l_leftover := l_big_chunk;
 64
 65       exit when l_len < 32700;
 66     end loop;
 67
 68   end loop;
 69     forall i in 1 .. l_rows.count
 70        insert into gtt values  l_rows(i) ;
 71
 72     dbms_output.put_line('rows = '||sql%rowcount);
 73     commit;
 74
 75   end;
 76   /
rows = 5376

PL/SQL procedure successfully completed.

SQL>
SQL> select count(*) from gtt;

  COUNT(*)
----------
      5376

1 row selected.

SQL>
SQL> select * from gtt where rownum <= 10;

        ID C_PID                C_UID                C_OLD_AUID           C_AUID               C_TTY                C_OLD_SES            C_SES                C_RES
---------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- --------------------
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1
           29942                0                    4294967295           0                    (none)               4294967295           5020                 1

10 rows selected.


Rating

  (8 ratings)

Is this answer out of date? If it is, please let us know via a Comment

Comments

Alternative inspired by Stew Ashton JSON_TABLE usage

Kim Berg Hansen, June 06, 2018 - 10:12 am UTC

Hiya,

Since name/value pairs are very like JSON (just here using different delimiters), I got inspired by Stew Ashton using JSON_TABLE to split strings: https://stewashton.wordpress.com/2018/06/05/splitting-strings-a-new-champion/

I'm building a test table with 10 rows each with a CLOB with 90 (3*30) name/value pairs.
Then I use some REPLACE to turn the name=value~ format into JSON.
And then it can be parsed with JSON_TABLE.

-- Test setup: one CLOB per row.
create table T ( id integer, c clob );

-- Populate T with 10 rows; each CLOB holds 90 (3*30) name=value~ pairs,
-- built up in a temporary LOB whose content is copied by the INSERT.
declare
  v varchar2(32000);
  ctemp clob;
begin
  dbms_lob.createtemporary(ctemp,true);
  for j in 1 .. 10 loop
     -- empty the temporary LOB before building the next row's content
     dbms_lob.trim(ctemp,0);
     for i in 1 .. 30 loop
       -- three attributes per iteration (pidN, usernameN, attrN), padded
       -- so value lengths vary per row (j) and per attribute (i)
       v := 'pid'||i||'='||(j*10000000+i)||'~'
         || 'username'||i||'='||rpad(j||i||'abc',j*10+10*i,'xyz')||'~'
         || 'attr'||i||'='||rpad(j||i||'xyz',j*5+5*i,'abc')||'~';
       dbms_lob.writeappend(ctemp,length(v),v);
     end loop;
     insert into T values (j, ctemp);
  end loop;
  dbms_lob.freetemporary(ctemp);
  commit;
end;
/

-- Sanity check: CLOB length generated for each row.
select id, dbms_lob.getlength(c) len from t;

        ID        LEN
---------- ----------
         1       8448
         2       8898
         3       9348
         4       9798
         5      10248
         6      10698
         7      11148
         8      11598
         9      12048
        10      12528

10 rows selected.

set linesize 132
set pagesize 50
column id format 99
column j_obj format a100
set long 100

-- Turn into JSON

-- Rewrite the name=value~ pairs as one JSON object per CLOB:
--   1) escape embedded double quotes, 2) '=' -> '":"', 3) '~' -> '","',
--   then wrap the whole thing in {"..."}  (rtrim drops the trailing '~').
-- NOTE(review): only '"' is escaped here; a backslash, '=' or '~' inside
-- a value would still corrupt the generated JSON -- fine for this demo data.
select id, '{"' || replace(replace(replace(rtrim(c,'~'), '"', '\"'), '=', '":"'), '~', '","') || '"}' j_obj
from t;

 ID J_OBJ
--- ----------------------------------------------------------------------------------------------------
  1 {"pid1":"10000001","username1":"11abcxyzxyzxyzxyzxyz","attr1":"11xyzabcab","pid2":"10000002","userna
  2 {"pid1":"20000001","username1":"21abcxyzxyzxyzxyzxyzxyzxyzxyzx","attr1":"21xyzabcabcabca","pid2":"20
  3 {"pid1":"30000001","username1":"31abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxy","attr1":"31xyzabcabcabcabc
  4 {"pid1":"40000001","username1":"41abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz","attr1":"41xyzab
  5 {"pid1":"50000001","username1":"51abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzx","attr1
  6 {"pid1":"60000001","username1":"61abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz
  7 {"pid1":"70000001","username1":"71abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz
  8 {"pid1":"80000001","username1":"81abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz
  9 {"pid1":"90000001","username1":"91abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyz
 10 {"pid1":"100000001","username1":"101abcxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzxyzx

10 rows selected.

column username1  format a25
column attr1      format a25
column username30 format a25
column attr30     format a25

-- Use in JSON_TABLE (substr just for SQL*Plus output)

-- Shred each converted CLOB with JSON_TABLE: one projected column per
-- attribute name (only attributes 1 and 30 are shown; the middle 28
-- follow the same pattern). The substr() calls only keep the wide
-- values readable in SQL*Plus output.
select t.id
     , jt.pid1
     , substr(jt.username1 , 1, 25) as username1
     , substr(jt.attr1     , 1, 25) as attr1
       /* etc etc etc */
     , jt.pid30
     , substr(jt.username30, 1, 25) as username30
     , substr(jt.attr30    , 1, 25) as attr30
  from t
     , json_table(
          -- inline name=value~ -> JSON-object conversion (quote-escape,
          -- then '=' -> '":"' and '~' -> '","')
          '{"' || replace(replace(replace(rtrim(c,'~'), '"', '\"'), '=', '":"'), '~', '","') || '"}'
        , '$'
          columns (
             pid1       number         path '$.pid1'
           , username1  varchar2(4000) path '$.username1'
           , attr1      varchar2(4000) path '$.attr1'
             /* etc etc etc */
           , pid30      number         path '$.pid30'
           , username30 varchar2(4000) path '$.username30'
           , attr30     varchar2(4000) path '$.attr30'
          )
       ) jt
;

 ID       PID1 USERNAME1                 ATTR1                          PID30 USERNAME30             ATTR30
--- ---------- ------------------------- ------------------------- ---------- ------------------------- -------------------------
  1   10000001 11abcxyzxyzxyzxyzxyz      11xyzabcab                  10000030 130abcxyzxyzxyzxyzxyzxyzx 130xyzabcabcabcabcabcabca
  2   20000001 21abcxyzxyzxyzxyzxyzxyzxy 21xyzabcabcabca             20000030 230abcxyzxyzxyzxyzxyzxyzx 230xyzabcabcabcabcabcabca
  3   30000001 31abcxyzxyzxyzxyzxyzxyzxy 31xyzabcabcabcabcabc        30000030 330abcxyzxyzxyzxyzxyzxyzx 330xyzabcabcabcabcabcabca
  4   40000001 41abcxyzxyzxyzxyzxyzxyzxy 41xyzabcabcabcabcabcabcab   40000030 430abcxyzxyzxyzxyzxyzxyzx 430xyzabcabcabcabcabcabca
  5   50000001 51abcxyzxyzxyzxyzxyzxyzxy 51xyzabcabcabcabcabcabcab   50000030 530abcxyzxyzxyzxyzxyzxyzx 530xyzabcabcabcabcabcabca
  6   60000001 61abcxyzxyzxyzxyzxyzxyzxy 61xyzabcabcabcabcabcabcab   60000030 630abcxyzxyzxyzxyzxyzxyzx 630xyzabcabcabcabcabcabca
  7   70000001 71abcxyzxyzxyzxyzxyzxyzxy 71xyzabcabcabcabcabcabcab   70000030 730abcxyzxyzxyzxyzxyzxyzx 730xyzabcabcabcabcabcabca
  8   80000001 81abcxyzxyzxyzxyzxyzxyzxy 81xyzabcabcabcabcabcabcab   80000030 830abcxyzxyzxyzxyzxyzxyzx 830xyzabcabcabcabcabcabca
  9   90000001 91abcxyzxyzxyzxyzxyzxyzxy 91xyzabcabcabcabcabcabcab   90000030 930abcxyzxyzxyzxyzxyzxyzx 930xyzabcabcabcabcabcabca
 10  100000001 101abcxyzxyzxyzxyzxyzxyzx 101xyzabcabcabcabcabcabca  100000030 1030abcxyzxyzxyzxyzxyzxyz 1030xyzabcabcabcabcabcabc

10 rows selected.



Hope that's useful ;-)

Cheerio
/Kim

Connor McDonald
June 07, 2018 - 1:26 am UTC

nice stuff.

More on JSON_TABLE

Stew Ashton, June 11, 2018 - 8:37 pm UTC

Thanks to Kim for the reference!

Marc Bleron blogged about CSV CLOBs and JSON_TABLE two years ago:
https://odieweblog.wordpress.com/2016/04/22/simple-csv-parsing-using-xmltable-or-json_table/
but he used JSON arrays, not objects.

The OP said "up to 60 attributes" so I suppose some attributes may be missing in some records. I thought I'd emulate that, and also put the attributes in random order, just to emphasize what Kim has already shown. To make things easier for me, I'm going to add newlines after each record.

First, I have a question: I have read that to load a large CLOB, it's better to use a lob locator for a persistent LOB. Is this code OK? (See the two lines after BEGIN)
create table t(c clob);

-- Load test data by writing straight into the persistent LOB locator
-- returned by the INSERT (the persistent-vs-temporary LOB question).
declare
  l_clob clob;
begin
  -- initialise the column to an empty LOB and capture its locator
  insert into t values(empty_clob()) returning c into l_clob;
  dbms_lob.open(l_clob, dbms_lob.lob_readwrite);
  for rec in (
    -- 100 newline-terminated records: each gets pid=N~ plus the
    -- attributes a2..a10 in random order, with one attribute (chosen
    -- by the random value r) dropped from each record
    with data as (
      select level+1 n, 'a'||(level+1) || '=' || 'v\'||(level+1) || '~' str
      from dual
      connect by level <= 9
    )
    , randoms as (
      select level+1 n, floor(dbms_random.value(2,9.9999)) r
      from dual
      connect by level <= 100
    )
    select 'pid='||(r.n-1)||'~'||
      listagg(str) within group(order by dbms_random.random)
      || chr(10) str
    from data d, randoms r
    where d.n != r.r
    group by r.n
  ) loop
    dbms_lob.writeappend(l_clob, length(rec.str), rec.str);
  end loop;
  dbms_lob.close(l_clob);
  commit;
exception when others then
  -- make sure the LOB is closed before re-raising
  if dbms_lob.isopen(l_clob) = 1 then
    dbms_lob.close(l_clob);
  end if;
  raise;
end;
/
set long 200
select substr(c,1,200) from t;


SUBSTR(C,1,200)                                                                 
--------------------------------------------------------------------------------
pid=1~a5=v\5~a2=v\2~a3=v\3~a10=v\10~a7=v\7~a8=v\8~a6=v\6~a4=v\4~a9=v\9~
pid=2~a6=v\6~a4=v\4~a8=v\8~a3=v\3~a5=v\5~a2=v\2~a10=v\10~
pid=3~a4=v\4~a7=v\7~a10=v\10~a8=v\8~a5=v\5~a9=v\9~a2=v\2~
pid=4~a4=v\4


Now, here is a pipelined table function that will break the CLOB down into VARCHAR2-size chunks for processing. I think this will scale much better for large volumes.
-- Pipelined function: stream a CLOB as VARCHAR2 rows of at most
-- p_max_lengthb BYTES each, cutting only at a record terminator so
-- every piped row contains whole records. Default terminator is a
-- newline (the literal between the quotes below).
create or replace function pipe_clob (
  p_clob in clob,
  p_max_lengthb in integer default 4000,
  p_rec_term in varchar2 default '
'
) return sys.odcivarchar2list pipelined authid current_user as
  l_amount integer;             -- characters requested/returned by DBMS_LOB.READ
  l_offset integer;             -- current read position in the CLOB (characters)
  l_buffer varchar2(4000 CHAR); -- raw chunk read from the CLOB
  l_out varchar2(4000 BYTE);    -- byte-limited chunk actually piped
  l_buff_lengthb integer;       -- byte position of the chosen cut point
  l_occurence integer;
  l_rec_term_length integer := length(p_rec_term);
begin
  l_amount := p_max_lengthb;
  l_offset := 1;
  -- a short read (l_amount < p_max_lengthb) means we reached end of CLOB
  while l_amount = p_max_lengthb loop
    begin
      DBMS_LOB.READ (
       p_clob,
       l_amount,
       l_offset,
       l_buffer
      );
    exception when no_data_found then
      -- offset already past end of CLOB: nothing left, force loop exit
      l_amount := 0;
    end;
    if l_amount > 0 then
      -- scan terminator occurrences from the end of the buffer backwards
      -- (INSTRB works in bytes) until the cut point fits in p_max_lengthb
      l_buff_lengthb := p_max_lengthb + 1;
      l_occurence := 0;
      while l_buff_lengthb > p_max_lengthb loop
        l_occurence := l_occurence + 1;
        l_buff_lengthb := instrb(l_buffer,p_rec_term,-1, l_occurence);
      end loop;
      if l_buff_lengthb = 0 then
        -- no terminator anywhere in the buffer: take the whole buffer.
        -- NOTE(review): the substrb below still subtracts the terminator
        -- length, so a final record NOT ending in p_rec_term would lose
        -- its last bytes -- the demo data always ends with the
        -- terminator; confirm before using on real data.
        l_buff_lengthb := lengthb(l_buffer);
      end if;
      l_out := substrb(l_buffer, 1, l_buff_lengthb-l_rec_term_length);
      pipe row(l_out);
      -- advance in CHARACTERS past the piped text plus its terminator
      l_offset := l_offset + length(l_out) + l_rec_term_length;
    end if;
  end loop;
  return;
end;
/

After that:
- I call JSON_ARRAY on each chunk to escape characters if necessary.
- then I turn each record into an object, but put all the objects inside an array.
- JSON_TABLE breaks down the array and puts each value in the correct column.
-- Parse each piped chunk:
--   * json_array() escapes any JSON-special characters in the raw text
--     (so the record-terminating newline becomes the two characters \n),
--   * the substr()/replace() calls reshape the escaped name=value~
--     records into an array of JSON objects: [{"n":"v",...},{...}],
--   * json_table() projects one column per attribute; attributes absent
--     from a record simply come back NULL.
with json_data as (
  select
  replace(
    replace(
      replace(
        '[{'||substr(json_array(column_value),2,length(json_array(column_value))-4)||'"}]',
        '~\n',
        '"},{"'
      ),
      '=',
      '":"'
    ),
    '~',
    '","'
  )
   jstr
  from pipe_clob((select c from t), 3300)
)
select r.* from json_data j, json_table(
  j.jstr, '$[*]' columns
    pid varchar2(9) path '$.pid',
    a2 varchar2(9) path '$.a2',
    a3 varchar2(9) path '$.a3',
    a4 varchar2(9) path '$.a4',
    a5 varchar2(9) path '$.a5',
    a6 varchar2(9) path '$.a6',
    a7 varchar2(9) path '$.a7',
    a8 varchar2(9) path '$.a8',
    a9 varchar2(9) path '$.a9'
) r
where rownum <= 10;

PID       A2        A3        A4        A5        A6        A7        A8        A9       
--------- --------- --------- --------- --------- --------- --------- --------- ---------
1         v\2       v\3       v\4       v\5       v\6       v\7       v\8       v\9      
2         v\2       v\3       v\4       v\5       v\6                 v\8                
3         v\2                 v\4       v\5                 v\7       v\8       v\9      
4         v\2       v\3       v\4       v\5       v\6       v\7       v\8       v\9      
5         v\2       v\3       v\4       v\5       v\6       v\7       v\8       v\9      
6         v\2                 v\4       v\5       v\6       v\7       v\8       v\9      
7         v\2       v\3       v\4       v\5       v\6       v\7                 v\9      
8         v\2       v\3       v\4       v\5       v\6       v\7       v\8                
9         v\2       v\3       v\4       v\5       v\6       v\7       v\8       v\9      
10        v\2       v\3       v\4       v\5       v\6       v\7       v\8       v\9
Best regards,
Stew
Connor McDonald
June 12, 2018 - 1:46 am UTC

I have read that to load a large CLOB, it's better to use a lob locator for a persistent LOB

Do you have a reference?

The Disappearing Reference

Stew Ashton, June 12, 2018 - 12:39 pm UTC

You ask: "Do you have a reference?"

I thought I had read something two days previously. I find nothing that fits my (imaginary?) memory, but I did find this:

https://docs.oracle.com/en/database/oracle/oracle-database/12.2/adlob/working-with-LOBs.html#GUID-F61CA075-8CB1-4E0B-B538-D886F6CAE263

"Before you can start writing data to a persistent LOB using ... PL/SQL ... , you must make the LOB column/attribute non-NULL.

You can make a LOB column/attribute non-NULL by initializing the persistent LOB to empty, using an INSERT/UPDATE statement with the function ... EMPTY_CLOB for CLOBs and NCLOBs."

Wouldn't that be more efficient than populating a temporary LOB in a temp tablespace, then copying that LOB to a normal tablespace?

Anyway I wanted your opinion and now I have it, thanks.

Best regards,
Stew
Connor McDonald
June 13, 2018 - 2:05 am UTC

Different use cases I think here - my demo simply wanted a nice large clob to work with as source. It is/was a transient thing, so I think a temporary clob makes sense.

Similarly, if I'm writing to a clob as part of an application but ultimately I'll be discarding it, I'll go temporary.

But if I'm building a clob that will persist forever, then I'll probably go permanent.

Why the caveat of "probably"? Because I might be doing 10,000 operations on the clob before I reach my "final state", all of which would logged, have read consistent chunk retention etc. So in that instance, I might go temporary for the 9,999 operations and transfer to permanent for that final operation to make it persist.

thank you

Robert, June 12, 2018 - 6:55 pm UTC

Thank you for the response. I tested this against my tables and it works very well!
Connor McDonald
June 13, 2018 - 1:59 am UTC

glad we could help

Thanks

Stew Ashton, June 13, 2018 - 4:35 am UTC

Connor said: "Different use cases I think here - my demo simply wanted a nice large clob to work with as source. It is/was a transient thing, so I think a temporary clob makes sense."

Yes, clearly. I was asking about a different use case and should have stated that more explicitly.

"...I might be doing 10,000 operations on the clob before I reach my "final state", all of which would logged, have read consistent chunk retention etc. So in that instance, I might go temporary for the 9,999 operations and transfer to permanent for that final operation to make it persist."

Good point! Thanks for the in depth answer, much appreciated.

Best regards,
Stew

CSV string

Anton, January 16, 2023 - 8:05 am UTC

Hello!
I receive a response from a web site to my output as a CSV string.
How can I parse this string and insert it into my table:
First line)"Nr. crt.","AWB","Client dest","Telefon dest","StradaDestinatar","NrDestinatar","BlocDestinatar","ScaraDestinatar","EtajDestinatar","ApDestinatar","Oras dest","Orasel","Plic","Colet","kg","Continut","Plata la","Val. decl.","Ramburs","Obs.","PersExpeditor","PersDest","DepNr","KmExtDest","Data AWB","Ora AWB","Ridicat","Centru Cost","Status","Data confirmarii","Ora confirmarii","Nume confirmare","Client exp","Restituire","Tip serviciu","Banca","IBAN","AWB Retur","Judet dest","Expeditor - Judet","Expeditor - Cod postal"
Second line)"1","1013392030001","ESTEL SVX GROUP SRL Iasi","0759531825","() 1 Tomesti, Str. C.Cirnat.nr. H/37","","","","","","Braila","Braila","0","0","1.00","PRODUSE LA SCHIMB!","ESTEL SVX GROUP SRL Iasi",".00","","Restituire la AWB 6012306900018","Veaceslav Tudor","ESTEL SVX GROUP SRL Iasi","","","10.01.2023","00:00:00","DA","","Expeditie in livrare","","","","VeaceslavTudor","","Standard","","","","Iasi","",""
Third line)"2","6013306900003","Rusu Madalina","0754693137","Dimitrie Dan (Strada) 64 Dimitrie Dan (Strada), nr. 26, bloc salonjully","","","","","","Bohani","Radauti","0","1","3.00","","ESTEL SVX GROUP SRL Iasi",".00","276.00","Livrare urgenta;A se contacta telefonic;Atentie-FRAGIL","ESTEL SVX GROUP SRL Iasi","Rusu Madalina","","","10.01.2023","00:00:00","DA","","Livrat","14.01.2023","12:29","daiana","ESTEL SVX GROUP SRL Iasi","","Cont Colector","","","","Suceava","","" ?

In the first line are the names of the columns, in second and third lines are values.
There are empty values in double quotes that can appear in different places, and there are also commas inside the values themselves — so the string cannot simply be split on commas.
Please, help me to find a solution in this case!
Connor McDonald
January 17, 2023 - 1:51 am UTC

Here's an example of walking along the string and parsing out the fields (including the embedded commas)

SQL>   set serverout on
SQL> declare
  2    l_string varchar2(200) := '"1","1013392030001","ESTEL SVX GROUP SRL Iasi","0759531825","() 1 Tomesti, Str. C.Cirnat.nr. H/37","","","","","","Braila","Braila","0","0"';
  3    l_in_quote boolean := false;
  4    l_char varchar2(1);
  5    l_word varchar2(100);
  6  begin
  7    for i in 1 .. length(l_string)
  8    loop
  9      l_char := substr(l_string,i,1);
 10      if l_char = '"' then
 11        l_in_quote := not l_in_quote;
 12      elsif l_char = ',' then
 13        if not l_in_quote then
 14          dbms_output.put_line(case when l_word is null then '*null*' else l_word end);
 15          l_word := null;
 16        end if;
 17      else
 18        l_word := l_word || l_char;
 19      end if;
 20    end loop;
 21    dbms_output.put_line(l_word);
 22  end;
 23  /
1
1013392030001
ESTEL SVX GROUP SRL Iasi
0759531825
() 1 Tomesti Str. C.Cirnat.nr. H/37
*null*
*null*
*null*
*null*
*null*
Braila
Braila
0
0

PL/SQL procedure successfully completed.


That should get you on the way to building insert statements

Re: CSV string

Stew Ashton, January 17, 2023 - 9:16 am UTC

The output looks like valid content of a JSON array, and commas within quoted strings are not a problem. (If there can be double quotes within a value, that would require a slight addition.)
-- Treat the whole quoted-CSV line as a JSON array: wrap it in [ ] and
-- pull each field out positionally with JSON_VALUE. Commas inside the
-- double-quoted values are then handled by the JSON parser rather than
-- by naive splitting, and empty "" fields come back NULL.
with jdata(jarray) as (
  select '['||
  '"1","1013392030001","ESTEL SVX GROUP, SRL Iasi","0759531825","() 1 Tomesti, Str. C.Cirnat.nr. H/37","","","","","","Braila","Braila","0","0","1.00","PRODUSE LA SCHIMB!","ESTEL SVX GROUP SRL Iasi",".00","","Restituire la AWB 6012306900018","Veaceslav Tudor","ESTEL SVX GROUP SRL Iasi","","","10.01.2023","00:00:00","DA","","Expeditie in livrare","","","","VeaceslavTudor","","Standard","","","","Iasi","",""'
  ||']'
  from dual
)
select
  -- one JSON_VALUE per positional array element ($[0] .. $[41])
  json_value(jarray, '$[0]') c0,
  json_value(jarray, '$[1]') c1,
  json_value(jarray, '$[2]') c2,
  json_value(jarray, '$[3]') c3,
  json_value(jarray, '$[4]') c4,
  json_value(jarray, '$[5]') c5,
  json_value(jarray, '$[6]') c6,
  json_value(jarray, '$[7]') c7,
  json_value(jarray, '$[8]') c8,
  json_value(jarray, '$[9]') c9,
  json_value(jarray, '$[10]') c10,
  json_value(jarray, '$[11]') c11,
  json_value(jarray, '$[12]') c12,
  json_value(jarray, '$[13]') c13,
  json_value(jarray, '$[14]') c14,
  json_value(jarray, '$[15]') c15,
  json_value(jarray, '$[16]') c16,
  json_value(jarray, '$[17]') c17,
  json_value(jarray, '$[18]') c18,
  json_value(jarray, '$[19]') c19,
  json_value(jarray, '$[20]') c20,
  json_value(jarray, '$[21]') c21,
  json_value(jarray, '$[22]') c22,
  json_value(jarray, '$[23]') c23,
  json_value(jarray, '$[24]') c24,
  json_value(jarray, '$[25]') c25,
  json_value(jarray, '$[26]') c26,
  json_value(jarray, '$[27]') c27,
  json_value(jarray, '$[28]') c28,
  json_value(jarray, '$[29]') c29,
  json_value(jarray, '$[30]') c30,
  json_value(jarray, '$[31]') c31,
  json_value(jarray, '$[32]') c32,
  json_value(jarray, '$[33]') c33,
  json_value(jarray, '$[34]') c34,
  json_value(jarray, '$[35]') c35,
  json_value(jarray, '$[36]') c36,
  json_value(jarray, '$[37]') c37,
  json_value(jarray, '$[38]') c38,
  json_value(jarray, '$[39]') c39,
  json_value(jarray, '$[40]') c40,
  json_value(jarray, '$[41]') c41
from jdata;

C0    C1               C2                           C3            C4                                      C5    C6    C7    C8    C9    C10       C11       C12    C13    C14     C15                   C16                         C17    C18    C19                                C20                C21                         C22    C23    C24           C25         C26    C27    C28                     C29    C30    C31    C32               C33    C34         C35    C36    C37    C38     C39    C40    C41    
1     1013392030001    ESTEL SVX GROUP, SRL Iasi    0759531825    () 1 Tomesti, Str. C.Cirnat.nr. H/37                                  Braila    Braila    0      0      1.00    PRODUSE LA SCHIMB!    ESTEL SVX GROUP SRL Iasi    .00           Restituire la AWB 6012306900018    Veaceslav Tudor    ESTEL SVX GROUP SRL Iasi                  10.01.2023    00:00:00    DA            Expeditie in livrare                         VeaceslavTudor           Standard                         Iasi                         

Connor McDonald
January 18, 2023 - 7:11 am UTC

nice stuff

CSV string

Anton, January 18, 2023 - 11:45 am UTC

Thanks for your help, Stew Ashton's version worked for me!


Connor McDonald
January 19, 2023 - 12:02 am UTC

glad it worked out

More to Explore

PL/SQL demos

Check out more PL/SQL tutorials on our LiveSQL tool.

PL/SQL docs

PL/SQL reference manual from the Oracle documentation library