0 Replies Latest reply on Jul 4, 2016 10:00 AM by Erik Raetz

    How to read tens of thousands of data sets from a JSON array in chunks

    Erik Raetz

      Following is an extraction of our solution.

      Post body is a json array of employees.

       

      1.) Create a POST service using PL/SQL block.

       

        DECLARE

        BEGIN

          PostData(prequest  => :body);

        END;

       

      2.) PostData procedure

       

      Writes request body to table field of type CLOB with a trigger for IS JSON (STRICT).

      Reads the JSON data with a JSON_TABLE select through a cursor.

      Reads cursor in chunks of 100.

       

        PROCEDURE PostData(

          prequest          IN BLOB

        )

        IS

       

          CURSOR curData(taid IN NUMBER)

          IS

            SELECT

              jt.*

            FROM

              transaction ta,

              json_table(ta.ta_request, '$'

                COLUMNS(

                  NESTED PATH '$.employees[*]'

                    COLUMNS (

                      i_index FOR ORDINALITY,

                      --exists

                      e_o_employees NUMBER EXISTS PATH '$',

                      e_salutation NUMBER EXISTS PATH '$.salutation',

                      e_title NUMBER EXISTS PATH '$.title',

                      e_foresurname NUMBER EXISTS PATH '$.foresurname',

                      e_surname NUMBER EXISTS PATH '$.surname',

                      e_firm1 NUMBER EXISTS PATH '$.firm1',

                      e_firm2 NUMBER EXISTS PATH '$.firm2',

                      e_street NUMBER EXISTS PATH '$.street',

                      e_housenumber NUMBER EXISTS PATH '$.housenumber',

                      e_zip NUMBER EXISTS PATH '$.zip',

                      e_city NUMBER EXISTS PATH '$.city',

                      e_country NUMBER EXISTS PATH '$.country',

                      --data

                      d_salutation VARCHAR2(1000) PATH '$.salutation',

                      d_title VARCHAR2(1000) PATH '$.title',

                      d_foresurname VARCHAR2(1000) PATH '$.foresurname',

                      d_surname VARCHAR2(1000) PATH '$.surname',

                      d_firm1 VARCHAR2(1000) PATH '$.firm1',

                      d_firm2 VARCHAR2(1000) PATH '$.firm2',

                      d_street VARCHAR2(1000) PATH '$.street',

                      d_housenumber VARCHAR2(1000) PATH '$.housenumber',

                      d_zip VARCHAR2(1000) PATH '$.zip',

                      d_city VARCHAR2(1000) PATH '$.city',

                      d_country VARCHAR2(1000) PATH '$.country'

                    )

                )

              ) jt

            WHERE

              ta.ta_id = taid;

       

          TYPE tabCurData IS TABLE OF curData%ROWTYPE INDEX BY PLS_INTEGER;

          ltabCurData tabCurData;

       

          ltaid           NUMBER;

       

        BEGIN

       

          --converts prequest blob to clob and writes clob to table: transaction, field: ta_request

          --ta_request has trigger for IS JSON (STRICT)

          --returns primary key

          ltaid := CreateTransaction(prequest => prequest);

       

          --read data from transaction clob in chunks of 100

          BEGIN

            OPEN curData(ltaid);

            LOOP

              FETCH curData BULK COLLECT INTO ltabCurData LIMIT 100;

              FOR i IN 1 .. ltabCurData.COUNT

              LOOP

                EXIT WHEN ltabCurData(i).e_o_employees = 0;

                --index

                htp.p(ltabCurData(i).i_index);

                --exists surname

                htp.p(ltabCurData(i).e_surname);

                --data surname

                htp.p(ltabCurData(i).d_surname);

              END LOOP;

              EXIT WHEN ltabCurData.COUNT = 0;

            END LOOP;

            CLOSE curData;

          EXCEPTION

            WHEN OTHERS THEN

              htp.p(SQLERRM);

          END;

       

        END PostData;