Reputation: 15
There is a query, for example:
with j (sJson) as (
select '{
"ID":"1444284517",
"ID_ORD":"4255;2187606199",
"Vals":{
"CODE":"ONB2B3BB8",
"DORD":"25.04.2021"
}
}'
from dual
)
select jt.*
from j
cross apply json_table (j.sJson, '$'
columns
ID varchar2(32) path '$.ID',
ID_ORD varchar2(32) path '$.ID_ORD',
nested path '$.Vals[*]'
columns (
CODE varchar2(9) path '$.CODE',
DORD varchar2(30) path '$.DORD',
....
)) jt
The Vals column may contain different fields. How can I list all of the fields in the columns clause without specifying a type and path for each one? In other words, how do I make this a dynamic parser, so that I don't have to hard-code entries like CODE varchar2(9) path '$.CODE'?
I expect this result:
| ID | ID_ORD | CODE | DORD |
+------------+-----------------+-----------+------------+
| 1444284517 | 4255;2187606199 | ONB2B3BB8 | 25.04.2021 |
Upvotes: 0
Views: 4174
Reputation: 470
SQL/JSON function json_table projects specific JSON data to columns of various SQL data types. You use it to map parts of a JSON document into the rows and columns of a new, virtual table, which you can also think of as an inline view.
Because your goal seems to be to parse JSON through json_table, I suggest reading about the JSON Data Guide feature here. For example, you can create a view based on the structure of a row; the DBMS_JSON package will generate the SQL/JSON for you, so there is no need to write it by hand. You can also add virtual columns to your original table (a minimal sketch of that is shown at the end of this answer).
I added a new field, XARR, in order to show the power of the JSON Data Guide feature:
drop table test_json;
create table test_json
as
with j (sJson ) as (
select CAST ('{
"ID":"1444284517",
"ID_ORD":"4255;2187606199",
"Vals":{
"CODE":"ONB2B3BB8",
"DORD":"25.04.2021",
"XARR":[{"IDARR":1},{"IDARR":2},{"IDARR":3}]
}
}' as VARCHAR2(2000))
from dual
)
select *
from j;
SELECT json_dataguide(SJSON, DBMS_JSON.FORMAT_HIERARCHICAL, DBMS_JSON.PRETTY)
FROM TEST_JSON;
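That returns the hierarchical data guide for the document. Abridged, and hedging on the exact attribute values (lengths are rounded up and may differ by version), it has roughly this shape; each scalar field is described by a type, an "o:length" and an "o:preferred_column_name", which is what DBMS_JSON.CREATE_VIEW later uses to name the columns:
{
  "type" : "object",
  "properties" :
  {
    "ID" :
    {
      "type" : "string",
      "o:length" : 16,
      "o:preferred_column_name" : "ID"
    },
    "Vals" :
    {
      "type" : "object",
      "properties" :
      {
        "CODE" : { ... },
        "DORD" : { ... },
        "XARR" : { ... }
      }
    },
    "ID_ORD" : { ... }
  }
}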
drop view MYVIEW;
DECLARE
dg CLOB;
BEGIN
SELECT json_dataguide(SJSON, DBMS_JSON.FORMAT_HIERARCHICAL, DBMS_JSON.PRETTY)
INTO dg
FROM TEST_JSON where rownum < 2;
DBMS_JSON.create_view('MYVIEW',
'TEST_JSON',
'SJSON',
dg);
END;
/
Use the view directly, or retrieve the generated SQL:
select text from all_views where view_name = 'MYVIEW';
TEXT
-------------------------------------------------------------------
SELECT RT."SJSON",JT."ID",JT."CODE",JT."DORD",JT."ID_ORD",JT."IDARR"
FROM "ADMIN"."TEST_JSON" RT,
JSON_TABLE("SJSON", '$[*]' COLUMNS
"ID" varchar2(16) path '$.ID',
"CODE" varchar2(16) path '$.Vals.CODE',
"DORD" varchar2(16) path '$.Vals.DORD',
NESTED PATH '$.Vals.XARR[*]' COLUMNS (
"IDARR" number path '$.IDARR'),
"ID_ORD" varchar2(16) path '$.ID_ORD')JT
select * from myview;
ID CODE DORD ID_ORD IDARR
---------- --------- ---------- --------------- ---
1444284517 ONB2B3BB8 25.04.2021 4255;2187606199 1
1444284517 ONB2B3BB8 25.04.2021 4255;2187606199 2
1444284517 ONB2B3BB8 25.04.2021 4255;2187606199 3
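As mentioned above, instead of a view you can also let DBMS_JSON add virtual columns to the original table. A minimal sketch, assuming the same TEST_JSON table and the same hierarchical data guide as above (note that scalar fields under arrays, such as IDARR, are not projected this way):
DECLARE
dg CLOB;
BEGIN
SELECT json_dataguide(SJSON, DBMS_JSON.FORMAT_HIERARCHICAL, DBMS_JSON.PRETTY)
INTO dg
FROM TEST_JSON where rownum < 2;
-- add virtual columns for the scalar fields described by the data guide
DBMS_JSON.ADD_VIRTUAL_COLUMNS('TEST_JSON', 'SJSON', dg);
END;
/
-- the projected fields now show up as columns of the table itself
select column_name from user_tab_columns where table_name = 'TEST_JSON';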
Upvotes: 0
Reputation: 191275
If you really want a variable number of columns then you will either need to do a dynamic pivot of a column-based result (such as from @MTO's answer), or generate a dynamic json_table.
You can use json_dataguide() to generate the schema for a specific Vals object, and then pass that through its own json_table and loop to generate the columns clause names, types and paths.
This example regenerates your original query, except that it uses a bind variable for the JSON string instead of a CTE, and then opens that cursor using the same string that was used for the data guide.
create or replace function dynamic_parse(sJson clob)
return sys_refcursor as
sGuide clob;
sSQL clob;
rc sys_refcursor;
begin
-- initial static part of query
sSQL := q'^select jt.*
from json_table (:sJson, '$'
columns
ID varchar2(32) path '$.ID',
ID_ORD varchar2(32) path '$.ID_ORD',
nested path '$.Vals[*]'
columns (^';
select json_dataguide(jt.vals)
into sGuide
from json_table (sJson, '$'
columns
VALS clob format json path '$.Vals'
) jt;
for r in (
select jt.*
from json_table (sGuide format json, '$[*]'
columns
indx for ordinality,
path varchar2(30) path '$."o:path"',
type varchar2(30) path '$.type',
length number path '$."o:length"'
) jt
)
loop
sSQL := sSQL || case when r.indx > 1 then ',' end
|| chr(10) || ' '
|| '"' || substr(r.path, 3) || '"'
-- may need to handle other data type more carefully too
|| ' ' || case when r.type = 'string' then 'varchar2(' || r.length || ')' else r.type end
|| q'^ path '^' || r.path || q'^'^';
end loop;
-- final static part of query
sSQL := sSQL || chr(10) || ' )) jt';
dbms_output.put_line(sSQL);
open rc for sSQL using sJson;
return rc;
end;
/
db<>fiddle showing some of the steps, the generated dynamic SQL statement, and how you could use that to open a ref cursor. The generated dynamic statement comes out as:
select jt.*
from json_table (:sJson, '$'
columns
ID varchar2(32) path '$.ID',
ID_ORD varchar2(32) path '$.ID_ORD',
nested path '$.Vals[*]'
columns (
"CODE" varchar2(16) path '$.CODE',
"DORD" varchar2(16) path '$.DORD'
)) jt
It also shows a dummy anonymous block that calls the function and prints the ref cursor contents (because db<>fiddle doesn't support select func_returning_ref_cursor from dual, which you can do in SQL Developer etc.) as:
1444284517:4255;2187606199:ONB2B3BB8:25.04.2021
... but that demonstrates one of the problems with this approach: the caller has to know the number and types of columns in advance, or itself has to use some element of dynamic handling.
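The dummy block is along these lines; a sketch only, since the local variables and the fetch list have to be hard-coded to match the columns this particular document happens to produce, which is exactly that limitation:
declare
rc sys_refcursor;
v_id varchar2(32);
v_id_ord varchar2(32);
v_code varchar2(16);
v_dord varchar2(16);
begin
rc := dynamic_parse('{"ID":"1444284517","ID_ORD":"4255;2187606199","Vals":{"CODE":"ONB2B3BB8","DORD":"25.04.2021"}}');
loop
-- the fetch list must match the generated columns: ID, ID_ORD, CODE, DORD
fetch rc into v_id, v_id_ord, v_code, v_dord;
exit when rc%notfound;
dbms_output.put_line(v_id || ':' || v_id_ord || ':' || v_code || ':' || v_dord);
end loop;
close rc;
end;
/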
You may also want to explore the wider JSON Data Guide functionality.
Upvotes: 0
Reputation: 167972
You can define the functions:
CREATE FUNCTION get_keys(
value IN CLOB
) RETURN SYS.ODCIVARCHAR2LIST PIPELINED
IS
js JSON_OBJECT_T := JSON_OBJECT_T( value );
keys JSON_KEY_LIST;
BEGIN
keys := js.get_keys();
FOR i in 1 .. keys.COUNT LOOP
PIPE ROW ( keys(i) );
END LOOP;
RETURN;
END;
/
CREATE FUNCTION get_value(
value IN CLOB,
path IN VARCHAR2
) RETURN VARCHAR2
IS
js JSON_OBJECT_T := JSON_OBJECT_T( value );
BEGIN
RETURN js.get_string( path );
END;
/
Then use the query:
WITH j (sJson) as (
select '{
"ID":"1444284517",
"ID_ORD":"4255;2187606199",
"Vals":{
"CODE":"ONB2B3BB8",
"DORD":"25.04.2021"
}
}'
from dual
)
SELECT jt.id,
jt.id_ord,
k.COLUMN_VALUE AS Key,
get_value( jt.vals, k.COLUMN_VALUE ) AS value
FROM j
CROSS APPLY JSON_TABLE(
j.sjson,
'$'
COLUMNS (
id VARCHAR2(20) PATH '$.ID',
id_ord VARCHAR2(30) PATH '$.ID_ORD',
vals VARCHAR2(4000) FORMAT JSON PATH '$.Vals'
)
) jt
CROSS APPLY get_keys( jt.vals ) k
Which outputs:
ID          ID_ORD           KEY   VALUE
----------  ---------------  ----  ----------
1444284517  4255;2187606199  CODE  ONB2B3BB8
1444284517  4255;2187606199  DORD  25.04.2021
(Note: SQL does not support a dynamic number of columns, so you need to provide a fixed number of columns, such as key and value, and have the output as rows rather than columns.)
db<>fiddle here
Upvotes: 1