Reputation: 524
As my questions indicates I'd like some advice on how to optimize this sql query. Due to its relative slow performance (~10s) when returning lots of results (>~250) some serverless functions time out on my backend.
Tables:
word 52.000 rows
sense 70.000 rows
inflection 1.700 rows
sql:
create table word (id int8, level int2, homonymnumber int2, alpha int8, frequency int8, hangul text, hanja text, pronounciation text, audio text, typeeng text, typekr text)
create table inflection (inflectionid int8, id int8, inflections text, inflectionlinks json)
create table sense (senseid int8, id int8, translation text, definition text, krdefinition text, sensenr int2)
Any help is much appreciated.
sql:
select json_agg(
distinct jsonb_build_object(
'item',
jsonb_build_object(
'Id',
word.id,
'Alpha',
word.alpha,
'Frequency',
word.frequency,
'HomonymNumber',
word.homonymnumber,
'Pronounciation',
word.pronounciation,
'Audio',
word.audio,
'TypeKr',
word.typekr,
'TypeEng',
word.typeeng,
'Hanja',
word.hanja,
'Hangul',
word.hangul,
'Senses',
(
select json_agg(
jsonb_build_object(
'Translation',
sense.translation,
'Definition',
sense.definition,
'DefinitionKr',
sense.definitionkr,
'SenseNr',
sense.sensenr
)
)
from sense
where sense.id = word.id
),
'Inflection',
inflection.inflections,
'InflectionLinks',
inflection.inflectionlinks
)
)
)
from word
full join inflection on word.id = inflection.id
right join sense on word.id = sense.id
where word.hangul similar to concat('%', term, '%')
or sense.translation similar to concat('%', term, '%')
or inflection.inflections similar to concat('%', term, '%')
Upvotes: 0
Views: 141
Reputation: 3303
Try row_to_json function on postgresql, same time your query will be simple. Fox example:
CREATE TABLE books (
id int4 NOT NULL,
bookcode int4 NULL,
bookname varchar NULL,
CONSTRAINT books_pk PRIMARY KEY (id)
);
INSERT INTO books (id, bookcode, bookname) VALUES(3, 1245, 'Book1');
INSERT INTO books (id, bookcode, bookname) VALUES(4, 1655, 'Book5');
INSERT INTO books (id, bookcode, bookname) VALUES(5, 2211, 'Book4');
INSERT INTO books (id, bookcode, bookname) VALUES(6, 1219, 'Book8');
INSERT INTO books (id, bookcode, bookname) VALUES(7, 9955, 'Book2');
INSERT INTO books (id, bookcode, bookname) VALUES(1, 2267, 'Book1');
INSERT INTO books (id, bookcode, bookname) VALUES(2, 12322, 'Book2');
INSERT INTO books (id, bookcode, bookname) VALUES(8, 43543, 'Book10');
After:
select row_to_json(a) from books a
Result:
{"id":3,"bookcode":1245,"bookname":"Book1"}
{"id":4,"bookcode":1655,"bookname":"Book5"}
{"id":5,"bookcode":2211,"bookname":"Book4"}
{"id":6,"bookcode":1219,"bookname":"Book8"}
{"id":7,"bookcode":9955,"bookname":"Book2"}
{"id":1,"bookcode":2267,"bookname":"Book1"}
{"id":2,"bookcode":12322,"bookname":"Book2"}
{"id":8,"bookcode":43543,"bookname":"Book10"}
Or
select jsonb_build_object('item', jsonb_agg(row_to_json(a))) from books a
Result:
{
"item": [
{
"id": 3,
"bookcode": 1245,
"bookname": "Book1"
},
{
"id": 4,
"bookcode": 1655,
"bookname": "Book5"
},
{
"id": 5,
"bookcode": 2211,
"bookname": "Book4"
},
{
"id": 6,
"bookcode": 1219,
"bookname": "Book8"
},
{
"id": 7,
"bookcode": 9955,
"bookname": "Book2"
},
{
"id": 1,
"bookcode": 2267,
"bookname": "Book1"
},
{
"id": 2,
"bookcode": 12322,
"bookname": "Book2"
},
{
"id": 8,
"bookcode": 43543,
"bookname": "Book10"
}
]
}
Field names you can change with using aliases.
Upvotes: 1