Reputation: 35
I have a query that uses dblink to backfill data from one database and insert it into a jsonb column in another database. It only needs to run on about 1000 records, but it takes a long time to run (between 30 seconds and a minute).
Why? How do I make this faster?
Here is the query:
-- Enable dblink so we can pull rows from the other database.
-- IF NOT EXISTS makes the script safe to re-run.
CREATE EXTENSION IF NOT EXISTS dblink;

-- Backfill user_id into the jsonb 'user' object, matching local rows to
-- remote rows on external_id.
-- NOTE(review): dblink fetches the ENTIRE remote result set into memory
-- before the join, with no index or join pushdown — the likely cause of
-- the 30-60s runtime. postgres_fdw (see the answer) avoids this.
UPDATE "table" AS t
SET jsonb_example_column = jsonb_set(
        jsonb_example_column,
        '{user}',
        jsonb_build_object(
            'name',    jsonb_example_column -> 'user' -> 'name',
            'phone',   jsonb_example_column -> 'user' -> 'phone',
            'address', jsonb_example_column -> 'user' -> 'address',
            'user_id', other_db.user_id
        )
    )
FROM dblink(
        -- Connection string: whitespace between keyword=value pairs is fine.
        'dbname=$DB_NAME
         port=$DB_PORT
         host=$DB_HOST
         user=$DB_USER
         password=$DB_PASSWORD',
        'SELECT user_id, external_id FROM other_db_table'
     ) AS other_db(user_id uuid, external_id uuid)
WHERE other_db.external_id = t.external_id;
Upvotes: 1
Views: 83
Reputation: 35
The answer comes courtesy of a_horse_with_no_name; here is the optimized query, which now takes a third of the time:
-- postgres_fdw keeps a persistent connection and lets the planner push
-- work down to the remote server, which is why it beats per-statement dblink.
CREATE EXTENSION IF NOT EXISTS postgres_fdw;

-- BUG FIX: "user" is a reserved keyword in PostgreSQL, so CREATE SERVER user
-- fails with a syntax error unless quoted. Use a non-reserved server name.
CREATE SERVER user_srv
    FOREIGN DATA WRAPPER postgres_fdw
    OPTIONS (host '0.0.0.0', port '5432', dbname 'user');

-- Map the local "postgres" role to credentials on the foreign server.
CREATE USER MAPPING FOR postgres
    SERVER user_srv
    OPTIONS (user 'your_foreign_db_user', password 'your_foreign_db_password');

GRANT USAGE ON FOREIGN SERVER user_srv TO postgres;

-- Local proxy for the remote table; only the two columns the join needs.
CREATE FOREIGN TABLE IF NOT EXISTS foreign_table (
    user_id     uuid,
    external_id uuid
)
SERVER user_srv
OPTIONS (schema_name 'public', table_name 'foreign_db_table');

-- Same backfill as the dblink version, but joining through the foreign table.
UPDATE "local_db_table" AS lt
SET jsonb_example_column = jsonb_set(
        jsonb_example_column,
        '{user}',
        jsonb_build_object(
            'name',    jsonb_example_column -> 'user' -> 'name',
            'phone',   jsonb_example_column -> 'user' -> 'phone',
            'address', jsonb_example_column -> 'user' -> 'address',
            'user_id', ft.user_id
        )
    )
FROM foreign_table AS ft
WHERE lt.external_id = ft.external_id;
Upvotes: 1