PostgreSQL: Best way to handle an upsert with jsonb data

Trying to handle a PUT/PATCH request in the stored procedure below: insert when new primary key values are received, and upsert when the unique key constraint (item_id in our case) is violated.
Primary key: ItemId
If there is no unique key violation, insert into the view.
If a unique key violation occurs, i.e. the JSON has the same itemId, upsert its properties (quantity, status, etc.).
Stored procedure
CREATE OR REPLACE FUNCTION sp_post_items1(i_data jsonb)
RETURNS TABLE(
fulfiller_id varchar,
item_id varchar,
order_id varchar,
status_id integer,
item_updated_time timestamp without time zone)
AS $function$
DECLARE
itemId1 varchar := null;
statusId1 integer := 1000;
quantity1 numeric;
begin
--SELECT t->>'itemId' itemId ,t->>'statusId' statusId,(t->>'quantity')::numeric quantity INTO itemId1,statusId1,quantity1 FROM jsonb_array_elements(i_data -> 'items') t ;
INSERT INTO vw_item_status_detail(
fulfiller_id,
item_id,
order_id,
status_id,
sku_code,
decoration_technology,
quantity,
item_updated_time)
SELECT
i_data->>'fulfillerId' fulfillerId,
t->>'itemId' itemId,
i_data->>'orderId' orderId,
1000,
t->>'skuCode' skuCode,
t->>'decorationTechnology' decorationTechnology,
(t->>'quantity')::numeric quantity ,
NOW()
FROM jsonb_array_elements(i_data -> 'items') t ;
exception when unique_violation then
update vw_item_status_detail v1 set quantity = coalesce (quantity1 , V1.quantity ), status_id = coalesce (statusId1, V1.status_id ), item_updated_time = now() where v1.item_id = itemId1 ;
RETURN QUERY SELECT
v.fulfiller_id fulfiller_id,
v.item_id item_id,
v.order_id order_id,
v.status_id status_id,
v.item_updated_time item_updated_time
FROM vw_item_status_detail v
WHERE (v.order_id = (SELECT i_data->>'orderId') )
AND (v.fulfiller_id = (SELECT i_data->>'fulfillerId'));
END;
$function$
LANGUAGE plpgsql;
Sample SP call along with the JSON
select * from sp_post_items1('{"orderId": "newtestput1",
"fulfillerId":"kv0fdt6cx7",
"orderDetailsUrl":"het",
"items":[
{
"attributes":
[{"name":"OracleSku","value":"DWj"},{"name":"taskId","value":"33a1595-e36769876c52"},{"name":"height","value":"5.5"},
{"name":"width","value":"32.004"},{"name":"productFamily","value":"DWT"},{"name":"template","value":"GI-AST70W"},
{"name":"labelInfo","value":"DWU-ROHS,LMQ-DBLU-BLU"},{"name":"decorationTechnology","value":"laserEngraving"},
{"name":"material","value":"Rubber LMQ + LHY"},{"name":"XYZ_barcode","value":"21234348.1"},{"name":"orderType","value":"text"},
{"name":"scheduledShipDate","value":"2020-09-12T23:59:00"},{"name":"orderReference","value":"21235677.9"},{"name":"docRefUrl","value":""},{"name":"additionalInfo","value":""}],
"decorationTechnology":"laserEngraving","itemDescription":"Test Sku for Oracle testing",
"itemId":"item1",
"manufacturingUrl":"htighess",
"skuCode":"CIM-QYXB3789","productName":"Test Sku for Oracle testing","quantity":"2000","taskId":"33a1ea44-1f45-4f2d-9595-e36769876c52"
},
{
"attributes":
[{"name":"OracleSku","value":"DWT-XXX-B3LUX-BB-C"},{"name":"taskId","value":"33a1ea44-1f45-4f6c52"},{"name":"height","value":"5.5"},
{"name":"width","value":"32.004"},{"name":"productFamily","value":"DWT"},{"name":"template","value":"GIFTSET-DWT-INDX-AST70W"},
{"name":"labelInfo","value":"DWK-DBLU-BLU"},{"name":"decorationTechnology","value":"laserEngraving"},
{"name":"material","value":"Rubber LMQ + LHY"},{"name":"XYZ_barcode","value":"21234348.1"},{"name":"orderType","value":"text"},
{"name":"scheduledShipDate","value":"2020-09-12T23:59:00"},{"name":"orderReference","value":"21235677.9"},{"name":"docRefUrl","value":""},{"name":"additionalInfo","value":""}],
"decorationTechnology":"laserEngraving","itemDescription":"Test Sku for Oracle testing",
"itemId":"item2",
"manufacturingUrl":"httpdfg",
"skuCode":"CIM-QYXB3789","productName":"Test Sku for Oracle testing","quantity":"1000","taskId":"33a1edfge36769876c52"
}
]
}'::jsonb)
Tried using a FOR loop, but it's not the most optimized way to handle such a scenario.
Tried approach: creating a temp table, but was not successful.

We can use a FROM / NOT IN clause combination to insert as well as update in this case.
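A minimal sketch of that approach, assuming the same view and column names as the function above (sp_post_items2 is a hypothetical name and the RETURNS TABLE part is omitted for brevity): one UPDATE ... FROM for the items that already exist, followed by an INSERT ... SELECT filtered with NOT IN for the rest, so the unique_violation handler is no longer needed.
CREATE OR REPLACE FUNCTION sp_post_items2(i_data jsonb)
RETURNS void
AS $function$
BEGIN
    -- update the items whose itemId already exists in the view
    UPDATE vw_item_status_detail v
    SET quantity          = (t->>'quantity')::numeric,
        status_id         = COALESCE((t->>'statusId')::integer, v.status_id),
        item_updated_time = NOW()
    FROM jsonb_array_elements(i_data -> 'items') t
    WHERE v.item_id = t->>'itemId';

    -- insert the items that are not present yet
    INSERT INTO vw_item_status_detail(
        fulfiller_id, item_id, order_id, status_id,
        sku_code, decoration_technology, quantity, item_updated_time)
    SELECT
        i_data->>'fulfillerId',
        t->>'itemId',
        i_data->>'orderId',
        1000,
        t->>'skuCode',
        t->>'decorationTechnology',
        (t->>'quantity')::numeric,
        NOW()
    FROM jsonb_array_elements(i_data -> 'items') t
    WHERE t->>'itemId' NOT IN (SELECT item_id FROM vw_item_status_detail);
END;
$function$
LANGUAGE plpgsql;
Note that NOT IN behaves unexpectedly if item_id can be NULL in the view; NOT EXISTS is the safer variant in that case.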

Related

Why does my trigger function cause a null error?

PostgreSQL. My trigger function:
CREATE OR REPLACE FUNCTION new_purchase() RETURNS TRIGGER AS $psql$
BEGIN
IF NOT EXISTS(
SELECT 1
FROM currents c
WHERE c.name = new.name
)
THEN
INSERT INTO currents (name, qty)
VALUES (new.name, new.qty);
ELSE
UPDATE currents
SET
qty = qty + new.qty
WHERE
name = new.name;
END IF;
return new;
END;
$psql$ language plpgsql;
Then my trigger:
create trigger new_purchase_trigger
after insert on purchases
for each statement
execute procedure new_purchase();
The two tables I am working with:
Purchases
CREATE TABLE purchases (
name VARCHAR,
date TIMESTAMP,
qty INTEGER,
price NUMERIC,
about VARCHAR
)
Currents
CREATE TABLE currents (
name VARCHAR,
qty INTEGER
)
My INSERT:
INSERT INTO purchases (name, date, qty, price, about)
VALUES ('cheese', '2022-07-31', 1000, 11500, 'holland');
And finally the error when I add data to purchases via INSERT INTO ... VALUES:
ERROR: NULL value in column "name" of relation "currents" violates the NOT NULL constraint
DETAIL: Error string contains (null, null).
CONTEXT: SQL statement: "INSERT INTO currents (name, qty)
VALUES(new.name, new.qty)"
PL/pgSQL new_purchase() function, line 9, statement SQL statement
SQL state: 23502
You need a row-level trigger for what you are trying to achieve. In a statement-level trigger (FOR EACH STATEMENT) the record NEW is not populated:
create trigger new_purchase_trigger
after insert on purchases
for each row
execute procedure new_purchase();
Note that you can simplify your trigger function if you declare currents.name as the primary key:
CREATE OR REPLACE FUNCTION new_purchase()
RETURNS TRIGGER
AS $psql$
BEGIN
INSERT INTO currents (name, qty)
VALUES (new.name, new.qty)
on conflict (name) do update
set qty = currents.qty + excluded.qty;
return new;
END;
$psql$
language plpgsql;
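Since the simplified version relies on ON CONFLICT (name), currents.name needs a primary key (or unique) constraint first; a one-line sketch, assuming the table has no duplicate or NULL names yet:
-- assumes currents.name currently has no duplicates and no NULLs
ALTER TABLE currents ADD PRIMARY KEY (name);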

PostgreSQL: Trigger INSERT INTO SELECT from other table

I'm trying to create a trigger that will add a new processed entry to a destination table each time a new row is created in the source table.
Step 1 Create destination table:
CREATE TABLE public.destination_table (
id serial PRIMARY KEY,
created_at TIMESTAMP NOT NULL,
sale_id INTEGER NOT NULL,
product_id INTEGER NOT NULL,
product_name VARCHAR NOT NULL,
url VARCHAR NOT NULL,
shop_id VARCHAR NOT NULL,
user_id VARCHAR)
Step 2 Create trigger function:
CREATE OR REPLACE FUNCTION triger_function() RETURNS TRIGGER AS
$BODY$
BEGIN
INSERT INTO public.destination_table ( created_at, sale_id, product_id, product_name, url, shop_id, user_id)
SELECT created_at,
sale_id,
product_id,
product_name,
split_part(url::text, '?'::text, 1) AS url,
shop_id,
((((((((data #>> '{}'::text[])::jsonb) #>> '{}'::text[])::jsonb) -> 'local_storage'::text) -> 'data'::text) #>> '{}'::text[])::jsonb) ->> 'user_id'::varchar AS user_id
FROM source_table;
RETURN new;
END;
$BODY$
language plpgsql;
The SELECT query inside the function works normally when run on its own.
Step 3 Create trigger:
CREATE TRIGGER trigger_records
AFTER INSERT ON public.source_table
FOR EACH ROW
EXECUTE PROCEDURE triger_function();
The problem is that the trigger does not work, i.e. it does not record new entries in the target table. I can't figure out where the error is.
You should be using the NEW record in the trigger function to reference the newly inserted data instead of a select, i.e.:
CREATE OR REPLACE FUNCTION triger_function() RETURNS TRIGGER AS
$BODY$
BEGIN
INSERT INTO public.destination_table ( created_at, sale_id, product_id, product_name, url, shop_id, user_id)
VALUES(NEW.created_at,
NEW.sale_id,
NEW.product_id,
NEW.product_name,
split_part(NEW.url::text, '?'::text, 1),
NEW.shop_id,
((((((((NEW.data #>> '{}'::text[])::jsonb) #>> '{}'::text[])::jsonb) -> 'local_storage'::text) -> 'data'::text) #>> '{}'::text[])::jsonb) ->> 'user_id'::varchar);
RETURN new;
END;
$BODY$
language plpgsql;

Postgresql Function for Inserting Data With Join

I tried to create a PostgreSQL function to insert data from the parameters given to the function.
For example, there are two tables:
Table Customer
customer_id
customer_name
customer_phone
Table Transaction
seller_id
customer_name
customer_phone
price
item_name
My current function looks like:
CREATE OR REPLACE FUNCTION add_transaction_detail(seller_id int, customer_id int, price int, item_name varchar)
RETURNS character varying
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
DECLARE
seller_id ALIAS FOR $1;
customer_id ALIAS FOR $2;
BEGIN
INSERT INTO transaction(seller_id, customer_name, customer_phone, item_name, price)
SELECT seller_id, c.customer_name, c.customer_phone, item_name, price
FROM table1 t JOIN customer c ON t.customer_id=c.customer_id
END;
$function$
I found an error in my insert code. Is there any way to put the parameters into one table and then join them? Thanks.
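A minimal sketch of one way to write this, assuming the intent is to look up customer_name and customer_phone by the customer_id parameter and insert them together with the other parameters (the p_ prefixes are just to avoid clashes between parameter names and column names, and the 'ok' return value is a placeholder):
CREATE OR REPLACE FUNCTION add_transaction_detail(
    p_seller_id   int,
    p_customer_id int,
    p_price       int,
    p_item_name   varchar)
RETURNS character varying
LANGUAGE plpgsql
SECURITY DEFINER
AS $function$
BEGIN
    -- look up the customer once and insert it alongside the parameters;
    -- no second source table is needed
    INSERT INTO transaction(seller_id, customer_name, customer_phone, item_name, price)
    SELECT p_seller_id, c.customer_name, c.customer_phone, p_item_name, p_price
    FROM customer c
    WHERE c.customer_id = p_customer_id;

    RETURN 'ok';  -- placeholder result
END;
$function$;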

Postgresql: Syntax issue in MERGE with JSON param in stored function

Facing a syntax issue when I have JSON input whose values need to be inserted/updated into the target table, which is a view.
Expected behavior:
When the primary id matches Item_id (PK) - update its corresponding values.
When the primary id does not match Item_id (PK) - insert into the view.
Sample SP
drop function if exists sp_post_itemsx;
CREATE FUNCTION sp_post_itemsx(i_data jsonb)
RETURNS VOID
AS $function$
BEGIN
MERGE INTO vw_item_status_detail
USING
(SELECT
i_data->>'fulfillerId' fulfillerId,
t->>'itemId' itemId,
i_data->>'orderId' orderId,
1000,
t->>'skuCode' skuCode,
t->>'decorationTechnology' decorationTechnology,
(t->>'quantity')::numeric quantity ,
NOW()
FROM jsonb_array_elements(i_data -> 'items')) as t ON item_id = t.itemId
WHEN MATCHED THEN UPDATE SET
vw.quantity = t.quantity
WHEN NOT MATCHED THEN
INSERT INTO vw_item_status_detail(
fulfiller_id,
item_id,
order_id,
status_id,
sku_code,
decoration_technology,
quantity,
item_updated_time)
SELECT
i_data->>'fulfillerId' fulfillerId,
t->>'itemId' itemId,
i_data->>'orderId' orderId,
1000,
t->>'skuCode' skuCode,
t->>'decorationTechnology' decorationTechnology,
(t->>'quantity')::numeric quantity ,
NOW()
FROM jsonb_array_elements(i_data -> 'items') t ;
END;
$function$
LANGUAGE plpgsql;
ERROR: "vw_item_status_detail" is not a known variable
Not sure what is wrong with the syntax, or whether MERGE doesn't support INSERT when the target is a view.
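A few things stand out, hedged rather than verified against this setup: the "is not a known variable" error typically means the server's plpgsql predates MERGE (PostgreSQL 15) and misreads INTO as a variable capture; the target has no alias, so vw.quantity cannot resolve; the t alias sits outside the USING subquery, so the t->>... references inside it fail; the SET list must use unqualified target columns; and WHEN NOT MATCHED takes INSERT (columns) VALUES (...), not a full INSERT INTO ... SELECT. A sketch along those lines, assuming PostgreSQL 15+ (and note that a view is only accepted as a MERGE target on newer releases, and only if it is auto-updatable):
CREATE OR REPLACE FUNCTION sp_post_itemsx(i_data jsonb)
RETURNS void
AS $function$
BEGIN
    MERGE INTO vw_item_status_detail vw      -- alias the target so it can be referenced
    USING (
        SELECT
            i_data->>'fulfillerId'         AS fulfillerId,
            e->>'itemId'                   AS itemId,
            i_data->>'orderId'             AS orderId,
            1000                           AS statusId,
            e->>'skuCode'                  AS skuCode,
            e->>'decorationTechnology'     AS decorationTechnology,
            (e->>'quantity')::numeric      AS quantity
        FROM jsonb_array_elements(i_data -> 'items') e
    ) AS t ON vw.item_id = t.itemId
    WHEN MATCHED THEN
        UPDATE SET quantity = t.quantity   -- target columns are not qualified here
    WHEN NOT MATCHED THEN
        INSERT (fulfiller_id, item_id, order_id, status_id,
                sku_code, decoration_technology, quantity, item_updated_time)
        VALUES (t.fulfillerId, t.itemId, t.orderId, t.statusId,
                t.skuCode, t.decorationTechnology, t.quantity, NOW());
END;
$function$
LANGUAGE plpgsql;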

SQL - Efficient versioning of DNS records

So far I have come up with this solution that needs further refinement (big thanks to #postgresql on freenode).
The problem I am trying to overcome is an efficient way of storing DNS records whilst maintaining some sort of history. The issue I am currently having is with the wCTE, which is inserting new records and retiring old records correctly. It isn't, however, re-adding records. The wCTE is:
WITH deltas AS (
SELECT o, n FROM (
SELECT id, name, domain_id, class_id, addr FROM record WHERE tld_id = $1
) AS o FULL OUTER JOIN record_temp n
ON (
o.name = n.name AND
o.domain_id = n.domain_id AND
o.class_id = n.class_id AND
o.addr = n.addr
)
WHERE (o.name, o.domain_id, o.class_id, o.addr)
IS DISTINCT FROM (n.name, n.domain_id, n.class_id, n.addr)
), mark_dead AS (
UPDATE record SET alive = FALSE
WHERE id IN (
SELECT (o).id FROM deltas WHERE (o).id IS NOT NULL
) RETURNING *
)
INSERT INTO record (name, domain_id, tld_id, class_id, addr)
SELECT (n).name, (n).domain_id, (n).tld_id, (n).class_id, (n).addr
FROM deltas WHERE
(n).name IS NOT NULL AND
(n).domain_id IS NOT NULL AND
(n).tld_id IS NOT NULL AND
(n).class_id IS NOT NULL AND
(n).addr IS NOT NULL
;
The o side has all the old records that do not exist in record_temp; n has all the records that are new and need to be inserted. I expect I need to add another join (an inner join?) that pulls in the results that exist in both tables, which, if marked as dead, need to be marked as alive again.
The rest of the schema for reference is:
CREATE TABLE record (
id SERIAL,
name VARCHAR(255),
domain_id INT,
tld_id INT,
class_id INT,
addr INET,
alive BOOLEAN DEFAULT TRUE,
PRIMARY KEY (id),
CONSTRAINT fk1 FOREIGN KEY (domain_id) REFERENCES domain (id) MATCH SIMPLE,
CONSTRAINT fk2 FOREIGN KEY (tld_id) REFERENCES tld (id) MATCH SIMPLE,
UNIQUE(name, domain_id, class_id, addr)
);
CREATE TABLE record_history (
id SERIAL,
record_id INT,
history_type record_history_type,
stamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT fk1 FOREIGN KEY (record_id) REFERENCES record (id) MATCH SIMPLE,
PRIMARY KEY(id)
);
CREATE TEMP TABLE record_temp (
name VARCHAR(255),
domain_id INT,
tld_id INT,
class_id INT,
addr INET,
UNIQUE(name, domain_id, class_id, addr)
)
ON COMMIT DROP;
record_history is populated using functions and triggers and is populating how I expect it to; below are these functions and triggers:
CREATE OR REPLACE FUNCTION add_insert_record_history()
RETURNS TRIGGER AS $$
BEGIN
INSERT INTO record_history (record_id, history_type) VALUES (NEW.id, 'added');
RETURN NEW;
END;
$$ language 'plpgsql';
CREATE OR REPLACE FUNCTION add_update_record_history()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.alive = OLD.alive THEN
RETURN NEW;
END IF;
IF NEW.alive THEN
INSERT INTO record_history (record_id, history_type) VALUES (NEW.id, 'added');
END IF;
IF NOT NEW.alive THEN
INSERT INTO record_history (record_id, history_type) VALUES (NEW.id, 'deleted');
END IF;
RETURN NEW;
END;
$$ language 'plpgsql';
ON record FOR EACH ROW EXECUTE PROCEDURE
add_insert_record_history();
ON record FOR EACH ROW EXECUTE PROCEDURE
add_update_record_history();
I seem to have it working how I want with the following query, which I feel is incredibly unoptimized:
WITH deltas AS (
SELECT o, n FROM (
SELECT id, name, domain_id, class_id, addr FROM record WHERE tld_id = $1
) AS o FULL OUTER JOIN record_temp n
ON (
o.name = n.name AND
o.domain_id = n.domain_id AND
o.class_id = n.class_id AND
o.addr = n.addr
)
WHERE (o.name, o.domain_id, o.class_id, o.addr)
IS DISTINCT FROM (n.name, n.domain_id, n.class_id, n.addr)
), mark_dead AS (
UPDATE record SET alive = FALSE
WHERE id IN (
SELECT (o).id FROM deltas WHERE (o).id IS NOT NULL
) RETURNING *
), mark_alive AS (
UPDATE record SET alive = TRUE
WHERE alive = FALSE AND id IN (
SELECT id FROM (
SELECT id, name, domain_id, class_id, addr FROM record WHERE tld_id = $1
) AS o INNER JOIN record_temp n
ON (
o.name = n.name AND
o.domain_id = n.domain_id AND
o.class_id = n.class_id AND
o.addr = n.addr
)
) RETURNING *
)
INSERT INTO record (name, domain_id, tld_id, class_id, addr)
SELECT (n).name, (n).domain_id, (n).tld_id, (n).class_id, (n).addr
FROM deltas WHERE
(n).name IS NOT NULL AND
(n).domain_id IS NOT NULL AND
(n).tld_id IS NOT NULL AND
(n).class_id IS NOT NULL AND
(n).addr IS NOT NULL
;
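One possible consolidation, sketched under the same schema as above and hedged rather than tested: the FULL OUTER JOIN can be computed once and each row classified by which side is NULL, so record_temp is only scanned a single time and the three write branches all read from the same CTE.
WITH joined AS (
    SELECT o, n FROM (
        SELECT id, name, domain_id, class_id, addr FROM record WHERE tld_id = $1
    ) AS o FULL OUTER JOIN record_temp n
    ON (
        o.name = n.name AND
        o.domain_id = n.domain_id AND
        o.class_id = n.class_id AND
        o.addr = n.addr
    )
), mark_dead AS (
    -- old rows with no counterpart in record_temp
    UPDATE record SET alive = FALSE
    WHERE id IN (
        SELECT (o).id FROM joined WHERE (n).name IS NULL AND (o).id IS NOT NULL
    ) RETURNING id
), mark_alive AS (
    -- rows present on both sides that were previously marked dead
    UPDATE record SET alive = TRUE
    WHERE alive = FALSE AND id IN (
        SELECT (o).id FROM joined WHERE (o).id IS NOT NULL AND (n).name IS NOT NULL
    ) RETURNING id
)
-- brand-new rows: present in record_temp only
INSERT INTO record (name, domain_id, tld_id, class_id, addr)
SELECT (n).name, (n).domain_id, (n).tld_id, (n).class_id, (n).addr
FROM joined
WHERE (o).id IS NULL;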