How to join jsonb array elements in Postgres?

I am using Postgres 9.5, and I have the following tables:
Users
  id UUID
  name TEXT

Images
  id UUID
  key TEXT
  width INTEGER
  height INTEGER

Posts
  id UUID
  title TEXT
  author_id UUID
  content JSONB
The posts' content is like:
[
  { "type": "text", "text": "learning pg" },
  { "type": "image", "image_id": "8f4422b4-3936-49f5-ab02-50aea5e6755f" },
  { "type": "image", "image_id": "57efc97c-b9b4-4cd5-b1e1-3539f5853835" },
  { "type": "text", "text": "pg is awesome" }
]
Now I want to join the image-type elements of the content and populate them via their image_id, like:
{
  "id": "cb1267ca-b1ac-4daa-8c7e-72d4c000e9fa",
  "title": "Learning join jsonb in Postgres",
  "author_id": "deba01b7-ec58-4cc2-b3ae-7dc42e582767",
  "content": [
    { "type": "text", "text": "learning pg" },
    {
      "type": "image",
      "image": {
        "id": "8f4422b4-3936-49f5-ab02-50aea5e6755f",
        "key": "/upload/test1.jpg",
        "width": 800,
        "height": 600
      }
    },
    {
      "type": "image",
      "image": {
        "id": "57efc97c-b9b4-4cd5-b1e1-3539f5853835",
        "key": "/upload/test2.jpg",
        "width": 1280,
        "height": 720
      }
    },
    { "type": "text", "text": "pg is awesome" }
  ]
}
Here is my test SQL file:
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
DROP TABLE IF EXISTS Users;
DROP TABLE IF EXISTS Images;
DROP TABLE IF EXISTS Posts;
CREATE TABLE Users (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  name text NOT NULL
);

CREATE TABLE Images (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  key TEXT,
  width INTEGER,
  height INTEGER,
  creator_id UUID
);

CREATE TABLE Posts (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  title TEXT,
  author_id UUID,
  content JSONB
);
DO $$
DECLARE
  user_id   UUID;
  image1_id UUID;
  image2_id UUID;
BEGIN
  INSERT INTO Users (name) VALUES ('test user') RETURNING id INTO user_id;
  INSERT INTO Images (key, width, height, creator_id) VALUES ('upload/test1.jpg', 800, 600, user_id) RETURNING id INTO image1_id;
  INSERT INTO Images (key, width, height, creator_id) VALUES ('upload/test2.jpg', 600, 400, user_id) RETURNING id INTO image2_id;
  INSERT INTO Posts (title, author_id, content) VALUES (
    'test post',
    user_id,
    ('[ { "type": "text", "text": "learning pg" }, { "type": "image", "image_id": "' || image1_id || '" }, { "type": "image", "image_id": "' || image2_id || '" }, { "type": "text", "text": "pg is awesome" } ]') :: JSONB
  );
END $$;
Is there any way to implement this requirement?

SELECT jsonb_pretty(to_jsonb(p)) AS post_row_as_json
FROM  (
   SELECT id, title, author_id, c.content
   FROM   posts p
   LEFT   JOIN LATERAL (
      SELECT jsonb_agg(
               CASE WHEN c.elem->>'type' = 'image' AND i.id IS NOT NULL
                    THEN c.elem - 'image_id' || jsonb_build_object('image', i)
                    ELSE c.elem
               END) AS content
      FROM   jsonb_array_elements(p.content) AS c(elem)
      LEFT   JOIN images i ON c.elem->>'type' = 'image'
                          AND i.id = (c.elem->>'image_id')::uuid
      ) c ON true
   ) p;
How?
Unnest the jsonb array, producing 1 row per array element:
jsonb_array_elements(p.content) AS c(elem)
For each element LEFT JOIN to images on the conditions that
... the key 'type' has the value 'image': c.elem->>'type' = 'image'
... the UUID in image_id matches: i.id = (elem->>'image_id')::uuid
An invalid UUID in content would raise an exception.
For image types, where a matching image was found
c.elem->>'type' = 'image' AND i.id IS NOT NULL
remove the key 'image_id' and add the related image row as jsonb value:
elem - 'image_id' || jsonb_build_object('image', i)
Else keep the original element.
Re-aggregate the modified elements to a new content column with jsonb_agg().
This would work with a plain ARRAY constructor as well (see the sketch after these notes).
Unconditionally LEFT JOIN LATERAL the result to posts (ON true) and select all columns, replacing only p.content with the generated c.content.
In the outer SELECT, convert the whole row to jsonb with a simple to_jsonb().
jsonb_pretty() is only for human-readable representation and totally optional.
All jsonb functions are documented in the manual.
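For illustration, a minimal sketch of that ARRAY-constructor variant (same tables as above; untested, but it should behave like the jsonb_agg() version, aggregating the modified elements into a Postgres array and casting back with to_jsonb()):

SELECT jsonb_pretty(to_jsonb(p)) AS post_row_as_json
FROM  (
   SELECT id, title, author_id,
          to_jsonb(ARRAY(
             SELECT CASE WHEN c.elem->>'type' = 'image' AND i.id IS NOT NULL
                         THEN c.elem - 'image_id' || jsonb_build_object('image', i)
                         ELSE c.elem
                    END
             FROM   jsonb_array_elements(p.content) AS c(elem)
             LEFT   JOIN images i ON c.elem->>'type' = 'image'
                                 AND i.id = (c.elem->>'image_id')::uuid
          )) AS content
   FROM   posts p
   ) p;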

Related

Update JSON data in Oracle SQL

This is my JSON data in one of the Oracle SQL columns, "jsoncol", in a table named "jsontable":
{
  "Company": [
    {
      "Info": {
        "Address": "123"
      },
      "Name": "ABC",
      "Id": 999
    },
    {
      "Info": {
        "Address": "456"
      },
      "Name": "XYZ",
      "Id": 888
    }
  ]
}
I am looking for an UPDATE query that updates the value of "Name" with a new value, based on a particular "Id" value.
Thanks in advance
From Oracle 19, you can use JSON_MERGEPATCH:
UPDATE jsontable j
SET jsoncol = JSON_MERGEPATCH(
      jsoncol,
      (
        SELECT JSON_OBJECT(
                 KEY 'Company'
                 VALUE JSON_ARRAYAGG(
                   CASE id
                     WHEN 999
                     THEN JSON_MERGEPATCH(json, '{"Name":"DEF"}')
                     ELSE json
                   END
                   FORMAT JSON
                   RETURNING CLOB
                 )
                 FORMAT JSON
                 RETURNING CLOB
               )
        FROM jsontable jt
        CROSS APPLY JSON_TABLE(
               jt.jsoncol,
               '$.Company[*]'
               COLUMNS(
                 json VARCHAR2(4000) FORMAT JSON PATH '$',
                 id NUMBER PATH '$.Id'
               )
             )
        WHERE jt.ROWID = j.ROWID
      )
    );
Which, for the sample data:
CREATE TABLE jsontable (
  jsoncol CLOB CHECK (jsoncol IS JSON)
);

INSERT INTO jsontable (jsoncol)
VALUES ('{
  "Company": [
    {
      "Info": {
        "Address": "123"
      },
      "Name": "ABC",
      "Id": 999
    },
    {
      "Info": {
        "Address": "456"
      },
      "Name": "XYZ",
      "Id": 888
    }
  ]
}');
Then after the UPDATE, the table contains:
JSONCOL
{"Company":[{"Info":{"Address":"123"},"Name":"DEF","Id":999},{"Info":{"Address":"456"},"Name":"XYZ","Id":888}]}
db<>fiddle here
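For intuition, JSON_MERGEPATCH() applies an RFC 7386 merge patch: keys in the patch replace the same keys in the target, so patching one array element overwrites only its Name. A minimal stand-alone example:

SELECT JSON_MERGEPATCH(
         '{"Info":{"Address":"123"},"Name":"ABC","Id":999}',
         '{"Name":"DEF"}'
       ) AS patched
FROM DUAL;
-- {"Info":{"Address":"123"},"Name":"DEF","Id":999}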
You can use REPLACE() along with the JSON_TABLE() function in order to update the value of Name (from ABC to DEF) for a specific Id value (999), such as
UPDATE jsontable jt0
SET jsoncol = ( SELECT REPLACE(jsoncol, jt.name, 'DEF')
                FROM jsontable j,
                     JSON_TABLE(jsoncol,
                       '$' COLUMNS(NESTED PATH '$.Company[*]'
                         COLUMNS(
                           name VARCHAR2 PATH '$.Name',
                           id INT PATH '$.Id'
                         )
                       )
                     ) jt
                WHERE jt.id = 999
                  AND j.id = jt0.id )
for DB versions prior to 19, provided that the id values of the table and of the JSON content are unique throughout the table and within each individual JSON value, respectively. (Note that this query assumes jsontable also has its own id column for correlating rows, and that a plain string REPLACE() would touch unintended occurrences if the Name value appeared elsewhere in the document.)
Demo

How to remove object by value from a JSONB type array?

I want to remove a JSONB object by their unique 'id' value from a JSONB array. I am no expert at writing SQL code, but I managed to write the concatenate function.
For example: remove this object from the array below.
{
  "id": "ad26e2be-19fd-4862-8f84-f2f9c87b582e",
  "title": "Wikipedia",
  "links": [
    "https://en.wikipedia.org/1",
    "https://en.wikipedia.org/2"
  ]
},
Schema:
CREATE TABLE users (
  url text not null,
  user_id SERIAL PRIMARY KEY,
  name VARCHAR,
  list_of_links jsonb default '[]'
);
list_of_links format:
[
  {
    "id": "ad26e2be-19fd-4862-8f84-f2f9c87b582e",
    "title": "Wikipedia",
    "links": [
      "https://en.wikipedia.org/1",
      "https://en.wikipedia.org/2"
    ]
  },
  {
    "id": "451ac172-b93e-4158-8e53-8e9031cfbe72",
    "title": "Russian Wikipedia",
    "links": [
      "https://ru.wikipedia.org/wiki/",
      "https://ru.wikipedia.org/wiki/"
    ]
  },
  {
    "id": "818b99c8-479b-4846-ac15-4b2832ec63b5",
    "title": "German Wikipedia",
    "links": [
      "https://de.wikipedia.org/any",
      "https://de.wikipedia.org/any"
    ]
  },
  ...
]
The concatenate function:
update users set list_of_links = (
  list_of_links || (select *
                    from jsonb_array_elements(list_of_links)
                    where value->>'id' = 'ad26e2be-19fd-4862-8f84-f2f9c87b582e')
)
where url = 'test'
returning *;
Your JSON data is structured such that you have to unpack it, operate on the unpacked data, and then repack it again:
SELECT u.url, u.user_id, u.name,
       jsonb_agg(
         jsonb_build_object('id', l.id, 'title', l.title, 'links', l.links)
       ) as list_of_links
FROM users u
CROSS JOIN LATERAL jsonb_to_recordset(u.list_of_links) AS l(id uuid, title text, links jsonb)
WHERE l.id != 'ad26e2be-19fd-4862-8f84-f2f9c87b582e'::uuid
GROUP BY 1, 2, 3
The function jsonb_to_recordset is a set-returning function so you have to use it as a row source, joined to its originating table with the LATERAL clause so that the list_of_links column is available to the function to be unpacked. Then you can delete the records you are not interested in using the WHERE clause, and finally repack the structure by building the record fields into a jsonb structure and then aggregating the individual records back into an array.
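If you want to persist the removal rather than just select the filtered result, here is a minimal sketch of the same unpack/filter/repack as an UPDATE (assuming the target row is identified by url = 'test', as in the question):

UPDATE users u
SET    list_of_links = (
   SELECT coalesce(jsonb_agg(t.elem), '[]'::jsonb)   -- keep an empty array if nothing remains
   FROM   jsonb_array_elements(u.list_of_links) AS t(elem)
   WHERE  t.elem->>'id' <> 'ad26e2be-19fd-4862-8f84-f2f9c87b582e'
   )
WHERE  u.url = 'test';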
I wrote this in JS, but that doesn't matter to how it works. Essentially, it gets all the items from the array, then finds the matching id, which returns an index. Using that index, I apply the "-" operator, which removes the element at that index from the array.
//req.body is this JSON object
//{"url":"test", "id": "ad26e2be-19fd-4862-8f84-f2f9c87b582e"}
var { url, id } = req.body;
pgPool.query(
  `
  select list_of_links
  from users
  where url=$1;
  `,
  [url],
  (error, result) => {
    // stop executing further if error is true
    if (error) {
      res.json({ status: "failed" });
      return;
    }
    if (result) {
      // result.rows[0] holds the single row selected above; this returns
      // the index of the array element whose id matches the request's id
      var index_of_the_item = result.rows[0].list_of_links
        .map(({ id: db_id }, index) => (db_id === id ? index : false))
        .filter((x) => x !== false)[0];
      // remove the array element by its index
      pgPool.query(
        `
        update users
        set list_of_links=(
          list_of_links - $1::int
        )
        where url=$2;
        `,
        [index_of_the_item, url],
        (e, r) => {...}
      );
    }
  }
);
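For comparison, the same find-the-index-then-delete round trip can be done in a single SQL statement. A hedged sketch, assuming ids are unique within the array: WITH ORDINALITY numbers the elements, and the jsonb "- integer" operator then deletes by index.

UPDATE users u
SET    list_of_links = u.list_of_links - idx.pos
FROM  (
   SELECT (a.ord - 1)::int AS pos              -- ordinality is 1-based, "-" is 0-based
   FROM   users, jsonb_array_elements(list_of_links) WITH ORDINALITY AS a(elem, ord)
   WHERE  url = 'test'
   AND    a.elem->>'id' = 'ad26e2be-19fd-4862-8f84-f2f9c87b582e'
   ) idx
WHERE  u.url = 'test';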

Reading from JSON column with dynamic element name

I have a table that contains json data that I need to unpick and store in a relational DB. I am using an Oracle DB 19.0 and have created the following table:
CREATE TABLE J_PAGE
(
  PK_PAGE_ID NUMBER ( 10 , 0 )
  , FK_RESP_ID NUMBER ( 10 , 0 ) DEFAULT -1
  , CL_PAGE_ID NUMBER ( 10 , 0 ) DEFAULT -1
  , CL_RESP CLOB CONSTRAINT CK_PAGE_01 CHECK ( CL_RESP IS JSON )
  , CL_DT DATE DEFAULT SYSDATE
) ;
The column CL_RESP is a CLOB holding the response from a call to UTL_HTTP.GET_RESPONSE:
{
  "count": 2,
  "results": [
    {
      "key": "stories",
      "id": "10000"
    },
    {
      "key": "stories",
      "id": "10001"
    }
  ],
  "stories": {
    "10000": {
      "title": "story1",
      "description": null,
      "start_date": "2020-04-01",
      "id": "10001"
    },
    "10001": {
      "title": "story2",
      "description": null,
      "start_date": null,
      "id": "10001"
    }
  },
  "meta": {
    "count": 2,
    "page_count": 1,
    "page_number": 1,
    "page_size": 20
  }
}
I need to extract the contents of the "stories" element, but I'm getting stuck because the nested element names ("10000", "10001", ...) aren't static.
I know I can do the following to return 1 row using story 10001...
SELECT
page.CL_RESP.stories."10001".title[*] CL_TITLE
FROM
J_PAGE page ;
But I need all stories so perhaps a wildcard in place of 10001?
SELECT
page.CL_RESP.stories.*.title[*] CL_TITLE
FROM
J_PAGE page ;
Can anyone help?
Solved it.
SELECT t.*
FROM   J_PAGE o,
       JSON_TABLE ( o.CL_RESP , '$.stories.*'
         COLUMNS (
           CL_TITLE VARCHAR2 PATH '$.title'
           , CL_DESC VARCHAR2 PATH '$.description'
           , CL_START_DT TIMESTAMP PATH '$.start_date'
           , CL_ID NUMBER PATH '$.id'
         )
       ) t ;
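For the sample response above, this should return something like the following (exact timestamp formatting depends on NLS settings; note the sample data lists "10001" as the id of both stories):

CL_TITLE   CL_DESC   CL_START_DT           CL_ID
story1     (null)    2020-04-01 00:00:00   10001
story2     (null)    (null)                10001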

How to retrieve specific JSON value from a column containing a JSON array

I have this JSON array in a SQL Server table:
[
  {
    "FieldName": "DateCreated",
    "FieldValue": "10/22/2020"
  },
  {
    "FieldName": "IsMember",
    "FieldValue": "false"
  },
  {
    "FieldName": "EntityId",
    "FieldValue": "ABC123"
  }
]
I want to fetch only the FieldValue of the EntityId object, so the output should be only ABC123.
I have this query
SELECT JSON_VALUE(JsonColumnData, '$[2].FieldValue') AS EntityId
FROM MyTable
This returns the EntityId value, but I have no guarantee that EntityId will always be in the same position in the JSON array.
Is it possible to have the select return the EntityId regardless of its position in the JSON array?
You can use OPENJSON() to query the data and just filter the row(s) that contain EntityId...
create table dbo.MyTable (
  JsonColumnData nvarchar(max)
);

insert dbo.MyTable (JsonColumnData) values (N'[
  {
    "FieldName": "DateCreated",
    "FieldValue": "10/22/2020"
  },
  {
    "FieldName": "IsMember",
    "FieldValue": "false"
  },
  {
    "FieldName": "EntityId",
    "FieldValue": "ABC123"
  }
]');

select FieldValue as EntityId
from dbo.MyTable
cross apply openjson(JsonColumnData) with (
  FieldName nvarchar(11),
  FieldValue nvarchar(11)
)
where FieldName = N'EntityId';
Which yields...
EntityId
---------
ABC123
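A variant without the WITH clause, in case the fixed nvarchar(11) lengths above are too tight for real data: plain openjson() returns key/value/type columns per array element, and JSON_VALUE() then pulls the fields out of each element. A sketch:

select JSON_VALUE(j.[value], '$.FieldValue') as EntityId
from dbo.MyTable
cross apply openjson(JsonColumnData) as j
where JSON_VALUE(j.[value], '$.FieldName') = N'EntityId';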

Insert into database only when id does not exist

I am using Postgres. In the code below, I want to insert into the database only if the id doesn't already exist.
DROP TABLE IF EXISTS json_table;

CREATE temp TABLE json_table (
  id VARCHAR(50) PRIMARY KEY,
  str_col VARCHAR(500),
  int_col SMALLINT,
  bool_col BOOLEAN,
  json_col JSON,
  float_col DECIMAL
);
INSERT INTO json_table
SELECT * FROM json_populate_recordset (NULL::json_table,
'{ "insert": [
  {
    "id": "1",
    "str_col": "Postgres bar data",
    "int_col": 3151,
    "bool_col": false,
    "json_col": { "data": "tutorials" },
    "float_col": 11.51099159756918
  },
  {
    "id": "2",
    "str_col": "Postgres tutorials data data",
    "int_col": 4237,
    "bool_col": true,
    "json_col": { "type": "type" },
    "float_col": 48.94065780742467
  }
]}'::json->'insert');

SELECT * FROM json_table;
You can use the on conflict do nothing clause. Your id column is the primary key of the table, so this would just work:
INSERT INTO json_table
SELECT *
FROM json_populate_recordset (NULL::json_table,
'{ "insert": [
  {
    "id": "1",
    "str_col": "Postgres bar data",
    "int_col": 3151,
    "bool_col": false,
    "json_col": { "data": "tutorials" },
    "float_col": 11.51099159756918
  },
  {
    "id": "2",
    "str_col": "Postgres tutorials data data",
    "int_col": 4237,
    "bool_col": true,
    "json_col": { "type": "type" },
    "float_col": 48.94065780742467
  }
]}'::json->'insert')
ON CONFLICT (id) DO NOTHING;
Demo on DB Fiddle
Say this record was inserted before:
insert into json_table(id) values('1');
The above query then executes without error and inserts only the second row.
Define a unique constraint on id (redundant here, since id is already the primary key, but needed if it were not):
alter table json_table add constraint unq_json_table_id unique(id);
You can then insert using on conflict:
insert into json_table . . .
. . .
on conflict (id) do nothing;
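A quick usage sketch of the conflict behaviour, with hypothetical values (id "1" already exists from the json_populate_recordset example above; id is a VARCHAR in this schema):

INSERT INTO json_table (id, str_col)
VALUES ('1', 'duplicate, skipped'),
       ('3', 'new row, inserted')
ON CONFLICT (id) DO NOTHING;

-- id '1' keeps its original str_col; only id '3' is added
SELECT id, str_col FROM json_table ORDER BY id;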