Get last key from jsonb (postgres) - sql

Considering the following content on a jsonb field, and that the keys immediately after "audio" are random (I'll never know which value it'll be returning beforehand): is there any way I can query for the last key inside "audio" ("2814462280" in this example)?
{
"test": {
"audio": {
"1175828715": {
"direction": "outbound",
},
"2814462280": {
"direction": "inbound",
}
}
}
}

I've written a function that dynamically constructs a jsonb path to get the last key's value.
As was previously mentioned in the comments section Postgres can reorder the keys in a jsonb field.
Here is the demo.
do $$
declare
    -- the raw "audio" object extracted from the metadata column
    audio_json jsonb;
    -- all keys of the "audio" object, in jsonb storage order
    audio_json_keys text[];
    last_key_index integer;
    last_key_pointer text;
    result_json jsonb;
begin
    -- -> (not ->>) keeps the value as jsonb and avoids a text round trip
    -- plus re-parse; the original did (metadata->>'test')::jsonb->>'audio'.
    audio_json := (select metadata->'test'->'audio' from test);

    -- NOTE(review): jsonb_object_keys yields keys in jsonb storage order
    -- (by length, then bytewise), NOT insertion order -- "last" is only
    -- meaningful relative to that storage order.
    audio_json_keys := (select array_agg(keys) from jsonb_object_keys(audio_json) keys);

    last_key_index := array_length(audio_json_keys, 1);
    last_key_pointer := audio_json_keys[last_key_index];

    -- The original built a dynamic SQL string by concatenating the key into
    -- quotes and EXECUTE-ing it; that is both unnecessary and unsafe if a key
    -- ever contains a quote character. The -> operator does the same lookup.
    result_json := audio_json -> last_key_pointer;

    raise info '%', result_json;
end $$;

Object.keys can be useful in this case. If you want to query for the last key, you can try the following:
// Client-side (JavaScript/TypeScript) approach: read the keys of the nested
// "audio" object and take the last one.
const obj = {
"test": {
"audio": {
"1175828715": {
"direction": "outbound",
},
"2814462280": {
"direction": "inbound",
}
}
}
}
const audio_obj:any = obj.test.audio
// NOTE(review): Object.keys returns integer-like string keys (as here) in
// ascending numeric order, not insertion order -- so "last" means the
// numerically largest key, which may differ from Postgres' jsonb key order.
const keys = Object.keys(audio_obj)
const len = keys.length
const lastKey: string = keys[len-1] // "2814462280"
If you want to get the last object, you can add one line of code:
const lastObj = audio_obj[lastKey] // {"direction": "inbound"}
Hope this helps you.

Related

BigQuery - Extract nested children JSON as rows

I have a JSON structure in a field that looks like this. I'm trying to extract the task viewed and completed date, there could be any number of tasks in each field.
Sample data
"task_1a232445": {
"completedDate": {
"_seconds": 1670371200,
"_nanoseconds": 516000000
},
"viewedDate": {
"_seconds": 1666652400,
"_nanoseconds": 667000000
}
},
"task_1a233445": {
"completedDate": {
"_seconds": 1670198400,
"_nanoseconds": 450000000
},
"viewedDate": {
"_seconds": 1674000000,
"_nanoseconds": 687000000
}
}
}
I have tried to adapt this previous question I asked, but where there are multiple tasks in a single row (as sample data) I can only return the first completedDate
Consider below approach
-- BigQuery approach: flatten the nested JSON into dotted leaf paths with JS
-- UDFs, split each path into (task id, date field, leaf key), keep only the
-- "_seconds" leaves, and pivot the two date fields into columns.

-- Returns the top-level keys of a JSON object string.
create temp function extract_keys(input string) returns array<string> language js as """
return Object.keys(JSON.parse(input));
""";
-- Returns the top-level values, aligned with extract_keys by offset.
create temp function extract_values(input string) returns array<string> language js as """
return Object.values(JSON.parse(input));
""";
-- Flattens arbitrarily nested JSON into a single-level object keyed by
-- dotted paths, e.g. {"task_x.completedDate._seconds": 1670371200, ...}.
create temp function get_leaves(input string) returns string language js as '''
function flattenObj(obj, parent = '', res = {}){
for(let key in obj){
let propName = parent ? parent + '.' + key : key;
if(typeof obj[key] == 'object'){
flattenObj(obj[key], propName, res);
} else {
res[propName] = obj[key];
}
}
return JSON.stringify(res);
}
return flattenObj(JSON.parse(input));
''';
select * from (
select
-- dotted path parts: arr[0] = task id, arr[1] = date field, arr[2] = leaf key
arr[safe_offset(0)] task,
arr[safe_offset(1)] date,
-- epoch seconds -> calendar date
date(timestamp_seconds(cast(val as int64))) val
from your_table, unnest([get_leaves(json)]) leaves,
unnest(extract_keys(leaves)) key with offset
join unnest(extract_values(leaves)) val with offset
using(offset),
unnest([struct(split(key, '.') as arr)])
-- keep only the seconds leaf (drop _nanoseconds)
where arr[safe_offset(2)] = '_seconds'
)
-- one row per task, with completedDate / viewedDate as columns
pivot (any_value(val) for date in ('completedDate', 'viewedDate'))
if applied to sample data in your question
-- Fixture: inlines the sample payload as a one-row table named your_table;
-- prepend this CTE to the query above to reproduce the result.
with your_table as (
select '''{
"task_1a232445": {
"completedDate": {
"_seconds": 1670371200,
"_nanoseconds": 516000000
},
"viewedDate": {
"_seconds": 1666652400,
"_nanoseconds": 667000000
}
},
"task_1a233445": {
"completedDate": {
"_seconds": 1670198400,
"_nanoseconds": 450000000
},
"viewedDate": {
"_seconds": 1674000000,
"_nanoseconds": 687000000
}
}
}
''' as json
)
output is

How to do WHERE IN/ANY on multiple columns in Golang with pq library?

I have a list of item type and item numbers like follows:
items := models.ItemKeys{
ItemKeys: []models.ItemKey{
{
ItemType: "type1",
ItemNumber: "10347114",
},
{
ItemType: "type2",
ItemNumber: "40428383",
},
{
ItemType: "type3",
ItemNumber: "90351753",
},
},
}
I would like to produce this kind of query:
SELECT * FROM item
WHERE (item_type, item_number) IN (('type1','10347114'), ('type2','40428383'), ('type3','90351753'))
it is worth mentioning that I'm using pq package (https://github.com/lib/pq). I happen to find there is a way using ANY instead. However, when I try it, it says sql: converting argument $1 type: pq: Unable to convert models.ItemKeys to array
Here is my current code:
// NOTE(review): this is the failing attempt from the question -- pq.Array
// only understands slices of simple types, so it cannot convert
// []models.ItemKey (hence the "Unable to convert ... to array" error), and
// the row-value comparison (item_type, item_number) = ANY($1) would not
// accept a flat array of pairs anyway.
rows, err := r.Db.QueryContext(ctx, "SELECT * "+
"FROM item "+
"WHERE (item_type, item_number) = ANY($1) "+
"AND deleted_dtime IS NULL", pq.Array(items))
Does anyone have a clue how to do it correctly?
You could do the following:
// Build a "(item_type, item_number) IN (($1,$2),($3,$4),...)" predicate with
// one pair of placeholders per item key, so every value is bound as a query
// parameter -- no user data is ever interpolated into the SQL text.
inSQL, args := "", []interface{}{}
for i, itemKey := range items.ItemKeys {
n := i * 2 // two placeholders per item key
inSQL += fmt.Sprintf("($%d,$%d),", n+1, n+2)
args = append(args, itemKey.ItemType, itemKey.ItemNumber)
}
inSQL = inSQL[:len(inSQL)-1] // drop last ","
query := `SELECT * FROM item WHERE (item_type, item_number) IN (` + inSQL + `) AND deleted_dtime IS NULL`
// query: SELECT * FROM item WHERE (item_type, item_number) IN (($1,$2),($3,$4),($5,$6)) AND deleted_dtime IS NULL
// args: ["type1" "10347114" "type2" "40428383" "type3" "90351753"]
rows, err := r.Db.QueryContext(ctx, query, args...)
// ...

How to Add a new field to nested jsonb object in Node-Postgres using parameters?

There is a table A and jsonb field 'context', it looks like:
{
"variable": {},
"other_stuff": {}
}
I need to add a new property to 'variable' every time I run the query. So it should do something like:
query1
{
"variable": {
"var1": "var1Value"
},
"other_stuff": {}
}
query2
{
"variable": {
"var1": "var1Value1",
"var2": "var1Value2"
},
"other_stuff": {}
}
And if variable already has this field, it should replace it.
I run this sql, and it works:
let sql = UPDATE chatbots.A SET context = context || jsonb_set(context, '{variable, var1}', 'var1Value1')
It works but when i need to replace 'var1' and 'var1Value1' by parameters ($1 and $2) - it doesn't work (in node-postgres)
I realized that i can replace second parameter by
to_jsonb($2::text)
But what should i do with the first one?
My javascript code
// Sets context.variable[variable_name] = variable_value on chatbots.A for
// each given user; returns false when no users are passed, otherwise the
// updated rows (chat_id, chat_user_id, platform, platform_user_id).
async setUsersVariables(params: {users: ChatUser[], variable_name: string, variable_value: string}) {
const {users, variable_name, variable_value} = params
if (!users.length) return false
// BUG(review): "$1" inside the quoted literal '{variable, $1}' is part of
// the string, so node-postgres never substitutes it -- jsonb_set gets the
// literal path key "$1". Build the path from a real parameter instead, e.g.
//   jsonb_set(context, ARRAY['variable', $1::text], to_jsonb($2::text))
// Also note to_jsonb($2)::text casts the jsonb back to text; the cast must
// be inside: to_jsonb($2::text).
let sql = "UPDATE chatbots.A SET context = context || jsonb_set(context, '{variable, $1}', to_jsonb($2)::text) WHERE chat_user_id IN ( "
const parsedUsers = users.map(e=> e?.chat_user_id)
let sqlParams: any[] = [variable_name, variable_value]
let idx = 3;
// appends "$3,$4,..." placeholders for the user ids and closes the IN (...)
({ sql, idx, params: sqlParams } = addSqlArrayParams(sql, parsedUsers, idx, sqlParams));
sql += ` RETURNING chat_id, chat_user_id, platform, platform_user_id`;
const filteredUsers: any = (await this.pool.query(sql, sqlParams)).rows
return filteredUsers
}

Calling stored function in postgres 12 with json type argument by SQL query from pgAdmin results in error

I'm trying to call a stored function in postgres 12 DB which takes 1 parameter of json type and returns result of json type.
The function is like this:
-- Authenticates the sender described by the input json ({"id", "token",
-- "targets"}) and, on success, returns the names of the target users.
-- Result json carries a "code" key (OK / ERR_SENDER_*) and, when OK, "users".
CREATE OR REPLACE FUNCTION public.get_users(
data json,
OUT result json)
RETURNS json
LANGUAGE 'plv8'
COST 100
VOLATILE
AS $BODY$
// "data" arrives as an already-converted JS object in plv8; the original
// called JSON.parse(data), which stringifies the object to "[object Object]"
// and throws "Unexpected token o in JSON at position 1".
const arg_id = data.id;
const arg_token = data.token;
// The contract names this key "targets" (the original read the nonexistent
// "ids" and never used the joined string).
const arg_targets = data.targets || [];

result = {};

const getAuthUserResult = plv8.execute(
  'SELECT id, token, deleted FROM public.users WHERE id = $1', [arg_id]);
const authUser = getAuthUserResult[0];

// Validation cascade: first matching case wins.
switch (true) {
  case getAuthUserResult.length === 0: {
    result.code = "ERR_SENDER_NOTFOUND";
    break;
  }
  case authUser.token !== arg_token: {
    result.code = "ERR_SENDER_INVALIDTOKEN";
    break;
  }
  case authUser.deleted !== 0: {
    result.code = "ERR_SENDER_DELETED";
    break;
  }
  default: {
    result.code = "OK";
  }
}

if (result.code === "OK") {
  // Identifiers cannot be bound as parameters -- the original
  // 'SELECT $1 FROM ...' selected the literal string 'name'. Select the
  // column directly and match every target id with = ANY.
  // NOTE(review): assumes users.id is an integer column -- confirm the cast.
  result.users = plv8.execute(
    'SELECT name FROM public.users WHERE id = ANY($1::int[])', [arg_targets]);
}

// Required even with the OUT parameter: without an explicit return the
// function yields NULL.
return result;
$BODY$;
ALTER FUNCTION public.get_users(json)
OWNER TO postgres;
The function must take a json with keys: "id" - for id of request sender, "token" - for it's secret and "targets" - for ids of target users, as follows:
{
"id": 448,
"token": "someToken",
"targets": [449, 450, 451]
}
But when I try calling the function by an SQL query:
SELECT * FROM get_users('{"id":448,"token":"someToken","targets":[449,450,451]}');
I get an error:
ERROR: SyntaxError: Unexpected token o in JSON at position 1
CONTEXT: undefined() LINE 0: [object Object]
SQL state: XX000
I have double checked json, and it seems to be valid. Also a lot of resources present this way of calling functions with json typed parameter. What can be wrong with the way I do it?
The problem was actually in the function code itself.
First:
const dataJson = JSON.parse(data);
const arg_id = dataJson.id;
This is invalid. The proper way to access json values is simple:
const arg_id = data.id;
And second:
return result;
is missing at the end.

Azure Stream Analytics: Get Array Elements by name

I was wondering if it is possible for me to get the elements of the array by the name of property than the position. For example, this is my incoming data:
{
"salesdata": {
"productsbyzone": {
"zones": [{
"eastzone": "shirts, trousers"
},
{
"westzone": "slacks"
},
{
"northzone": "gowns"
},
{
"southzone": "maxis"
}
]
}
}
}
I intend to move this to a SQL database and I have columns within the database for each zone. The problem is that the order of different zones changes within each json. I was successfully using the following query until I realized that the position of the zones changes within each json:
-- Question's (broken) approach: Stream Analytics has no "(path, n)" indexing
-- syntax, and even correct position-based access fails here because the zone
-- order changes between events.
WITH
salesData AS
(
SELECT
(c.salesdata.productsbyzone.zone,0) as eastzone,
(c.salesdata.productsbyzone.zone,1) as westzone,
(c.salesdata.productsbyzone.zone,2) as northzone,
-- NOTE(review): "sourthzone" is a typo, and the trailing comma before FROM
-- is a syntax error as written.
(c.salesdata.productsbyzone.zone,3) as sourthzone,
FROM [sales-data] as c
)
SELECT
eastzone.eastzone as PRODUCTS_EAST,
westzone.westzone as PRODUCTS_WEST,
northzone.northzone as PRODUCTS_NORTH,
southzone.southzone as PRODUCTS_SOUTH
INTO PRODUCTSDATABASE
FROM salesData
Need a way to reference these fields by the name rather than by the position.
I recommend a solution: use a JavaScript UDF in the Azure Stream Analytics job to normalize the zone columns' order.
Please refer to my sample:
Input data (with the zone order shuffled):
{
"salesdata": {
"productsbyzone": {
"zones": [{
"westzone": "slacks"
},
{
"eastzone": "shirts, trousers"
},
{
"northzone": "gowns"
},
{
"southzone": "maxis"
}
]
}
}
}
js udf code:
// Azure Stream Analytics JS UDF: normalizes an array of single-key zone
// objects (e.g. [{westzone: "..."}, {eastzone: "..."}]) into one object with
// a fixed set of zone properties, so downstream SQL can address each zone by
// name regardless of its position in the incoming array.
// Zones absent from the input stay ""; unrecognized keys are ignored; a
// repeated zone keeps its last occurrence.
function test(arg) {
    var normalized = {
        eastzone: "",
        westzone: "",
        northzone: "",
        southzone: ""
    };
    for (var idx = 0; idx < arg.length; idx++) {
        var entry = arg[idx];
        // each array element is expected to carry exactly one zone key
        var zoneName = Object.keys(entry)[0];
        // copy only keys matching one of the fixed output zones
        if (normalized.hasOwnProperty(zoneName)) {
            normalized[zoneName] = entry[zoneName];
        }
    }
    return normalized;
}
You can define the order you want in the obj parameter
SQL:
-- Run every event's zones array through the UDF so all four zones become
-- fixed, named properties regardless of their order in the incoming array.
WITH
c AS
(
SELECT
udf.test(jsoninput.salesdata.productsbyzone.zones) as result
from jsoninput
),
-- project the normalized object into one column per zone
b AS
(
SELECT
c.result.eastzone as east,c.result.westzone as west,c.result.northzone as north,c.result.southzone as south
from c
)
SELECT
b.east,b.west,b.north,b.south
INTO
jaycosmos
FROM
b
Output:
Hope it helps you.
You can use GetArrayElement to return array element then access to each property. Please refer the below query
-- Second answer: pull an element out of the zones array with
-- GetArrayElement, then read its properties by name. Fixed here: the SELECT
-- list was missing its commas, and the duplicated FROM clause should have
-- been INTO (the original snippet did not parse).
WITH
salesData AS
(
SELECT
-- NOTE(review): this reads element 0 only; with one zone per array element
-- (as in the sample data) the other three columns will be NULL -- the
-- UDF-based answer above handles the varying order correctly.
GetArrayElement(zones, 0) AS z
FROM [sales-data] AS s
)
SELECT
z.eastzone,
z.westzone,
z.northzone,
z.southzone
INTO PRODUCTSDATABASE
FROM salesData