How do I extract the value of the key "Nome" from JSON using JSON_EXTRACT in Google BigQuery?
I cannot hard-code the key 135 in the query, because it is dynamic (like this: JSON_EXTRACT(vista, '$.Agencia.135.Nome')).
How can I use JSON_EXTRACT without knowing the '135' key name?
JSON Record Sample:
{
"Campanha": "Campanha A",
"Ad": "Ad A",
"Agencia": {
"135": {
"Celular": ".",
"Codigo": "135",
"CodigoPai": "105",
"DDD": "00",
"Email": "email-A#email.com",
"Nome": "Nome A",
"Fone": "00 0000.0000",
"Fone2": ".",
"Foto": "foto-A.jpg"
}
}
}
Not sure if your JSON is formatted correctly. Is the key '135' supposed to be an array? If so, format it properly and you can access it as in the example below:
SELECT JSON_EXTRACT(json_text, '$.Agencia.135[1]') AS nome
FROM UNNEST([
'{"Agencia":{"135":[{"Codigo":"135"},{"Nome":"Nome A"}]}}'
]) AS json_text;
That would give you:
[
{
"nome": "{\"Nome\":\"Nome A\"}"
}
]
For more reference on JSON_EXTRACT, see: https://cloud.google.com/bigquery/docs/reference/standard-sql/json_functions#json_extract
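If the key is known but awkward (a purely numeric member name like 135), bracket notation in the JSONPath should also work, e.g. JSON_EXTRACT(vista, "$.Agencia['135'].Nome"). And if there is only ever one agency entry per row and you only need Nome, a plain regular expression over the extracted Agencia object can avoid dynamic SQL entirely. A minimal sketch, reusing the vista column and table name from the question:
SELECT
  -- assumes "Nome" occurs exactly once inside the Agencia object
  REGEXP_EXTRACT(JSON_EXTRACT(vista, '$.Agencia'), r'"Nome"\s*:\s*"([^"]+)"') AS Nome
FROM `project.dataset.table`;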
Use the approach below
execute immediate (
select string_agg("select " || key || ''' key
, JSON_EXTRACT_SCALAR(vista, '$.Agencia.''' || key || '''.Nome') AS Nome
from `project.dataset.table`''', " union all ")
from `project.dataset.table`, unnest(regexp_extract_all(regexp_replace(JSON_EXTRACT(vista, '$.Agencia'), r':{.*?}+', ''), r'"(.*?)"')) key
);
If applied to the sample data in your question, the output is:
Row key Nome
1 135 Nome A
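To see what EXECUTE IMMEDIATE actually runs here: the inner STRING_AGG builds one SELECT per key found under Agencia, so for the sample row the generated statement looks roughly like the sketch below (illustration only; the real text is produced dynamically):
select 135 key
  , JSON_EXTRACT_SCALAR(vista, '$.Agencia.135.Nome') AS Nome
from `project.dataset.table`
-- union all
-- select <next key> key, JSON_EXTRACT_SCALAR(vista, '$.Agencia.<next key>.Nome') AS Nome ...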
Also, depending on your use case, you might try the option below too
execute immediate (
select 'select * from (' || string_agg("select " || key || ''' key
, JSON_EXTRACT_SCALAR(vista, '$.Agencia.''' || key || '''.Nome') AS Nome
from `project.dataset.table`''', " union all ") || ') where not Nome is null'
from `project.dataset.table`, unnest(regexp_extract_all(regexp_replace(JSON_EXTRACT(vista, '$.Agencia'), r':{.*?}+', ''), r'"(.*?)"')) key
);
I am trying to create a nested JSON in Snowflake and have narrowed the query down to the one below, where I nest on id. However, I want the nesting to also apply to the inner layer, and I am finding it hard to get the right query for it.
WITH subquery AS (
SELECT id, placeId, actionId, resultValue
FROM my_table
)
SELECT id,
'{"resultValues": {' || listagg('"' || placeId || '": {"' || actionId || '": ' || resultValue || '}', ',') within group (order by placeId) || '}}' as nested_json
FROM subquery
GROUP BY id;
Below is how the current result looks for each id.
I am trying to get actionId1 and actionId2 grouped under placeId1 and placeId2 respectively. How do I get this done? Any ideas would be appreciated.
Meet FLATTEN() and LATERAL; they like to hang out with OBJECT_AGG(), who needs his own space via CTEs.
WITH CTE AS (
SELECT
parse_json(
' { "resultValues": [
{ "placeId1": { "actionId1": 1.1 } }, { "placeId1": { "actionId2": 1.2 } },
{ "placeId2": { "actionId1": 1.3 } }, { "placeId2":{ "actionId2": 1.4} } ] }'
) VOLIA
),
CTE2 AS (
SELECT
DISTINCT KIAORA.PATH KIAORA,
TE_REO.PATH TE_REO,
OBJECT_AGG(MAORI.PATH, MAORI.VALUE) OVER (PARTITION BY TE_REO.PATH) MAORI
FROM
CTE,
LATERAL FLATTEN(INPUT => VOLIA) KIAORA,
LATERAL FLATTEN(KIAORA.VALUE) HELLO,
LATERAL FLATTEN(HELLO.VALUE) TE_REO,
LATERAL FLATTEN (INPUT => TE_REO.VALUE) MAORI
)
SELECT
DISTINCT OBJECT_CONSTRUCT(
KIAORA,
ARRAY_CONSTRUCT(
OBJECT_AGG(TE_REO, MAORI) OVER (PARTITION BY KIAORA)
)
) ANSWER,
VOLIA
FROM
CTE2, CTE
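If you still have the flat columns from your original subquery (id, placeId, actionId, resultValue), a shorter route is to build the inner layer first and then the outer one, both with OBJECT_AGG. A minimal sketch, assuming those column names on my_table (the per_place CTE name is just for the sketch):
WITH per_place AS (
  SELECT id, placeId,
         OBJECT_AGG(actionId, resultValue::VARIANT) AS actions  -- inner layer per place
  FROM my_table
  GROUP BY id, placeId
)
SELECT id,
       OBJECT_CONSTRUCT('resultValues', OBJECT_AGG(placeId, actions::VARIANT)) AS nested_json
FROM per_place
GROUP BY id;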
I have a problem converting XML content to JSON format (with a plain Oracle SELECT statement) when more than one sub-level of data is present in the original XML: with my code, the result at level 2 and below is rendered as a string rather than as a JSON object. Could someone please tell me where the fault in my code is, or what I'm doing wrong?
source:
<envelope>
<sender>
<name>IZS</name>
<country>SU</country>
<address>LOCATION 10B</address>
<address>1000 CITY</address>
<sender_identifier>SU46794093</sender_identifier>
<sender_address>
<sender_agent>SKWWSI20XXX</sender_agent>
<sender_mailbox>SI56031098765414228</sender_mailbox>
</sender_address>
</sender>
</envelope>
transformation select statement:
WITH SAMPLE AS (SELECT XMLTYPE ('
<envelope>
<sender>
<name>IZS</name>
<country>SU</country>
<address>LOCATION 10B</address>
<address>1000 CITY</address>
<sender_identifier>SU46794093</sender_identifier>
<sender_address>
<sender_agent>SKWWSI20XXX</sender_agent>
<sender_mailbox>SI56031098765414228</sender_mailbox>
</sender_address>
</sender>
</envelope>') XMLDOC FROM DUAL)
SELECT JSON_SERIALIZE (
JSON_OBJECT (
KEY 'envelope' VALUE
JSON_OBJECTAGG (
KEY ID_LEVEL1 VALUE
CASE ID_LEVEL1
WHEN 'sender' THEN
( SELECT JSON_OBJECTAGG (
KEY ID_LEVEL2 VALUE
CASE ID_LEVEL2
WHEN 'sender_address' THEN
( SELECT JSON_OBJECTagg (KEY ID_LEVEL22 VALUE TEXT_LEVEL22)
FROM XMLTABLE ('/sender/sender_address/*'
PASSING XML_LEVEL2
COLUMNS ID_LEVEL22 VARCHAR2 (128) PATH './name()',
TEXT_LEVEL22 VARCHAR2 (128) PATH './text()'
)
)
ELSE
TEXT_LEVEL2
END)
FROM XMLTABLE ('/sender/*'
PASSING XML_LEVEL2
COLUMNS ID_LEVEL2 VARCHAR2 (1024) PATH './name()',
TEXT_LEVEL2 VARCHAR2 (1024) PATH './text()'
)
)
ELSE
'"' || TEXT_LEVEL1 || '"'
END FORMAT JSON)
) PRETTY
)JSON_DOC
FROM SAMPLE, XMLTABLE ('/envelope/*'
PASSING XMLDOC
COLUMNS ID_LEVEL1 VARCHAR2 (1024) PATH './name()',
TEXT_LEVEL1 VARCHAR2 (1024) PATH './text()',
XML_LEVEL2 XMLTYPE PATH '.'
);
wrong result:
{
"envelope" :
{
"sender" :
{
"name" : "IZS",
"country" : "SU",
"address" : "LOCATION 10B",
"address" : "1000 CITY",
"sender_identifier" : "SU46794093",
"sender_address" : "{\"sender_agent\":\"SKWWSI20XXX\",\"sender_mailbox\":\"SI56031098765414228\"}"
}
}
}
wrong part:
"sender_address" : "{\"sender_agent\":\"SKWWSI20XXX\",\"sender_mailbox\":\"SI56031098765414228\"}"
For the level 1 text you're wrapping the value in double-quotes and specifying format json; you aren't doing that for level 2. If you change:
ELSE
TEXT_LEVEL2
END
to:
ELSE
'"' || TEXT_LEVEL2 || '"'
END FORMAT JSON)
then the result is:
{
"envelope" :
{
"sender" :
{
"name" : "IZS",
"country" : "SU",
"address" : "LOCATION 10B",
"address" : "1000 CITY",
"sender_identifier" : "SU46794093",
"sender_address" :
{
"sender_agent" : "SKWWSI20XXX",
"sender_mailbox" : "SI56031098765414228"
}
}
}
}
fiddle
The underlying problem is that you would need a kind of conditional FORMAT JSON inside the SELECT JSON_OBJECTAGG (KEY ID_LEVEL2 VALUE CASE ID_LEVEL2 ...): you want it when ID_LEVEL2 is 'sender_address', but not in the ELSE branch. The syntax, however, only lets you put FORMAT JSON after the END of the CASE, where it applies to both branches, and that is what breaks the plain ELSE TEXT_LEVEL2 value.
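In other words, because the single FORMAT JSON after END applies to whichever branch the CASE takes, both branches must already produce valid JSON text; the quoting trick makes the ELSE branch comply. Sketch of the relevant fragment only:
KEY ID_LEVEL2 VALUE
  CASE ID_LEVEL2
    WHEN 'sender_address' THEN
      ( SELECT JSON_OBJECTAGG (KEY ID_LEVEL22 VALUE TEXT_LEVEL22)
        FROM XMLTABLE (...) )        -- already a JSON object, unaffected by the quoting
    ELSE
      '"' || TEXT_LEVEL2 || '"'      -- plain text, quoted so it parses as a JSON string
  END FORMAT JSON                    -- applies to whichever branch was taken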
What's the most terse way I can express an UPDATE...SET...FROM SQL statement using knex? This is what I've got currently:
const query =
knex('user_subscriptions').update(subscription).toQuery() +
knex.raw(
' from plans p where customer_id = ? and p.id = us.plan_id ' +
'returning us.*, p.name',
[customer_id]
);
The reason I'm doing this is that I want to efficiently return a field from a related table (JOIN style) without needing a separate query.
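For reference, the raw statement being assembled above is a PostgreSQL-style UPDATE ... FROM with a RETURNING list; roughly the sketch below (update payload elided, alias us assumed for user_subscriptions):
UPDATE user_subscriptions us
SET    subscription = ...            -- columns from the `subscription` object
FROM   plans p
WHERE  customer_id = ?               -- bound parameter
  AND  p.id = us.plan_id
RETURNING us.*, p.name;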
As documented on the official site: knexjs.org/#Builder-update
knex('user_subscriptions')
  .returning(['us.*', 'plans.name', 'customer_id'])
  .where({
    customer_id: '?',
    'plans.id': 'us.plan_id'
  })
  .update({
    subscription: '?'
  })
Does:
update `user_subscriptions` set `subscription` = '?' where `customer_id` = '?' and `plans.id` = 'us.plan_id'
Returns:
[ us.*: ..., plans.name: ..., customer_id: ... ]
In BigQuery, if we are interested in constructing JSON output, we can usually use a STRUCT for the JSON object when the keys are known beforehand.
SELECT TO_JSON_STRING(STRUCT(key1))
FROM (SELECT "val1" as key1 UNION ALL
SELECT "val2" as key1)
Result
{"key1":"val1"}
{"key1":"val2"}
But in the case where the keys are dynamic, we really want a map type, similar to the Avro map type.
For example
SELECT *
FROM (SELECT "key1" as key, "val1" as val UNION ALL
SELECT "key2" as key, "val2" as val)
should return
{"key1": "val1", "key2": "val2"}
Is there any way to achieve this using BigQuery SQL?
Below is for BigQuery Standard SQL
Something simple like the query below should produce the expected result
#standardSQL
WITH `project.dataset.table` AS (
SELECT "key1" AS key, "val1" AS val UNION ALL
SELECT "key2" AS key, "val2" AS val
)
SELECT '{' || STRING_AGG(REPLACE(TRIM(FORMAT('%T', t), '()'), '", "', '": "'), ', ') || '}' AS return
FROM `project.dataset.table` t
with output
Row return
1 {"key1": "val1", "key2": "val2"}
You can use dynamic SQL to generate the JSON string:
DECLARE
JSONSTR STRING;
SET
JSONSTR = (
SELECT
'{' || STRING_AGG('"' || key || '": "' || val || '"', ', ') || '}'
FROM (
SELECT *
FROM (SELECT "key1" AS key, "val1" AS val
UNION ALL
SELECT "key2" AS key, "val2" AS val)));
EXECUTE IMMEDIATE
FORMAT("""SELECT '%t'""",JSONSTR);
[
{
"key":"expiry_date",
"type":"date",
"label":"Expiry Date",
"required":false,
"default_value":"2029-12-15"
},
{
"key":"brand",
"type":"text",
"label":"Brand",
"required":false,
"default_value":"clipsal"
}
]
Is there a way that I could extract the default_value of "expiry_date" in the nested JSON above? The data is under a column called attributes.
Have you tried any of the functions described in the BigQuery JSON functions documentation? Maybe one of them can help.
Also, if the first element of the JSON array is always the one you want, you could use something like:
WITH test_table AS (
SELECT "[{\"key\":\"expiry_date\",\"type\":\"date\",\"label\":\"Expiry Date\",\"required\":false,\"default_value\":\"2029-12-15\"},{\"key\":\"brand\",\"type\":\"text\",\"label\":\"Brand\",\"required\":false,\"default_value\":\"clipsal\"}]" AS json_text_field
)
SELECT JSON_EXTRACT(json_text_field, '$[0].default_value') FROM test_table
If the key you want is not always in the first element, you could use this instead:
WITH test_table AS (
SELECT "[{\"key\":\"expiry_date\",\"type\":\"date\",\"label\":\"Expiry Date\",\"required\":false,\"default_value\":\"2029-12-15\"},{\"key\":\"brand\",\"type\":\"text\",\"label\":\"Brand\",\"required\":false,\"default_value\":\"clipsal\"}]" AS json_text_field
)
SELECT value FROM (
  SELECT JSON_EXTRACT(json_value, '$.key') AS id, JSON_EXTRACT(json_value, '$.default_value') AS value
  FROM test_table, UNNEST(JSON_EXTRACT_ARRAY(json_text_field, '$')) AS json_value
) WHERE id = '"expiry_date"'
Below is for BigQuery Standard SQL
#standardSQL
SELECT JSON_EXTRACT_SCALAR(el, '$.default_value') AS default_value
FROM `project.dataset.table`,
UNNEST(JSON_EXTRACT_ARRAY(json)) el
WHERE JSON_EXTRACT_SCALAR(el, '$.key') = 'expiry_date'
You can test the above with the sample / dummy data from your question, as in the example below
#standardSQL
WITH `project.dataset.table` AS (
SELECT '''
[
{
"key":"expiry_date",
"type":"date",
"label":"Expiry Date",
"required":false,
"default_value":"2029-12-15"
},
{
"key":"brand",
"type":"text",
"label":"Brand",
"required":false,
"default_value":"clipsal"
}
]
''' json
)
SELECT JSON_EXTRACT_SCALAR(el, '$.default_value') AS default_value
FROM `project.dataset.table`,
UNNEST(JSON_EXTRACT_ARRAY(json)) el
WHERE JSON_EXTRACT_SCALAR(el, '$.key') = 'expiry_date'
with output
Row default_value
1 2029-12-15
Depending on your real use case, you might consider the variation below
#standardSQL
SELECT *,
(
SELECT JSON_EXTRACT_SCALAR(el, '$.default_value')
FROM UNNEST(JSON_EXTRACT_ARRAY(json)) el
WHERE JSON_EXTRACT_SCALAR(el, '$.key') = 'expiry_date'
) AS default_value
FROM `project.dataset.table`