Update properties in SQL Json fields with nested objects - sql

I'm new to JSON methods in SQL
I'm working with a huge JSON field, which has such a structure :
{
"A": 1,
"B": 2,
"C": 0,
"data": [
{
"id": "id_i_want",
"value": "[{
"prop1":7,
"prop2":"X",
"prop3":"4",
"passages":[
{
"id":0,
"number":"E24",
"date":"11/12/2019"
},
{
"id":0,
"number":"F28",
"date":"11/11/2019"
},
{
...
}
]
}]
},
{
"id": "id_i_do_NOT_want",
"value": Same structure as above
}
]
}
This JSON field is stored in an nvarchar(MAX) column in SQL Server.
So the JSON has a property data, which contains a list of elements.
These elements have a value property, which contains a list of passages.
All the passages currently have id = 0
What I need to do :
I would like to increment all the ids of the passages, starting from 1, but only the ones in the object which has the ID id_i_want, and NOT the others.
How can I do that with a SQL script ?
I tried to follow this post, but without success
Any help appreciated

First change the Json data to tabular data, then update the table, and then convert the table back to Json.
I have prepared the following code, you can use it easily with a little change.
-- Shred the JSON into rows, renumber the passages, and return the tabular result.
-- NOTE: the original paste garbled the T-SQL variable sigil '@' into '#'; fixed here.
declare @varData nvarchar(max) = '{
"A": 1,
"B": 2,
"C": 0,
"data": [
{
"id": "id_i_want",
"value": [{
"prop1":7,
"prop2":"X",
"prop3":"4",
"passages":[
{
"id":0,
"number":"E24",
"date":"11/12/2019"
},
{
"id":0,
"number":"F28",
"date":"11/11/2019"
}
]
}]
},
{
"id": "id_i_do_NOT_want"
}
]
}';

-- Working table: one row per passage of the element we want.
DECLARE @jsontable TABLE (A varchar(5), b varchar(5), c varchar(5), id NVARCHAR(50), prop1 int, prop2 varchar(5), prop3 varchar(5), mid int, number varchar(5), date date);
-- Result table: same shape, with mid renumbered 1..n.
DECLARE @maintable TABLE (A varchar(5), b varchar(5), c varchar(5), id NVARCHAR(50), prop1 int, prop2 varchar(5), prop3 varchar(5), mid int, number varchar(5), date date);

-- Flatten the nested document: root -> data[] -> value[] -> passages[],
-- keeping only the element whose id is 'id_i_want'.
insert into @jsontable
SELECT A, b, C, id, prop1, prop2, prop3, mid, number, date
FROM OPENJSON(@varData)
WITH (
    A varchar(5) '$.A',
    B varchar(5) '$.B',
    C varchar(5) '$.C',
    jdata NVARCHAR(MAX) '$.data' AS JSON
)
OUTER APPLY OPENJSON(jdata)
WITH (
    id NVARCHAR(50) '$.id',
    jvalue NVARCHAR(MAX) '$.value' AS JSON
)
OUTER APPLY OPENJSON(jvalue)
WITH (
    prop1 int '$.prop1',
    prop2 varchar(5) '$.prop2',
    prop3 varchar(5) '$.prop3',
    jpassages NVARCHAR(MAX) '$.passages' AS JSON
)
OUTER APPLY OPENJSON(jpassages)
WITH (
    mid int '$.id',
    number varchar(5) '$.number',
    date date '$.date'
)
where id = 'id_i_want';

-- Renumber the passages 1..n in a single set-based statement.
-- (The original row-by-row WHILE loop was O(n^2) and silently dropped rows:
-- its DELETE matched on every column, so two identical passages were removed
-- together while only one numbered row was inserted. TOP 1 without ORDER BY
-- also made the numbering order undefined.)
INSERT INTO @maintable (A, b, c, id, prop1, prop2, prop3, mid, number, date)
SELECT A, b, c, id, prop1, prop2, prop3,
       ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) AS mid,
       number, date
FROM @jsontable;

select * from @maintable;
demo in db<>fiddle

Related

Using JSON_VALUE for parse column in SQL Server table

I have never worked with JSON in SQL Server before, so I need some help.
I have written a simple snippet of code:
DECLARE @json NVARCHAR(4000)
SET @json =
N'{
"id":"40476",
"tags":[
{
"id":"5f5883",
},
{
"id":"5fc8",
}
],
"type":"student",
"external_id":"40614476"
}'
SELECT
JSON_value(@json, '$.tags[0].id') as tags
In sample above I write code how get first "id" from "tags".
But what should the script look like if "tags" contains not just 2 ids but an unknown number of them, and the result should be in a column like this:
1 5f5883
2 5fc8
You may use OPENJSON() with explicit schema to parse the $.tags JSON array:
-- Parse the $.tags JSON array into typed rows with OPENJSON() and an explicit schema.
DECLARE @json NVARCHAR(4000)
SET @json =
N'{
"id":"40476",
"tags":[
{
"id":"5f5883"
},
{
"id":"5fc8"
}
],
"type":"student",
"external_id":"40614476"
}'
-- One output row per element of $.tags; the WITH clause maps $.id to a column.
SELECT id
FROM OPENJSON(@json, '$.tags') WITH (id varchar(10) '$.id')
Result:
id
------
5f5883
5fc8
If you want to get the index of each id in the $.tags JSON array, then you need a combination of OPENJSON() with default schema and JSON_VALUE():
-- With the default schema, OPENJSON() exposes the zero-based array index in [key]
-- and the raw element text in [value]; JSON_VALUE() then extracts $.id from it.
SELECT CONVERT(int, [key]) AS rn, JSON_VALUE([value], '$.id') AS id
FROM OPENJSON(@json, '$.tags')
Result:
rn id
----------
0 5f5883
1 5fc8

Storing flattened JSON (key-value pairs) with variable value types

I have JSON documents with no defined schema. Each document may have different schema and values.
{
"location":
{
"latitude": 58.23,
"longitude": 25.11
},
"building": "A1",
"active": true,
"parameters": [ 1, { "scanInterval": 1000 } ]
}
These JSON documents are flattened - formatted into Key-Value pairs.
{
"location.latitude": 58.23,
"location.longitude": 25.11,
"building": "A1",
"active": true,
"parameter[0]": 1,
"parameter[1].scanInterval": 1000
}
Key is always String.
Value can be String, Number, Boolean.
These key-value pairs will be stored in SQL table. The requirement is to be able to filter key-values based on their JSON native values.
SELECT .... FROM ... WHERE [Key] = @key AND [Value] > @value; -- [Value] is integer/float
SELECT .... FROM ... WHERE [Key] = @key AND [Value] != @value; -- [Value] is bit/boolean
SELECT .... FROM ... WHERE [Key] = @key AND [Value] = @value; -- [Value] is string
Which makes me question - how do I design my table?
OPTION A) Casting.
CREATE TABLE [dbo].[OptionA](
....
[Key] [nvarchar](max),
[ValueType] [nvarchar](max)
[Value] [nvarchar](max)
)
Always store [Value] as String, when querying data, select rows with matching [ValueType] and cast value:
... WHERE [ValueType] = 'Number' AND [Key] = @key AND CAST([Value] AS FLOAT) > @value
OPTION B) Column for each value type.
CREATE TABLE [dbo].[OptionB](
....
[Key] [nvarchar](50),
[StringValue] [nvarchar](50) NULL,
[NumericValue] [float] NULL,
[BooleanValue] [bit] NULL
)
There are 3 columns. Each column for 1 value type. Out of all 3 columns only 1 can contain value and the rest are NULL.
When querying data, select the column with the appropriate value type:
SELECT .... FROM ... WHERE [Key] = @key AND [NumericValue] > @value
Which option yields the best results or is/seems overall better? Perhaps there are some other better alternatives?
I'm leaning more towards the A) approach, however all the casting may be adding extra complexity and may have potential performance hits.
A little ugly, and not fully tested, but perhaps this will give you a nudge in the right direction.
I should note that SEQuence is optional
Example
Declare @JSON varchar(max) = '
{
"location":
{
"latitude": 58.23,
"longitude": 25.11
},
"building": "A1",
"active": true,
"parameters": [ 1, { "scanInterval": 1000 } ]
}
';
-- Recursive CTE: walks the JSON tree, building a dotted/bracketed path (spath)
-- and a sortable sequence string (seq) so leaves come out in document order.
with cte0 as (
      -- Anchor: the top-level properties of the document.
      Select *
            ,spath = convert(varchar(max),[key])
            ,seq = convert(varchar(250),10000+row_number() over(order by 1/0))
      From OpenJSON(@json,'$')
      Union All
      -- Recurse into containers (type>3 = array or object).
      -- Numeric keys (array indexes) are rendered as [n]; object keys as .key
      Select R.*
            ,spath = convert(varchar(max),concat(P.spath,case when try_convert(int,r.[key]) is not null and P.[type]>3 then quotename(r.[key]) else '.'+r.[key] end))
            ,seq = convert(varchar(250),concat(P.seq,'\',10000+row_number() over(order by 1/0)))
      From cte0 P
      Cross Apply OpenJSON(P.[Value],'$') R
      Where P.[Type]>3
)
-- Keep only scalar leaves (type 1=string, 2=numeric, 3=bool), in document order.
Select [key] = spath
      ,value
      ,[Type] = choose([Type],'string','numeric','bool','array','object')
from cte0
Where [type]<=3
Order By seq
Results
key value Type
location.latitude 58.23 numeric
location.longitude 25.11 numeric
building A1 string
active true bool
parameters[0] 1 numeric
parameters[1].scanInterval 1000 numeric

Create table from openjson(#json) results Azure SQL

I am trying to make a table out of the [key] row values from a
select * from openjson(@json) statement. The openjson(@json) statement gives me results that contain 53 [key] row values, here's a small snippet:
[key] [value] [type]
_id 5b05390c5d222f0059209918 1
ean 65485555 1
name NULL 0
holder {"_id":"5b0538355d222f00585db6f1","name":"***... 5
root {"_id":"5b05390c5d222f005920990a","holder":{"_id":"5b0538885... 5
assigner {"_id":"5b0538885d222f00570aca19","name":"***... 5
created 2018-05-23T09:49:00+0000 1
children [] 4
address 1
timezone Etc/GMT-1 1
I want a table that look something like this:
table1
[_id] [ean] [name] [holder] [etc...]
5b05390c5d222f0059209918 65485555 NULL {"_id":"5b0538355d222...}
I also want to be able to insert values from another JSON into the same table
insert into table1 ()
select [value] from openjson(@json2)
Thank you!
Just add a WITH clause to your OPENJSON query.
See OPENJSON, eg:
DECLARE @json NVARCHAR(MAX) = N'[
{
"Order": {
"Number":"SO43659",
"Date":"2011-05-31T00:00:00"
},
"AccountNumber":"AW29825",
"Item": {
"Price":2024.9940,
"Quantity":1
}
},
{
"Order": {
"Number":"SO43661",
"Date":"2011-06-01T00:00:00"
},
"AccountNumber":"AW73565",
"Item": {
"Price":2024.9940,
"Quantity":3
}
}
]'
-- The WITH clause projects nested JSON properties into typed columns via JSON paths;
-- [Order] ... AS JSON returns that sub-object as raw JSON text instead of a scalar.
SELECT *
FROM OPENJSON ( @json )
WITH (
    Number varchar(200) '$.Order.Number',
    Date datetime '$.Order.Date',
    Customer varchar(200) '$.AccountNumber',
    Quantity int '$.Item.Quantity',
    [Order] nvarchar(MAX) AS JSON
)

SQL Server 2016, return json and non json together

Is there a way to return json and non json data together? For example:
Column1 | Column2
1 | {["something":"value", "column2": "value2"]}
Try use AS JSON
DECLARE @json NVARCHAR(MAX) = N'
{
"Other":[{
"something": "value",
"column2": "value2"}],
"Id": 1
}'
-- AS JSON keeps the matching fragment as raw JSON text alongside scalar columns.
SELECT *
FROM OPENJSON ( @json )
WITH (
    [Id] int '$.Id',
    [Other] nvarchar(MAX) AS JSON
)

How to make JSON from SQL query in MS SQL 2014

Question: What is best solution to generate JSON from a SQL query in MS SQL 2014? I created a procedure, but it is very slow.
My Example:
DECLARE @customers xml;
DECLARE @json NVARCHAR(max);
SET @customers = (SELECT * FROM dbo.Customers FOR XML path, root)
EXEC [dbo].[HTTP_JSON] @customers, @json
EXEC [dbo].[HTTP_JSON](@Shopping)
Create PROCEDURE [dbo].[HTTP_JSON]
@parameters xml, @response NVARCHAR(max) OUTPUT
WITH EXEC AS CALLER
AS
set @response = (SELECT Stuff(
(SELECT * from
(SELECT ',
{'+
Stuff((SELECT ',"'+coalesce(b.c.value('local-name(.)', 'NVARCHAR(MAX)'),'')+'":"'+
b.c.value('text()[1]','NVARCHAR(MAX)') +'"'
from x.a.nodes('*') b(c)
for xml path(''),TYPE).value('(./text())[1]','NVARCHAR(MAX)')
,1,1,'')+'}'
from @parameters.nodes('/root/*') x(a)
) JSON(theLine)
for xml path(''),TYPE).value('.','NVARCHAR(MAX)' )
,1,1,''))
GO
Just for fun, I created a scalar function based off of my prior answer.
Aside from the obvious XML parameter, I added two additional: 1) Include Header (illustrated below), and 2) ToLower case (I prefer my JSON field names in lower case which links to my classes and such).
If the query is more than one record, a formatted array will be returned.
-- Demo: generate a JSON object per row by CROSS APPLYing the scalar UDF
-- (0 = no header, 1 = lower-case field names).
Declare @Table table (ID int,Active bit,First_Name varchar(50),Last_Name varchar(50),EMail varchar(50))
Insert into @Table values
 (1,1,'John','Smith','john.smith@email.com'),
 (2,0,'Jane','Doe' ,'jane.doe@email.com')

Select A.ID
      ,A.Last_Name
      ,A.First_Name
      ,B.JSON
 From @Table A
 Cross Apply (Select JSON=[dbo].[udf-Str-JSON](0,1,(Select A.* For XML Raw)) ) B
Returns
ID Last_Name First_Name JSON
1 Smith John {"id":"1","active":"1","first_name":"John","last_name":"Smith","email":"john.smith@email.com"}
2 Doe Jane {"id":"2","active":"0","first_name":"Jane","last_name":"Doe","email":"jane.doe@email.com"}
Or even more simply
Select JSON=[dbo].[udf-Str-JSON](0,1,(Select * From @Table for XML RAW))
Returns with Header ON
{
"status": {
"successful": "true",
"timestamp": "2016-10-09 06:08:16 GMT",
"rows": "2"
},
"results": [{
"id": "1",
"active": "1",
"first_name": "John",
"last_name": "Smith",
"email": "john.smith@email.com"
}, {
"id": "2",
"active": "0",
"first_name": "Jane",
"last_name": "Doe",
"email": "jane.doe@email.com"
}]
}
Returns with Header Off
[{
"id": "1",
"active": "1",
"first_name": "John",
"last_name": "Smith",
"email": "john.smith@email.com"
}, {
"id": "2",
"active": "0",
"first_name": "Jane",
"last_name": "Doe",
"email": "jane.doe@email.com"
}]
The UDF
-- Converts an XML rowset (produced by SELECT ... FOR XML RAW) into a JSON string.
-- @IncludeHead : 1 = wrap the results in a {"status":...,"results":[...]} header.
-- @ToLowerCase : 1 = emit field names in lower case.
-- @XML         : the FOR XML RAW document; each <row> attribute becomes a field.
-- NOTE: the original paste garbled '@' to '#' in both T-SQL variables and the
-- XQuery attribute axes ('@*'); restored here.
ALTER FUNCTION [dbo].[udf-Str-JSON] (@IncludeHead int,@ToLowerCase int,@XML xml)
Returns varchar(max)
AS
Begin
Declare @Head varchar(max) = '',@JSON varchar(max) = ''
-- cteEAV: unpivot each <row>'s attributes into Entity/Attribute/Value triples;
-- the first attribute of the row is taken as the Entity (row key).
; with cteEAV as (Select RowNr=Row_Number() over (Order By (Select NULL))
,Entity = xRow.value('@*[1]','varchar(100)')
,Attribute = xAtt.value('local-name(.)','varchar(100)')
,Value = xAtt.value('.','varchar(max)')
From @XML.nodes('/row') As R(xRow)
Cross Apply R.xRow.nodes('./@*') As A(xAtt) )
-- cteSum: row count plus the outer wrapper template; '[getResults]' is the
-- placeholder later replaced by the concatenated row fragments.
,cteSum as (Select Records=count(Distinct Entity)
,Head = IIF(@IncludeHead=0,IIF(count(Distinct Entity)<=1,'[getResults]','[[getResults]]'),Concat('{"status":{"successful":"true","timestamp":"',Format(GetUTCDate(),'yyyy-MM-dd hh:mm:ss '),'GMT','","rows":"',count(Distinct Entity),'"},"results":[[getResults]]}') )
From cteEAV)
-- cteBld: flag where each row's object starts/ends and render "name":"value" pairs.
,cteBld as (Select *
,NewRow=IIF(Lag(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,'',',{')
,EndRow=IIF(Lead(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,',','}')
,JSON=Concat('"',IIF(@ToLowerCase=1,Lower(Attribute),Attribute),'":','"',Value,'"')
From cteEAV )
-- Concatenate all fragments and splice them into the header template.
Select @JSON = @JSON+NewRow+JSON+EndRow,@Head = Head From cteBld, cteSum
Return Replace(@Head,'[getResults]',Stuff(@JSON,1,1,''))
End
-- Parameter 1: @IncludeHead 1/0
-- Parameter 2: @ToLowerCase 1/0 (converts field names to lowercase)
-- Parameter 3: (Select * From ... for XML RAW)
**EDIT - Corrected Typo
The following should create the JSON array for just about any data set. However, I have not created a way to convert bit to true/false yet.
Just one point to consider: The FIRST column in the initial SELECT has to be the Primary Key which is equates to the ENTITY field. In this case, Select * from #User for XML RAW ... ID is the Entity and just so happens to be the first field in the table
As far as performance, 500 records with 19 fields creates a JSON string 191,987 bytes in 0.694 seconds (50 records in 0.098 seconds)
Consider the following:
-- Inline (non-UDF) version of the XML -> JSON conversion, shown on a sample table.
Declare @User table (ID int,Active bit,First_Name varchar(50),Last_Name varchar(50),EMail varchar(50),LastOn DateTime)
Insert into @User values
 (1,1,'John','Smith','john.smith@email.com','2016-10-05 17:32:41.903'),
 (2,0,'Jane','Doe' ,'jane.doe@email.com','2016-10-05 08:25:18.203')

Declare @XML xml = (Select * From @User for XML RAW)
Declare @JSON varchar(max) = ''

-- cteEAV: unpivot the <row> attributes into Entity/Attribute/Value rows;
-- the first attribute (here ID) acts as the row key.
;with cteEAV as (
Select RowNr = Row_Number() over (Order By (Select NULL))
,Entity = xRow.value('@*[1]','varchar(100)')
,Attribute = xAtt.value('local-name(.)','varchar(100)')
,Value = xAtt.value('.','varchar(max)')
From @XML.nodes('/row') As A(xRow)
Cross Apply A.xRow.nodes('./@*') As B(xAtt) )
-- cteBld: mark object boundaries and render each attribute as "name":"value".
,cteBld as (
Select *
,NewRow = IIF(Lag(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,'',',{')
,EndRow = IIF(Lead(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,',','}')
,JSON = Concat('"',Attribute,'":','"',Value,'"')
From cteEAV )
-- Concatenate the fragments, then wrap in [ ] to form the final JSON array.
Select @JSON = @JSON+NewRow+JSON+EndRow
From cteBld
Select '['+Stuff(@JSON,1,1,'')+']'
Returns
[{"ID":1, "Active":1, "First_Name":"John", "Last_Name":"Smith", "EMail":"john.smith@email.com", "LastOn":"2016-10-05T17:32:41.903", "TotalSales":25569.0000} ,{"ID":2, "Active":0, "First_Name":"Jane", "Last_Name":"Doe", "EMail":"jane.doe@email.com", "LastOn":"2016-10-05T08:25:18.203", "TotalSales":22888.0000}]
A more readable version
cteEAV will dynamically unpivot the data and generate the following:
cteBLD will extend and add flags New/End Row
The Final Select
This will put it all together and generate one final string which can be wrapped or nested as you please.