I have a column containing JSON and I need to extract the transaction_id if there is one:
{
"transaction_id": "222222",
"feature_id": "e57dcd3c-7adf-11eb-9439-0242ac130002",
"provider_type": "thm",
"transaction_date": "2020-11-14",
"account_number": "XXXX",
"transaction_vendor": "Visa",
"transaction_type": "DEBIT",
"alert_type": "Large Card Purchase",
"threshold_amount": "1250.0",
"profile_id": "4fbf9b51-c737-434d-ae5c-63a35ac4a75a",
"alert_id": "d1cd1f80-ed0c-49c3-bf97-e71834f4aaa1",
"account_name": "BankXX",
"provider_id": "2",
"alert_date": "2022-11-05T01:31:17.000Z",
"create_date": "2022-11-05T01:31:17.000Z"
}
It could be as simple as this
Declare @YourTable Table ([ID] varchar(50),[JsonStr] varchar(max))
Insert Into @YourTable Values
(1,'{"transaction_id": "222222","feature_id": "e57dcd3c-7adf-11eb-9439-0242ac130002","provider_type": "thm","transaction_date": "2020-11-14","account_number": "XXXX","transaction_vendor": "Visa","transaction_type": "DEBIT","alert_type": "Large Card Purchase","threshold_amount": "1250.0","profile_id": "4fbf9b51-c737-434d-ae5c-63a35ac4a75a","alert_id": "d1cd1f80-ed0c-49c3-bf97-e71834f4aaa1","account_name": "BankXX","provider_id": "2","alert_date": "2022-11-05T01:31:17.000Z","create_date": "2022-11-05T01:31:17.000Z"}')
Select ID
,Trans_ID = json_value(JsonStr,'$.transaction_id')
From @YourTable
Results
ID Trans_ID
1 222222
I used the following:
JSON_VALUE(args, '$.transaction_id')
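Applied to a real table that presumably holds this JSON in an args column (the table name dbo.YourTransactions below is a placeholder, adjust to your schema), a minimal sketch would be:
-- Sketch only: dbo.YourTransactions and args are assumed names
SELECT  t.*,
        Trans_ID = JSON_VALUE(t.args, '$.transaction_id')  -- NULL in lax mode when the key is missing
FROM    dbo.YourTransactions t
WHERE   ISJSON(t.args) = 1;                                -- skip rows whose text is not valid JSON
JSON_VALUE simply returns NULL when transaction_id is absent, which covers the "if there is one" requirement.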
Struggling to find an answer to the JSON problem below. I would like to display the entire SKU/quantity list from the "shipmentItems" array in their respective columns as comma-separated values. My example below only returns the first SKU/quantity from the array, but my goal is to get all of them listed in the columns, comma-separated.
JSON example:
{"shipments": [
{
"shipmentId": 100003768,
"orderNumber": "9648219086",
"shipDate": "2021-10-28",
"serviceCode": "ups_ground",
"shipmentItems": [
{
"orderItemId": 1464643208,
"lineItemKey": "10322938560608",
"sku": "SCPXTSS-BAG-06",
"name": "SCOOP-PLATE-06 (1000ml)",
"weight": {
"value": 0,
"units": "ounces",
"WeightUnits": 1
},
"quantity": 1,
"unitPrice": 0,
"taxAmount": null
},
{
"orderItemId": 1464643207,
"lineItemKey": "10322938527840",
"sku": "SCPZRTS-TRAY-01",
"name": "Beef: Tray 3 (Fill 004)<br>",
"weight": {
"value": 60,
"units": "ounces",
"WeightUnits": 1
},
"quantity": 1,
"unitPrice": 102.72,
"taxAmount": null
}
],
"labelData": null,
"formData": null
}
]
}
SQL query I'm using:
DECLARE @JSON varchar(max)
SELECT @JSON = BulkColumn
FROM OPENROWSET (BULK 'C:\Users\XPS-LT\json\today\shipments_20211031.json', SINGLE_CLOB)
IMPORT
SELECT *
FROM OPENJSON (@JSON, '$.shipments')
WITH
(
[shipmentId] bigint,
[orderNumber] nvarchar(60),
[shipDate] date,
[serviceCode] nvarchar(30),
[sku] nvarchar(MAX) N'$.shipmentItems[0].sku',
[quantity] int N'$.shipmentItems[0].quantity'
)
;
The "shipmentItems" part of the input JSON is an array, so you need an AS JSON clause in the first explicit schema and an additional OPENJSON() call:
DECLARE @json nvarchar(max)
...
SELECT
j.[shipmentId], j.[orderNumber], j.[shipDate], j.[serviceCode],
a.[sku], a.[quantity]
FROM OPENJSON (#json, '$.shipments') WITH (
[shipmentId] bigint,
[orderNumber] nvarchar(60),
[shipDate] date,
[serviceCode] nvarchar(30),
[shipmentItems] nvarchar(max) AS JSON
) j
OUTER APPLY (
SELECT
STRING_AGG([sku], ',') WITHIN GROUP (ORDER BY [orderItemId]),
STRING_AGG([quantity], ',') WITHIN GROUP (ORDER BY [orderItemId])
FROM OPENJSON (j.shipmentItems) WITH (
[orderItemId] int '$.orderItemId',
[sku] nvarchar(max) '$.sku',
[quantity] int N'$.quantity'
)
) a ([sku], [quantity])
Result:
shipmentId orderNumber shipDate serviceCode sku quantity
100003768 9648219086 2021-10-28 ups_ground SCPZRTS-TRAY-01,SCPXTSS-BAG-06 1,1
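Note that STRING_AGG requires SQL Server 2017 or later. If you are on SQL Server 2016 (which has OPENJSON but not STRING_AGG), a FOR XML PATH based concatenation is a possible substitute; the following is only a sketch of that idea, reusing the same @json variable and outer schema:
SELECT
    j.[shipmentId], j.[orderNumber], j.[shipDate], j.[serviceCode],
    [sku] = STUFF((
        SELECT ',' + s.[sku]
        FROM OPENJSON (j.shipmentItems) WITH ([orderItemId] int, [sku] nvarchar(max)) s
        ORDER BY s.[orderItemId]
        FOR XML PATH(''), TYPE).value('.', 'nvarchar(max)'), 1, 1, ''),
    [quantity] = STUFF((
        SELECT ',' + CONVERT(varchar(20), s.[quantity])
        FROM OPENJSON (j.shipmentItems) WITH ([orderItemId] int, [quantity] int) s
        ORDER BY s.[orderItemId]
        FOR XML PATH(''), TYPE).value('.', 'nvarchar(max)'), 1, 1, '')
FROM OPENJSON (@json, '$.shipments') WITH (
    [shipmentId] bigint,
    [orderNumber] nvarchar(60),
    [shipDate] date,
    [serviceCode] nvarchar(30),
    [shipmentItems] nvarchar(max) AS JSON
) j
The STUFF(... FOR XML PATH(''), TYPE) pattern concatenates the per-item rows and strips the leading comma, which is the usual pre-2017 replacement for STRING_AGG.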
Is it possible to create JSON key/value pairs from a table SELECT statement, where the column name is the key and the column value is the value?
declare @T table(Id int, ItemName varchar(10), CategoryId int, ItemDate date)
insert into @T
values(1,'ABC',100, '1/1/2020')
to return something like the below:
{
"id": 1,
"table": "tableName",
"data": [{
"key": "ItemName",
"value": "ABC"
},
{
"key": "CategoryId",
"value": "100"
},
{
"key": "ItemDate",
"value": "1/1/2020"
}
]
}
I have looked at selecting as JSON but am stuck here:
select *
from @T
for json auto
You may try to use a VALUES table value constructor and FOR JSON AUTO. As mentioned in the documentation, when you specify the AUTO option, the format of the JSON output is automatically determined based on the order of columns in the SELECT list and their source tables.
Table:
CREATE TABLE Tbl (
Id int,
ItemName varchar(10),
CategoryId int,
ItemDate date
)
INSERT INTO Tbl
VALUES
(1, 'ABC', 100, '1/1/2020'),
(2, 'DEF', 200, '2/2/2020')
Statement:
SELECT t.Id, data.[key], data.[value]
FROM Tbl t
CROSS APPLY (VALUES
('ItemName', CONVERT(varchar(max), ItemName)),
('CategoryId', CONVERT(varchar(max), CategoryId)),
('ItemDate', CONVERT(varchar(max), ItemDate))
) Data ([key], [value])
FOR JSON AUTO
Result:
[
{
"Id":1,
"Data":[
{"key":"ItemName", "value":"ABC"},
{"key":"CategoryId","value":"100"},
{"key":"ItemDate","value":"2020-01-01"}
]
},
{
"Id":2,
"Data":[
{"key":"ItemName", "value":"DEF"},
{"key":"CategoryId", "value":"200"},
{"key":"ItemDate", "value":"2020-02-02"}
]
}
]
As an additional option you may try to build the inner JSON for each row:
SELECT
Id,
(
SELECT [key], [value]
FROM (VALUES
('ItemName', CONVERT(varchar(max), ItemName)),
('CategoryId', CONVERT(varchar(max), CategoryId)),
('ItemDate', CONVERT(varchar(max), ItemDate))
) v ([key], [value])
FOR JSON PATH
) AS Data
FROM Tbl
FOR JSON AUTO
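If you need the keys to match the exact shape in the question (id, table, data), you can alias the columns and emit the table name as a literal. This is only a sketch (the 'Tbl' literal is a stand-in for your actual table name); the output of a FOR JSON subquery is embedded as nested JSON rather than an escaped string, and you can add WITHOUT_ARRAY_WRAPPER to the outer FOR JSON PATH if you want a single object instead of an array:
SELECT
    t.Id AS [id],
    'Tbl' AS [table],                                      -- hard-coded literal table name
    (
        SELECT [key], [value]
        FROM (VALUES
            ('ItemName',   CONVERT(varchar(max), t.ItemName)),
            ('CategoryId', CONVERT(varchar(max), t.CategoryId)),
            ('ItemDate',   CONVERT(varchar(max), t.ItemDate))
        ) v ([key], [value])
        FOR JSON PATH
    ) AS [data]
FROM Tbl t
FOR JSON PATH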
I have a table PublicRelations with a column called Students in a SQL Server database called Subjects. The Students column holds JSON like this:
[
{ "Label": "Name", "ColumnValue": "Trudie" },
{ "Label": "Class", "ColumnValue": "PublicRelations" },
{ "Label": "Room", "ColumnValue": "8049" },
{ "Label": "HttpPath", "ColumnValue": "https://www.google.com/" }
]
I only get NULL when I run the query below using JSON_VALUE. I'd like it to display the value from the array. I believe this may have to do with the 4000-character limit?
SELECT [StuduentID],
[Students],
--JSON_VALUE([Students],'$.ColumnValue') AS Name --Only returns NULL
FROM [Subjects].[dbo].[PublicRelations] c
CROSS APPLY OPENJSON(c.Students)
WITH ( Name int '$.Name',
Value nvarchar(255) '$.ColmunValue'
) AS jsonValues
WHERE jsonValues.ColumnValue = 'Trudie'
The query works and I can find what I need, but again, I only get NULL when I want to display that part of the JSON column in my results.
The statement is wrong and has the following issues (as @MartinSmith already mentioned):
A typo in the path - '$.ColmunValue' should be '$.ColumnValue'.
Wrong schema definition (the WITH clause) - I can't see Name key in the input JSON.
Wrong use of JSON_VALUE() - this function extracts scalar value from a JSON string, so JSON_VALUE([Students],'$.ColumnValue') returns NULL with this JSON input in lax mode.
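For the third point, JSON_VALUE does work once the path addresses a concrete element of the array, because the value at that path is then a scalar; a small sketch against the sample data used below:
-- '$[0].ColumnValue' points at the first object in the array; returns 'Trudie' for the sample row
SELECT JSON_VALUE(p.Students, '$[0].ColumnValue') AS FirstColumnValue
FROM PublicRelations p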
You may try with the following statement (based on the statement in the question):
Table:
CREATE TABLE PublicRelations (
StudentID int,
Students nvarchar(1000))
INSERT INTO PublicRelations (StudentID, Students)
VALUES (1, N'[
{ "Label": "Name", "ColumnValue": "Trudie" },
{ "Label": "Class", "ColumnValue": "PublicRelations" },
{ "Label": "Room", "ColumnValue": "8049" },
{ "Label": "HttpPath", "ColumnValue": "https://www.google.com/" }
]')
Statement:
SELECT p.StudentID, j.*
FROM [PublicRelations] p
CROSS APPLY OPENJSON(p.Students) WITH (
Name nvarchar(50) '$.Label',
Value nvarchar(255) '$.ColumnValue'
) j
WHERE EXISTS (
SELECT 1
FROM OPENJSON(p.Students) WITH (Value nvarchar(255) '$.ColumnValue')
WHERE Value = N'Trudie'
) AND (j.Name IN ('Name', 'Class', 'Room'))
Result:
StudentID Name Value
1 Name Trudie
1 Class PublicRelations
1 Room 8049
I have written code that reads information from a JSON string into a table in SQL:
declare @pJSON varchar(max) = '
{
"School": "MiddleSchool",
"Password": "SchoolPassword",
"Attributes": [
{
"Type": "Exam",
"Value": "1"
},
{
"Type": "Class",
"Value": "11b"
},
{
"Type": "Math",
"Value": [
{
"ExamDate": "2019-01-01",
"Points": 100,
"Grade": 10,
"Notes": "Good"
}
]
}
]
} '
select ExamDate, Points, Grade, Notes
from OPENJSON(@pJSON, N'$.Attributes[2].Value')
cross apply openjson ([Value])
with
(
ExamDate date,
Points int,
Grade int,
Notes varchar(max)
) as [value]
The code works fine, but I really hate the N'$.Attributes[2].Value' part. The exam information can be in the first, second, or third place, so [2] doesn't really work for me. Do you have any suggestions on how I can improve this code? Thank you!
You could use JSON_QUERY:
select ExamDate, Points, Grade, Notes
from OPENJSON(JSON_QUERY(@pJSON, N'$.Attributes'))
with
(
ExamDate date N'$.Value[0].ExamDate', -- here 0 because Value is array too
Points int N'$.Value[0].Points',
Grade int N'$.Value[0].Grade',
Notes varchar(max) N'$.Value[0].Notes'
) as [value]
WHERE ExamDate IS NOT NULL;
db<>fiddle demo
EDIT:
In the original question there was only one exam in the array. If the array could contain more than one, the code should be adjusted:
SELECT s2.[key]
,ExamValue = JSON_VALUE(s2.value, '$.ExamDate')
,Points = JSON_VALUE(s2.value, '$.Points')
,Grade = JSON_VALUE(s2.value, '$.Grade')
,Notes = JSON_VALUE(s2.value, '$.Notes')
FROM OPENJSON(JSON_QUERY(@pJSON, N'$.Attributes')) s
CROSS APPLY OPENJSON(JSON_QUERY(s.value, N'$.Value')) s2;
-- or
SELECT [value].*
FROM OPENJSON(JSON_QUERY(@pJSON, N'$.Attributes'))
CROSS APPLY OPENJSON(JSON_QUERY(value, N'$.Value'))
with
(
ExamDate date N'$.ExamDate',
Points int N'$.Points',
Grade int N'$.Grade',
Notes varchar(max) N'$.Notes'
) as [value];
db<>fiddle demo
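Another option, if the intent is specifically the attribute whose Type is "Math" rather than whichever attribute happens to contain exam fields, is to keep the nested array with an AS JSON column and filter on Type. A sketch; attributes whose Value is a plain scalar come back as NULL from AS JSON in lax mode and simply produce no rows in the CROSS APPLY:
SELECT e.ExamDate, e.Points, e.Grade, e.Notes
FROM OPENJSON(@pJSON, N'$.Attributes') WITH (
    [Type]  varchar(50)   N'$.Type',
    [Value] nvarchar(max) N'$.Value' AS JSON   -- keep the nested array as JSON
) a
CROSS APPLY OPENJSON(a.[Value]) WITH (
    ExamDate date,
    Points   int,
    Grade    int,
    Notes    varchar(max)
) e
WHERE a.[Type] = 'Math';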
Question: What is the best solution to generate JSON from a SQL query in MS SQL 2014? I created a procedure, but it is very slow.
My Example:
DECLARE @customers xml;
DECLARE @json NVARCHAR(max);
SET @customers = (SELECT * FROM dbo.Customers FOR XML path, root)
EXEC [dbo].[HTTP_JSON] @customers, @json
EXEC [dbo].[HTTP_JSON](@Shopping)
Create PROCEDURE [dbo].[HTTP_JSON]
@parameters xml, @response NVARCHAR(max) OUTPUT
WITH EXEC AS CALLER
AS
set @response = (SELECT Stuff(
(SELECT * from
(SELECT ',
{'+
Stuff((SELECT ',"'+coalesce(b.c.value('local-name(.)', 'NVARCHAR(MAX)'),'')+'":"'+
b.c.value('text()[1]','NVARCHAR(MAX)') +'"'
from x.a.nodes('*') b(c)
for xml path(''),TYPE).value('(./text())[1]','NVARCHAR(MAX)')
,1,1,'')+'}'
from @parameters.nodes('/root/*') x(a)
) JSON(theLine)
for xml path(''),TYPE).value('.','NVARCHAR(MAX)' )
,1,1,''))
GO
Just for fun, I created a scalar function based off of my prior answer.
Aside from the obvious XML parameter, I added two additional ones: 1) Include Header (illustrated below), and 2) ToLowerCase (I prefer my JSON field names in lower case, which ties in with my classes and such).
If the query returns more than one record, a formatted array will be returned.
Declare @Table table (ID int,Active bit,First_Name varchar(50),Last_Name varchar(50),EMail varchar(50))
Insert into @Table values
(1,1,'John','Smith','john.smith@email.com'),
(2,0,'Jane','Doe' ,'jane.doe@email.com')
Select A.ID
,A.Last_Name
,A.First_Name
,B.JSON
From @Table A
Cross Apply (Select JSON=[dbo].[udf-Str-JSON](0,1,(Select A.* For XML Raw)) ) B
Returns
ID Last_Name First_Name JSON
1 Smith John {"id":"1","active":"1","first_name":"John","last_name":"Smith","email":"john.smith@email.com"}
2 Doe Jane {"id":"2","active":"0","first_name":"Jane","last_name":"Doe","email":"jane.doe@email.com"}
Or even more simply
Select JSON=[dbo].[udf-Str-JSON](0,1,(Select * From @Table for XML RAW))
Returns with Header ON
{
"status": {
"successful": "true",
"timestamp": "2016-10-09 06:08:16 GMT",
"rows": "2"
},
"results": [{
"id": "1",
"active": "1",
"first_name": "John",
"last_name": "Smith",
"email": "john.smith#email.com"
}, {
"id": "2",
"active": "0",
"first_name": "Jane",
"last_name": "Doe",
"email": "jane.doe#email.com"
}]
}
Returns with Header Off
[{
"id": "1",
"active": "1",
"first_name": "John",
"last_name": "Smith",
"email": "john.smith#email.com"
}, {
"id": "2",
"active": "0",
"first_name": "Jane",
"last_name": "Doe",
"email": "jane.doe#email.com"
}]
The UDF
ALTER FUNCTION [dbo].[udf-Str-JSON] (@IncludeHead int,@ToLowerCase int,@XML xml)
Returns varchar(max)
AS
Begin
Declare @Head varchar(max) = '',@JSON varchar(max) = ''
; with cteEAV as (Select RowNr=Row_Number() over (Order By (Select NULL))
,Entity = xRow.value('@*[1]','varchar(100)')
,Attribute = xAtt.value('local-name(.)','varchar(100)')
,Value = xAtt.value('.','varchar(max)')
From @XML.nodes('/row') As R(xRow)
Cross Apply R.xRow.nodes('./@*') As A(xAtt) )
,cteSum as (Select Records=count(Distinct Entity)
,Head = IIF(@IncludeHead=0,IIF(count(Distinct Entity)<=1,'[getResults]','[[getResults]]'),Concat('{"status":{"successful":"true","timestamp":"',Format(GetUTCDate(),'yyyy-MM-dd hh:mm:ss '),'GMT','","rows":"',count(Distinct Entity),'"},"results":[[getResults]]}') )
From cteEAV)
,cteBld as (Select *
,NewRow=IIF(Lag(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,'',',{')
,EndRow=IIF(Lead(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,',','}')
,JSON=Concat('"',IIF(@ToLowerCase=1,Lower(Attribute),Attribute),'":','"',Value,'"')
From cteEAV )
Select @JSON = @JSON+NewRow+JSON+EndRow,@Head = Head From cteBld, cteSum
Return Replace(@Head,'[getResults]',Stuff(@JSON,1,1,''))
End
-- Parameter 1: @IncludeHead 1/0
-- Parameter 2: @ToLowerCase 1/0 (converts field names to lowercase)
-- Parameter 3: (Select * From ... for XML RAW)
EDIT - Corrected typo
The following should create the JSON array for just about any data set. However, I have not created a way to convert bit to true/false yet.
Just one point to consider: the FIRST column in the initial SELECT has to be the primary key, which equates to the ENTITY field. In this case, in Select * from @User for XML RAW, ID is the Entity and just so happens to be the first field in the table.
As far as performance goes, 500 records with 19 fields creates a JSON string of 191,987 bytes in 0.694 seconds (50 records in 0.098 seconds).
Consider the following:
Declare @User table (ID int,Active bit,First_Name varchar(50),Last_Name varchar(50),EMail varchar(50),LastOn DateTime)
Insert into @User values
(1,1,'John','Smith','john.smith@email.com','2016-10-05 17:32:41.903'),
(2,0,'Jane','Doe' ,'jane.doe@email.com','2016-10-05 08:25:18.203')
Declare @XML xml = (Select * From @User for XML RAW)
Declare @JSON varchar(max) = ''
;with cteEAV as (
Select RowNr = Row_Number() over (Order By (Select NULL))
,Entity = xRow.value('@*[1]','varchar(100)')
,Attribute = xAtt.value('local-name(.)','varchar(100)')
,Value = xAtt.value('.','varchar(max)')
From @XML.nodes('/row') As A(xRow)
Cross Apply A.xRow.nodes('./@*') As B(xAtt) )
,cteBld as (
Select *
,NewRow = IIF(Lag(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,'',',{')
,EndRow = IIF(Lead(Entity,1) over (Partition By Entity Order By (Select NULL))=Entity,',','}')
,JSON = Concat('"',Attribute,'":','"',Value,'"')
From cteEAV )
Select @JSON = @JSON+NewRow+JSON+EndRow
From cteBld
Select '['+Stuff(@JSON,1,1,'')+']'
Returns
[{"ID":1, "Active":1, "First_Name":"John", "Last_Name":"Smith", "EMail":"john.smith#email.com", "LastOn":"2016-10-05T17:32:41.903", "TotalSales":25569.0000} ,{"ID":2, "Active":0, "First_Name":"Jane", "Last_Name":"Doe", "EMail":"jane.doe#email.com", "LastOn":"2016-10-05T08:25:18.203", "TotalSales":22888.0000}]
A more readable version: cteEAV dynamically unpivots the data into Entity/Attribute/Value rows, cteBld extends that with the New Row/End Row flags, and the final SELECT puts it all together into one string which can be wrapped or nested as you please.
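As a closing note, the question targets SQL Server 2014, which is why the XML gymnastics are needed at all; from SQL Server 2016 onward the built-in FOR JSON clause produces this kind of output natively, and it also emits bit columns as true/false. A minimal sketch, assuming the same @User table:
-- Native JSON support (SQL Server 2016+); not applicable to the 2014 question above
SELECT ID, Active, First_Name, Last_Name, EMail, LastOn
FROM @User
FOR JSON PATH;   -- add ROOT('results') or WITHOUT_ARRAY_WRAPPER as needed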