I have a table with JSON documents in one column. Right now I iterate through each row to pivot out a JSON array and flatten it into another table. Is there a way to do this as a bulk operation instead of an iterative process?
DECLARE @json varchar(max)
SET @json = (SELECT document
FROM dbo.MYJsonTable WITH (NOLOCK)
WHERE rawid = 4170159) -- unique identity value
SELECT
b.objectid,
b.staffRole,
b.statusName,
b.gwySync,
b.createdate,
b.rawid,
b.loaddate,
ROW_NUMBER() OVER (PARTITION BY objectid ORDER BY createdate) as row_num
FROM
(SELECT
JSON_VALUE(REPLACE(@json, '$', ''), '$._id.oid') [objectid],
staffRole,
[statusName],
[gwySync],
DATEADD(hour, -6, DATEADD(MS, TRY_CONVERT(BIGINT, LEFT(REPLACE([$numberLong], '$', ''), 10)), '1970-01-01 00:00:00')) [createdate],
4170159 [rawid],
GETDATE() [loaddate]
FROM
OPENJSON(@json, '$.patientJourneySteps')
WITH
(statusLog NVARCHAR(max) as JSON,
staffRole nvarchar(50) '$.staffRole') patientJourneySteps
OUTER APPLY
OPENJSON(patientJourneySteps.statusLog, '$')
WITH
(statusName NVARCHAR(15) '$.statusName',
gwySync nvarchar(20) '$.gwySync',
createdDate NVARCHAR(max) AS JSON) logs
OUTER APPLY
OPENJSON(logs.createdDate,'$')
WITH
([$date] NVARCHAR(max) AS JSON) DateJson
OUTER APPLY
OPENJSON(DateJson.[$date],'$')
WITH
([$numberLong] NVARCHAR(30))
) b
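The set-based equivalent is to drop the variable and CROSS APPLY the same OPENJSON chain to every row of the source table. A minimal sketch of the idea, assuming document and rawid are read straight from dbo.MYJsonTable (untested; the remaining OUTER APPLY levels from the query above carry over unchanged):
SELECT
JSON_VALUE(REPLACE(t.document, '$', ''), '$._id.oid') [objectid],
patientJourneySteps.staffRole,
t.rawid,
GETDATE() [loaddate]
FROM dbo.MYJsonTable t WITH (NOLOCK)
CROSS APPLY OPENJSON(t.document, '$.patientJourneySteps')
WITH
(statusLog NVARCHAR(max) as JSON,
staffRole nvarchar(50) '$.staffRole') patientJourneySteps
-- ...then the same OUTER APPLY OPENJSON(patientJourneySteps.statusLog, ...) levels as above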
Related
I have rows in my table with JSON like this example:
json_data
[{"d":"2021-05-05T12:16:18.9175335","l":"temp12#cor.net","a":"test1","c":"Kom1"}]
[{"d":"2021-05-05T12:16:37.7258608","l":"temp12#cor.net","a":"test2","c":"Kom2"}]
[{"d":"2021-05-05T12:17:30.2390585","l":"temp12#cor.net","a":"test3","c":"Kom3"}]
I want to get the data in table format. With a single row I have no problem using:
DECLARE @JSONINPUT NVARCHAR(max)
SET @JSONINPUT = (select top 1 cast(json_data as varchar(max)) from mytable)
IF (ISJSON(@JSONINPUT) = 1)
BEGIN
SELECT * from OPENJSON(@JSONINPUT)
WITH (
[Kom] nvarchar(max) '$.c',
[Date] DATETIME2 '$.d',
[User] nvarchar(150) '$.a'
);
END
and I get:

Kom     Date                          User
Kom1    2021-05-05 12:16:18.9175335   test1
But I don't know how to get data from all rows.
Use CROSS APPLY with OPENJSON
SELECT j.Kom, j.[Date], j.[User]
FROM mytable
CROSS APPLY OPENJSON(json_data)
WITH (
[Kom] nvarchar(max) '$.c',
[Date] DATETIME2 '$.d',
[User] nvarchar(150) '$.a'
) AS j;
The syntax, as I mentioned, is no different:
SELECT OJ.Kom,
OJ.[Date],
OJ.[User]
FROM dbo.YourTable YT
CROSS APPLY OPENJSON(YT.JSONColumn)
WITH ([Kom] nvarchar(max) '$.c',
[Date] DATETIME2 '$.d',
[User] nvarchar(150) '$.a') OJ;
I am trying to add an array to another array using JSON_MODIFY.
The situation is, I have array-shaped JSON data stored in the database. It looks like this:
declare @base nvarchar(max) = '[{"name":"base"}]';
And I am getting another set of data which is also in the shape of array:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
I am trying to use JSON_MODIFY and JSON_QUERY tricks to append them together, but it gives me unexpected results.
declare @base nvarchar(max) = '[{"name":"base"}]';
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
set @base = JSON_MODIFY(@base, 'append $', JSON_QUERY(@test1));
select @base;
Output:
[{"name":"base"}, [{"name":"test1"},{"name":"example1"}]]
But what I want is using those methods to make it work like kind of Add-Range:
[{"name":"base"},{"name":"test1"},{"name":"example1"}]
I am kind of lost here and don't know where to look for this kind of functionality.
I will use this from a C# service to modify the data directly through code. That's why I cannot use stored procedures or functions either.
Edit #1:
Regarding the reply from @Salman A: I appreciate your answer, but as I said earlier, it is a bit difficult to use in the query I run through code, which is:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
UPDATE dbo.ExampleTable
SET [Data] = JSON_MODIFY([Data], 'append $', JSON_QUERY(@test1))
WHERE [UniqueId] = 'some_guid_here'
I have tried to adapt the answer like this:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
UPDATE dbo.ExampleTable
SET [Data] = (
select [Data] = JSON_MODIFY([Data],'append $',item)
from OPENJSON(@test1)
with ([item] nvarchar(max) '$' as JSON)
)
WHERE [UniqueId] = 'some_id'
Actually, it works if @test1 has only one item, but when @test1 has more than one item it gives the error:
Subquery returned more than 1 value. This is not permitted when the subquery follows = .....
What is the logical way to use this in an UPDATE ... SET subquery?
You can use OPENJSON to convert the array to rows and append items one by one:
declare @base nvarchar(max) = '[{"name":"base"}]';
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
select @base = json_modify(@base, 'append $', item)
from openjson(@test1)
with ([item] nvarchar(max) '$' as json);
select @base;
Returns:
[{"name":"base"},{"name":"test1"},{"name":"example1"}]
Revised answer for update query
If you're using SQL Server 2017+ then a reasonably safe solution is to concatenate the array using STRING_AGG while building the individual rows with JSON functions. It is relatively easy to use this idea in an update query, as sketched after the example:
DECLARE @base NVARCHAR(MAX) = '[{"name":"base"}]';
DECLARE @test NVARCHAR(MAX) = '[{"foo":"bar"},{"baz":"meh"}]';
SELECT '[' + STRING_AGG(jsonstr, ',') WITHIN GROUP (ORDER BY pos) + ']'
FROM (
SELECT value, 1000 + [key] FROM OPENJSON(@base)
UNION ALL
SELECT value, 2000 + [key] FROM OPENJSON(@test)
) AS x(jsonstr, pos);
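Folding this into an UPDATE works through CROSS APPLY, since a correlated OPENJSON over the target column is legal inside APPLY. A sketch against the asker's dbo.ExampleTable (table and column names taken from the question; untested):
DECLARE @test NVARCHAR(MAX) = '[{"foo":"bar"},{"baz":"meh"}]';
UPDATE t
SET t.[Data] = '[' + j.agg + ']'
FROM dbo.ExampleTable AS t
CROSS APPLY (
-- correlated reference to t.[Data] is allowed here because of APPLY
SELECT STRING_AGG(jsonstr, ',') WITHIN GROUP (ORDER BY pos)
FROM (
SELECT value, 1000 + [key] FROM OPENJSON(t.[Data])
UNION ALL
SELECT value, 2000 + [key] FROM OPENJSON(@test)
) AS x(jsonstr, pos)
) AS j(agg)
WHERE t.[UniqueId] = 'some_guid_here';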
Alternatively, you can use a recursive CTE that calls JSON_MODIFY multiple times to build the JSON; you can then use the result in an update query:
CREATE TABLE t(
id INT NOT NULL PRIMARY KEY IDENTITY,
data NVARCHAR(MAX)
);
INSERT INTO t(data) VALUES
('[{"name":"1.1"}]'),
('[{"name":"2.1"},{"name":"2.2"}]');
WITH rows(data, pos) AS (
SELECT value, [key]
FROM OPENJSON('[{"foo":"bar"},{"baz":"meh"}]')
), rcte(id, data, pos) AS (
SELECT id, data, -1
FROM t
UNION ALL
SELECT prev.id, JSON_MODIFY(prev.data, 'append $', JSON_QUERY(curr.data)), prev.pos + 1
FROM rcte AS prev
JOIN rows AS curr ON curr.pos = prev.pos + 1
)
UPDATE t
SET data = (
SELECT TOP 1 data
FROM rcte
WHERE id = t.id
ORDER BY pos DESC
);
Demo on db<>fiddle
I have an array of JSON objects in a SQL Server column and I am trying to update all names to 'Joe'.
I tried the code below, but it updates only the first element of each JSON array.
CREATE TABLE #t (I INT, JsonColumn NVARCHAR(MAX) CHECK (ISJSON(JsonColumn) > 0))
INSERT INTO #t
VALUES (1, '[{"id":"101","name":"John"}, {"id":"102","name":"peter"}]')
INSERT INTO #t VALUES (2,'[{"id":"103","name":"dave"}, {"id":"104","name":"mark"}]')
SELECT * FROM #t
SELECT * FROM #t
CROSS APPLY OPENJSON(JsonColumn) s
WITH cte AS
(
SELECT *
FROM #t
CROSS APPLY OPENJSON(JsonColumn) s
)
UPDATE cte
SET JsonColumn = JSON_MODIFY(JsonColumn, '$[' + cte.[key] + '].name', 'Joe')
SELECT * FROM #t
-- DROP TABLE #t
It only updates the first element of each array to Joe.
Current result:
[{"id":"101","name":"Joe"}, {"id":"102","name":"cd"}]
[{"id":"103","name":"Joe"}, {"id":"104","name":"mark"}]
Expected
[{"id":"101","name":"Joe"}, {"id":"102","name":"Joe"}]
[{"id":"103","name":"Joe"}, {"id":"104","name":"Joe"}]
Since you want to do this in one transaction, I could not think of any way other than creating another table, storing the exploded values there, and using FOR XML PATH to stitch the value back together. The problem is that you are trying to update a JSON array, and I am not sure how you would update the same row twice with different values. With CROSS APPLY, as you have shown, each array element becomes its own row, and only then can each one be updated to Joe.
Your query updates name = Joe for id = 101 on the first derived row and name = Joe for id = 102 on the second, based on the value column. Since these are two different rows, you see only one change per row in your temp table.
I created one more temp table, #t2, to store those values and used FOR XML PATH to concatenate. The final results you expect come from #t2.
SELECT *
into #t2
FROM #t
CROSS APPLY OPENJSON(JsonColumn) s
select *, json_value (value, '$.name') from #t2
UPDATE #t2
SET value = JSON_MODIFY(value, '$.name', 'Joe')
select t.I ,
JSONValue = concat('[',stuff((select ',' + value from #t2 t1
where t1.i = t.i
for XML path('')),1,1,''),']')
from #t2 t
group by t.I
Output:
I JSONValue
1 [{"id":"101","name":"Joe"},{"id":"102","name":"Joe"}]
Updating original table:
update t
set t.JsonColumn =t2.JSONValue
from #t t
join (select t.I ,
JSONValue = concat('[',stuff((select ',' + value from #t2 t1
where t1.i = t.i
for XML path('')),1,1,''),']')
from #t2 t
group by t.I ) t2 on t.I = t2.i
I think it is impossible to apply multiple updates to one record with a single command, so you need to explode the JSON array into records.
You can do this with a temporary or variable table and a cursor.
-- Declare the Variable Table
DECLARE @JsonTable TABLE (
RecordKey UNIQUEIDENTIFIER,
ArrayIndex INT,
ObjKey NVARCHAR(100),
ObjValue NVARCHAR(1000)
);
-- Fill the Variable Table
INSERT INTO @JsonTable
SELECT TB1.pk as RecordKey,
TB1data.[key] AS ArrayIndex,
TB1dataItem.[key] as ObjKey,
TB1dataItem.[value] as ObjValue
FROM MyTable TB1
CROSS APPLY OPENJSON(JSON_QUERY(TB1.data, '$.list')) TB1data
CROSS APPLY OPENJSON(JSON_QUERY(TB1data.value, '$')) TB1dataItem
WHERE TB1dataItem.[key] = 'name'
-- Declare Cursor and relative variables
DECLARE @recordKey UNIQUEIDENTIFIER,
@recordData NVARCHAR(MAX),
@arrayIndex INT,
@objKey NVARCHAR(100),
@objValue NVARCHAR(1000);
DECLARE JsonCursor CURSOR FAST_FORWARD READ_ONLY FOR
SELECT * FROM @JsonTable;
-- Use Cursor to read any json array item
OPEN JsonCursor;
FETCH NEXT
FROM JsonCursor
INTO @recordKey, @arrayIndex, @objKey, @objValue;
WHILE @@FETCH_STATUS = 0 BEGIN
UPDATE TB1
SET data = JSON_MODIFY(
data,
'$.list[' + CAST(@arrayIndex as VARCHAR(20)) + '].name',
'Joe'
)
FROM MyTable TB1
WHERE TB1.pk = @recordKey;
FETCH NEXT
FROM JsonCursor
INTO @recordKey, @arrayIndex, @objKey, @objValue;
END;
CLOSE JsonCursor;
DEALLOCATE JsonCursor;
Do you need this?
CREATE TABLE #t (
I INT,
JsonColumn NVARCHAR(MAX) CHECK (ISJSON(JsonColumn) > 0)
);
INSERT INTO #t
VALUES (1, '[{"id":"101","name":"John"}, {"id":"102","name":"peter"}]');
INSERT INTO #t
VALUES (2, '[{"id":"103","name":"dave"}, {"id":"104","name":"mark"}]');
SELECT CONCAT('[', STRING_AGG(JSON_MODIFY(JSON_MODIFY('{}', '$.id', j.id), '$.name', 'Joe'), ','), ']')
FROM #t t
CROSS APPLY OPENJSON(JsonColumn) WITH (id INT, name sysname) j
GROUP BY t.I
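Note that this rebuilds each object from only the id and name properties, so any other keys in the original JSON would be dropped. A sketch of writing the result back in one statement, assuming I is unique per row (untested):
UPDATE t
SET JsonColumn = j.NewJson
FROM #t t
CROSS APPLY (
-- rebuild the whole array for this row, with every name set to 'Joe'
SELECT CONCAT('[', STRING_AGG(JSON_MODIFY(JSON_MODIFY('{}', '$.id', x.id), '$.name', 'Joe'), ','), ']')
FROM OPENJSON(t.JsonColumn) WITH (id INT, name sysname) x
) j(NewJson);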
I want to execute this code:
if exists(select * from FileUploadFileTable..sysobjects where xtype='u' and name='##Tmp830963' )
drop table ##Tmp830963
CREATE table ##Tmp830963 (RowId int, Files varchar(MAX), Files_Name NVARCHAR(MAX), Files_Size bigint, Job_Id bigint, Files_Type VARCHAR(MAX), User_id bigint, User_Name NVARCHAR(MAX))

Insert into ##Tmp830963 (RowId, Files, Files_Name, Files_Size, Files_Type)
select A.row_num, A.Items, O.Items, B.Items, C.Items
from(
SELECT Items,row_number() over (order by (select 0)) as row_num
FROM dbo.Func_Split('/9j/AqAAAAB4CgAwAEAAABAAAAAAA', '^') ) A
join
(SELECT Items,row_number() over (order by (select 0)) as row_num
FROM dbo.Func_Split('tt^', '^') ) O on O.row_num=A.row_num
join
(SELECT Items,row_number() over (order by (select 0)) as row_num
FROM dbo.Func_Split('12^', '^'))B on A.row_num=B.row_num
join
(SELECT Items,row_number() over (order by (select 0)) as row_num
FROM dbo.Func_Split('png^', '^'))C
on C.row_num=A.row_num
update ##Tmp830963 set User_Name = 100
update ##Tmp830963 set Job_Id = '20'
update ##Tmp830963 set User_id = '1'

select * from ##Tmp830963

DECLARE @OutputTbl TABLE (ID uniqueidentifier)

INSERT INTO [Uploads] ([file_stream], [name])
OUTPUT INSERTED.stream_id INTO @OutputTbl(ID)
select cast(N'' as xml).value('xs:base64Binary(sql:variable(Files))', 'varbinary(max)'), Files_Name
from ##Tmp830963

while (select count(*) from @OutputTbl) > 0
begin
    INSERT INTO [dbo].[FileDescriptions] ([User_ID], [FileName], Stream_id, [Size], Job_Id)
    select [User_id],
           cast((select MAX(ID) from @OutputTbl) as nvarchar(max)),
           (select MAX(ID) from @OutputTbl),
           Files_Size,
           Job_Id
    from ##Tmp830963
    where RowId = (select top 1 (RowId) from ##Tmp830963)

    delete @OutputTbl where ID = (select MAX(ID) from @OutputTbl)
end
But I get this error:
XQuery [value()]: A string literal was expected
On this line:
cast(N'''' as xml).value(''xs:base64Binary(sql:variable(Files))''
At first sight this looks a bit weird. I'm pretty sure this can be solved differently.
You are trying to convert base64 to varbinary, correct?
Well, the XML-approach is the recommended way to achieve this. However, your problem seems to be situated here:
cast(N'''' as xml).value(''xs:base64Binary(sql:variable(Files))''
This is your attempt to create the needed statement dynamically, hence the doubled single quotes. This will translate to
cast(N'' as xml).value('xs:base64Binary(sql:variable(Files))'
The problem here is: sql:variable() expects the name of a declared variable as a string literal. Furthermore, value() expects two arguments: the XPath and the data type. You'd need something like this:
cast(N'' as xml).value('sql:variable("@Files")','varbinary(max)')
You might need sql:column("SomeColumn") instead, which lets you use the value of a column.
One example to demonstrate this
--We use FOR XML to get the binary 0x1234 as base64
SELECT 0x1234 FOR XML PATH(''); --returns "EjQ="
--Now I place the corresponding base64 string in a string variable
DECLARE @base64 VARCHAR(100)='EjQ=';
--And this is, how XML is returning the binary 0x1234 from the base64-string
SELECT CAST('' AS XML).value('sql:variable("@base64")','varbinary(max)');
And this would return the same and looks a bit easier:
SELECT CAST(@base64 AS XML).value('.','varbinary(max)');
Check this for a tabular approach
The table @base64 is a declared table variable, just use your original table instead:
--Declare a table variable with one string column and insert the base64 of 0x1234
DECLARE @base64 TABLE(Files VARCHAR(100));
INSERT INTO @base64 VALUES('EjQ=');
--use the same trick as above to get the varbinary back
SELECT CAST(Files AS XML).value('.','varbinary(max)')
FROM @base64;
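And, assuming the same decoding behaviour as the sql:variable() example above, a sketch of the sql:column() variant against that table variable:
SELECT CAST('' AS XML).value('sql:column("Files")','varbinary(max)')
FROM @base64;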
I want to generate JSON using T-SQL and store it in a variable. When I assign the JSON to an NVARCHAR(MAX) variable the data is truncated, but it is not truncated if I run the SELECT statement directly.
DECLARE @DateOne DATETIME, @DateTwo DATETIME, @DrillThroughData NVARCHAR(MAX)
SELECT @DateOne = '2016-01-01', @DateTwo = '2017-07-31'
CREATE TABLE #DrillThroughData
(
[Date] DATE
,TotalAmountIncome DECIMAL(18,4)
,TotalAmountRefunded DECIMAL(18,4)
,ProductCostIncome DECIMAL(18,4)
,ProductCostRefunded DECIMAL(18,4)
)
INSERT INTO #DrillThroughData
VALUES('2017-07-13',839.2000,-241.4000,0.0000,0.0000)
;WITH CTE AS
(
SELECT 1 SNo,CAST(CONVERT(NVARCHAR(6),@DateOne,112)+'01' AS DATE) AS StartDate
UNION ALL
SELECT SNo+1 SNo,DATEADD(DAY,1,StartDate ) StartDate
FROM CTE WHERE DATEADD(Day,1,StartDate) <= @DateTwo
)
SELECT StartDate [Date],SNo
INTO #AllDays
FROM CTE
OPTION(MAXRECURSION 0)
-- Data not truncated here.
SELECT ad.[Date]
,FORMAT(ad.[Date],'yyyy-MMM') [Month]
,ISNULL(d.TotalAmountIncome,0) TotalAmountIncome
,ISNULL(d.TotalAmountRefunded,0) TotalAmountRefunded
,ISNULL(d.ProductCostIncome,0) ProductCostIncome
,ISNULL(d.ProductCostRefunded,0) ProductCostRefunded
FROM #DrillThroughData d
RIGHT JOIN #AllDays ad
ON d.[Date] = ad.Date
ORDER BY SNo
FOR JSON AUTO
SET @DrillThroughData = (SELECT ad.[Date]
,FORMAT(ad.[Date],'yyyy-MMM') [Month]
,ISNULL(d.TotalAmountIncome,0) TotalAmountIncome
,ISNULL(d.TotalAmountRefunded,0) TotalAmountRefunded
,ISNULL(d.ProductCostIncome,0) ProductCostIncome
,ISNULL(d.ProductCostRefunded,0) ProductCostRefunded
FROM #DrillThroughData d
RIGHT JOIN #AllDays ad
ON d.[Date] = ad.Date
ORDER BY SNo
FOR JSON AUTO)
-- Data is being truncated even though the variable is nvarchar(max)
SELECT @DrillThroughData
The actual length of the JSON when not truncated is 88441, but it is truncated at 13680.
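A quick diagnostic (not from the original post) to tell whether the variable itself is short or only the displayed result is cut off:
SELECT LEN(@DrillThroughData) AS JsonLength; -- compare with the expected 88441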
Thanks in advance.