Add range to an array with JSON_MODIFY - sql

I am trying to append one JSON array to another using JSON_MODIFY.
The situation is: I have JSON data stored in a database column as an array. It looks like this:
declare @base nvarchar(max) = '[{"name":"base"}]';
And I am getting another set of data which is also shaped as an array:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
I am trying to use JSON_MODIFY and JSON_QUERY to append them together, but it gives me unexpected results:
declare @base nvarchar(max) = '[{"name":"base"}]';
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
set @base = JSON_MODIFY(@base,'append $',JSON_QUERY(@test1));
select @base;
Output:
[{"name":"base"}, [{"name":"test1"},{"name":"example1"}]]
But what I want is to use those methods to make it work like a kind of AddRange:
[{"name":"base"},{"name":"test1"},{"name":"example1"}]
I am kind of lost in this process and I don't know where to look for this kind of functionality.
I will use this from a C# service that modifies the data directly through code. That's why I cannot use stored procedures or functions either.
Edit #1:
Regarding the reply from @Salman A: I appreciate the answer, but as I said earlier it is a little difficult to use with the query I run from code, which is:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
UPDATE dbo.ExampleTable
SET [Data] = JSON_MODIFY([Data], 'append $', JSON_QUERY(@test1))
WHERE [UniqueId] = 'some_guid_here'
I have tried to adapt the answer like this:
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
UPDATE dbo.ExampleTable
SET [Data] = (
select [Data] = JSON_MODIFY([Data],'append $',item)
from OPENJSON(@test1)
with ([item] nvarchar(max) '$' as JSON)
)
WHERE [UniqueId] = 'some_id'
Actually, it works if @test1 has only one item, but when @test1 has more than one item it gives the error:
Subquery returned more than 1 value. This is not permitted when the subquery follows = .....
What is the logical way to use this in an UPDATE ... SET subquery?

You can use OPENJSON to convert the array to rows and append items one by one:
declare @base nvarchar(max) = '[{"name":"base"}]';
declare @test1 nvarchar(max) = '[{"name":"test1"},{"name":"example1"}]';
select @base = json_modify(@base, 'append $', item)
from openjson(@test1)
with ([item] nvarchar(max) '$' as json);
select @base;
Returns:
[{"name":"base"},{"name":"test1"},{"name":"example1"}]
Revised answer for update query
If you're using SQL Server 2017+ then a reasonably safe solution is to concatenate the array using STRING_AGG but build individual rows using JSON functions. It is relatively easy to use this idea in an update query:
DECLARE @base NVARCHAR(MAX) = '[{"name":"base"}]';
DECLARE @test NVARCHAR(MAX) = '[{"foo":"bar"},{"baz":"meh"}]';
SELECT '[' + STRING_AGG(jsonstr, ',') WITHIN GROUP (ORDER BY pos) + ']'
FROM (
SELECT value, 1000 + [key] FROM OPENJSON(@base)
UNION ALL
SELECT value, 2000 + [key] FROM OPENJSON(@test)
) AS x(jsonstr, pos);
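If you need this inside the question's UPDATE statement, here is one possible shape (a sketch only, reusing the dbo.ExampleTable and [UniqueId] names from the edit above); CROSS APPLY rebuilds the array for each row:
DECLARE @test NVARCHAR(MAX) = '[{"foo":"bar"},{"baz":"meh"}]';
UPDATE t
SET [Data] = x.newdata
FROM dbo.ExampleTable AS t
CROSS APPLY (
    -- existing items sort first (1000 + key), appended items after (2000 + key)
    SELECT '[' + STRING_AGG(jsonstr, ',') WITHIN GROUP (ORDER BY pos) + ']'
    FROM (
        SELECT value, 1000 + [key] FROM OPENJSON(t.[Data])
        UNION ALL
        SELECT value, 2000 + [key] FROM OPENJSON(@test)
    ) AS s(jsonstr, pos)
) AS x(newdata)
WHERE t.[UniqueId] = 'some_guid_here';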
Alternatively, you can use a recursive CTE that calls JSON_MODIFY multiple times to build the JSON; you can then use the result in an update query:
CREATE TABLE t(
id INT NOT NULL PRIMARY KEY IDENTITY,
data NVARCHAR(MAX)
);
INSERT INTO t(data) VALUES
('[{"name":"1.1"}]'),
('[{"name":"2.1"},{"name":"2.2"}]');
WITH rows(data, pos) AS (
SELECT value, [key]
FROM OPENJSON('[{"foo":"bar"},{"baz":"meh"}]')
), rcte(id, data, pos) AS (
SELECT id, data, -1
FROM t
UNION ALL
SELECT prev.id, JSON_MODIFY(prev.data, 'append $', JSON_QUERY(curr.data)), prev.pos + 1
FROM rcte AS prev
JOIN rows AS curr ON curr.pos = prev.pos + 1
)
UPDATE t
SET data = (
SELECT TOP 1 data
FROM rcte
WHERE id = t.id
ORDER BY pos DESC
);
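For illustration, after the update the rows in t should contain:
id | data
1  | [{"name":"1.1"},{"foo":"bar"},{"baz":"meh"}]
2  | [{"name":"2.1"},{"name":"2.2"},{"foo":"bar"},{"baz":"meh"}]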
Demo on db<>fiddle

Related

Update JSON in the same row multiple times for different JSON paths

I need to update JSON data present in a row, with multiple updates to the same row. Below is the kind of JSON:
{
"secondaries": [
{
"secondaryId": 1,
"primaries": [
{
"primary": 1,
"status": "UNKNOWN"
},
{
"primary": 2,
"status": "UNKNOWN"
}
]
}
]
}
CREATE TABLE testing(
id VARCHAR(100),
json nvarchar(max)
);
INSERT INTO testing values('123', '{"secondaries":[{"secondaryId":1,"primaries":[{"primary":1,"status":"UNKNOWN"},{"primary":2,"status":"UNKNOWN"}]}]}');
I want to update the status of all the primaries to PASSED. So I first created a CTE:
with cte as (select id,
CONCAT('$.secondaries[', t.[key], ']', '.primaries[', t2.[key],
']') as primaryPath
from testing
cross apply openjson(json, '$.secondaries') as t
cross apply openjson(t.value, '$.primaries') as t2
where id = '123'
and json_value(t.value, '$.secondaryId') = 1
)
select * from cte;
This gives me the two paths $.secondaries[0].primaries[0] and $.secondaries[0].primaries[1] as results.
Now if I try to update the records using the SQL query below:
with cte as (select id,
CONCAT('$.secondaries[', t.[key], ']', '.primaries[', t2.[key],
']') as primaryPath
from testing
cross apply openjson(json, '$.secondaries') as t
cross apply openjson(t.value, '$.primaries') as t2
where id = '123'
and json_value(t.value, '$.secondaryId') = 1
)
update testing
set json = JSON_MODIFY(json, cte.primaryPath + '.status', 'PASSED')
from testing
cross join cte
where cte.id = testing.id;
select * from testing;
Only one of the records gets updated. I want all the records to get updated. How can I achieve that?
http://sqlfiddle.com/#!18/b61e1/6
I do have a working solution, but it is cursor-based rather than query-based. I am looking for a way to do it just via the query itself:
OPEN @getid
FETCH NEXT
FROM @getid INTO @id, @primaryPath
WHILE @@FETCH_STATUS = 0
BEGIN
update testing
set json = JSON_MODIFY(json, @primaryPath + '.status', 'PASSED')
where testing.id = @id
FETCH NEXT
FROM @getid INTO @id, @primaryPath
END
CLOSE @getid
DEALLOCATE @getid
If you don't want to rebuild the whole JSON, you can use nested JSON_MODIFY calls.
It gets more complicated because of the doubly nested arrays: you also need STRING_AGG, and JSON_QUERY to prevent double-escaping.
UPDATE t
SET json = JSON_MODIFY(t.json, '$.secondaries', JSON_QUERY(j1.secondaries_new))
FROM testing t
CROSS APPLY (
SELECT '[' + STRING_AGG(JSON_MODIFY(secondaries.value, '$.primaries', JSON_QUERY(j2.primaries_new)), ',') + ']'
FROM OPENJSON(t.json, '$.secondaries') secondaries
CROSS APPLY (
SELECT '[' + STRING_AGG(JSON_MODIFY(primaries.value, '$.status', 'PASSED'), ',') + ']'
FROM OPENJSON(secondaries.value, '$.primaries') primaries
) j2(primaries_new)
) j1(secondaries_new);
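For the sample row from the question, the json column should end up as (for illustration):
{"secondaries":[{"secondaryId":1,"primaries":[{"primary":1,"status":"PASSED"},{"primary":2,"status":"PASSED"}]}]}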
db<>fiddle
The JSON_MODIFY() function doesn't support wildcards in the path parameter, so the possible (...query based...) options are:
Parse, modify and build the JSON content for each row (this approach expects a known and fixed JSON structure).
Generate and execute a dynamic statement with nested JSON_MODIFY() calls.
Parse, modify and build the JSON content for each row:
DECLARE @id varchar(100) = '123'
DECLARE @secondaryId int = 1
UPDATE testing
SET json = (
SELECT
secondaryId,
primaries = CASE
WHEN secondaryId = @secondaryId THEN
(
SELECT [primary], 'PASSED' AS status
FROM OPENJSON(_primaries) WITH ([primary] int '$.primary')
FOR JSON PATH
)
ELSE JSON_QUERY(_primaries)
END
FROM OPENJSON(json, '$.secondaries') WITH (
secondaryId int '$.secondaryId',
_primaries nvarchar(max) '$.primaries' AS JSON
)
FOR JSON PATH, ROOT('secondaries')
)
WHERE (id = @id)
Dynamic statement:
DECLARE @stmt nvarchar(max) = N''
DECLARE @id varchar(100) = '123'
DECLARE @secondaryId int = 1
; WITH cte AS (
SELECT
id,
CONCAT('$.secondaries[', j1.[key], ']', '.primaries[', j2.[key],']') AS primaryPath
FROM testing
CROSS APPLY OPENJSON(json, '$.secondaries') AS j1
CROSS APPLY OPENJSON(j1.[value], '$.primaries') AS j2
WHERE (id = @id) AND JSON_VALUE(j1.[value], '$.secondaryId') = @secondaryId
)
SELECT
@stmt = CONCAT(
N'UPDATE testing SET json = ',
STRING_AGG(N'JSON_MODIFY(', N''),
N'json',
STRING_AGG(CONCAT(N',''', primaryPath, N'.status'',''PASSED'')'), N''),
N' WHERE id = @id'
)
FROM cte
DECLARE @err int
EXEC @err = sp_executesql @stmt, N'@id varchar(100)', @id
IF @err = 0 PRINT 'Success' ELSE PRINT 'Error'
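For the sample row, the generated @stmt should look like the following (illustration, wrapped for readability):
UPDATE testing SET json = JSON_MODIFY(JSON_MODIFY(json
,'$.secondaries[0].primaries[0].status','PASSED')
,'$.secondaries[0].primaries[1].status','PASSED')
WHERE id = @id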

Write a stored procedure which takes an input string and splits it by characters

I am trying to create a stored procedure in SQL Server which will be called from C# Entity Framework.
My main goal is to take a long text string as input, split it by separator characters, and return the list of matching data.
In detail:
- is the separator between the name of a data type and its value
: is the separator between Type and ASIN
, is the separator between two different values
I want to get the list of data filtered by ASIN and Type from this stored procedure. I am getting the full text string in the @DataString variable, but I don't know how I can split the text and run a SELECT to return all of the matching data.
Any idea how to do it? Ask any questions you may have.
Example of long text string:
Type-1:ASIN-NsQf8,ASIN-YhQfu,ASIN-dpQf9,ASIN-rsWf3
The unfinished SQL code:
CREATE PROCEDURE dbo.lk_GetMatchingDataOfThirdparty
@DataString VARCHAR(MAX)
AS
BEGIN
SET NOCOUNT ON;
SELECT *
FROM ThirdPartyData
WHERE ASIN = '@value_get_from_string'
AND Type = '@value_get_from_string'
END
Use a split function:
CREATE FUNCTION [dbo].[split](
@delimited NVARCHAR(MAX),
@delimiter NVARCHAR(100)
) RETURNS @t TABLE (id INT IDENTITY(1,1), val NVARCHAR(MAX))
AS
BEGIN
DECLARE @xml XML
SET @xml = N'<t>' + REPLACE(@delimited,@delimiter,'</t><t>') + '</t>'
INSERT INTO @t(val)
SELECT r.value('.','nvarchar(MAX)') as item
FROM @xml.nodes('/t') as records(r)
RETURN
END
GO
DECLARE @DataString VARCHAR(MAX);
SET @DataString ='Type-1:ASIN-NsQf8,ASIN-YhQfu,ASIN-dpQf9,ASIN-rsWf3'
;WITH cte as (
SELECT a.id as [1], b.id as [2], c.id as [3], c.val
FROM (
SELECT * FROM dbo.split(@DataString, ':')
) a
CROSS APPLY dbo.split(a.val,',') b
CROSS APPLY dbo.split(b.val,'-') c
),
typecte as (
select b.val as [TypeValue]
from cte a
inner join cte b
ON a.[1] = b.[1]
AND a.[2] = b.[2]
AND a.[3]+1 = b.[3] -- Next value
WHERE a.val='Type'
),
asincte as (
select b.val as [ASINValue]
from cte a
inner join cte b
ON a.[1] = b.[1]
AND a.[2] = b.[2]
AND a.[3]+1 = b.[3] -- Next value
WHERE a.val='ASIN'
)
SELECT *
FROM ThirdPartyData
WHERE [ASIN] IN (SELECT [ASINValue] FROM asincte)
AND [Type] IN (SELECT [TypeValue] FROM typecte)
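On SQL Server 2016+ you could also avoid the custom split function and use the built-in STRING_SPLIT instead. A minimal sketch, assuming the input always has the Type-...:ASIN-...,ASIN-... shape shown in the question:
DECLARE @DataString VARCHAR(MAX) = 'Type-1:ASIN-NsQf8,ASIN-YhQfu,ASIN-dpQf9,ASIN-rsWf3';
-- everything before ':' is the type part, e.g. 'Type-1'
DECLARE @TypePart VARCHAR(100) = LEFT(@DataString, CHARINDEX(':', @DataString) - 1);
DECLARE @TypeValue VARCHAR(100) = SUBSTRING(@TypePart, CHARINDEX('-', @TypePart) + 1, LEN(@TypePart));
-- everything after ':' is the comma separated ASIN list
DECLARE @AsinList VARCHAR(MAX) = SUBSTRING(@DataString, CHARINDEX(':', @DataString) + 1, LEN(@DataString));
SELECT *
FROM ThirdPartyData
WHERE [Type] = @TypeValue
AND [ASIN] IN (
    -- each list item looks like 'ASIN-NsQf8'; keep only the part after '-'
    SELECT SUBSTRING(value, CHARINDEX('-', value) + 1, LEN(value))
    FROM STRING_SPLIT(@AsinList, ',')
);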
An oft-overlooked option for logic that isn't really relational is SQL CLR. String manipulation is a good example of something a SQL CLR function can handle much better than a SQL script. In the above example, you could call a SQL CLR function that does the string manipulation on the long text string to return the values you need, then plug those variables into your SELECT statement.

Local variable with multiple value list

I use an Excel connection to query data from SQL Server into Excel.
I have the WHERE clause below in the Excel connection a couple of times, and I need to replace its multiple-value list from time to time. To simplify the replacement, I want to use a local parameter, @Trans. With the local parameter, I only have to change it in one place and all the SQL will use it.
WHERE Type in ('R','D','C')
If it is a single value, the code below works:
DECLARE @TRans CHAR(200)= 'R';
SELECT .....
WHERE Type in (@Trans)
With multiple values, neither of the attempts below works:
DECLARE @TRans CHAR(200)= 'R,D,C';
SELECT .....
WHERE Type in (@Trans)
DECLARE @TRans CHAR(200)= '''R'''+','+'''D'''+','+'''C''';
SELECT .....
WHERE Type in (@Trans)
How do I declare @Trans for a multiple-value list, for example ('R','D','C')? Thank you.
You can use dynamic SQL:
DECLARE @TRans VARCHAR(200)= '''R'',''D'',''C''';
DECLARE @sql VARCHAR(MAX) = '';
SET @sql = 'SELECT * FROM table WHERE Type in (' + @Trans + ');'
EXEC (@sql)
Take note of the quotes around the values in @TRans, since these are character values.
If you want to check the value of @sql and see the constructed SQL statement, replace EXEC (@sql) with PRINT @sql.
Result of @sql:
SELECT * FROM table WHERE Type in ('R','D','C');
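Note that concatenating values into a dynamic SQL string like this is open to SQL injection if @TRans can contain user input, so keep it to trusted, internal values.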
As you can see by now, SQL Server does NOT support macro substitution. This leaves a couple of options. One is to split the string.
If you are not on SQL Server 2016+, here is a quick in-line approach which does not require a table-valued function.
Example
Declare @Trans varchar(max)='R,D,C' -- Notice no single quotes
Select ...
Where Type in (
Select RetVal = LTrim(RTrim(B.i.value('(./text())[1]', 'varchar(max)')))
From (Select x = Cast('<x>' + replace(@Trans,',','</x><x>')+'</x>' as xml).query('.')) as A
Cross Apply x.nodes('x') AS B(i)
)
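On SQL Server 2016+, the built-in STRING_SPLIT achieves the same thing without the XML trick (a sketch in the same style):
Declare @Trans varchar(max)='R,D,C' -- Notice no single quotes
Select ...
Where Type in (Select LTrim(RTrim(value)) From STRING_SPLIT(@Trans, ','))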
You can create a table named LocalParameter and keep local variables there. You can then change what the queries return just by updating the LocalParameter table, without touching the queries themselves.
CREATE TABLE LocalParameter (Trans VARCHAR(MAX))
INSERT INTO LocalParameter
VALUES
(
',R,'
)
With LIKE you can use it like this:
SELECT .....
WHERE (SELECT TOP 1 A.Trans FROM LocalParameter A) LIKE ',' + Type + ','
To change WHERE clause:
UPDATE LocalParameter
SET Trans = ',R,D,C,'
Queries:
SELECT .....
WHERE (SELECT TOP 1 A.Trans FROM LocalParameter A) LIKE ',' + Type + ','
Commas are added at the beginning and end of the stored value so that the LIKE comparison only matches whole list items.
You can use a split method to split the CSV values, as shown below.
DECLARE @delimiter VARCHAR(10)=','
DECLARE @input_string VARCHAR(200)='R,D,C'
;WITH CTE AS
(
SELECT
SUBSTRING(@input_string,0,CHARINDEX(@delimiter,@input_string)) AS ExtractedString,
SUBSTRING(@input_string,CHARINDEX(@delimiter,@input_string) + 1,LEN(@input_string)) AS PartString
WHERE CHARINDEX(@delimiter,@input_string)>0
UNION ALL
SELECT
SUBSTRING(PartString,0,CHARINDEX(@delimiter,PartString)) AS ExtractedString,
SUBSTRING(PartString,CHARINDEX(@delimiter,PartString)+1,LEN(PartString)) AS PartString
FROM CTE WHERE CHARINDEX(@delimiter,PartString)>0
)
SELECT ExtractedString FROM CTE
UNION ALL
SELECT
CASE WHEN CHARINDEX(@delimiter,REVERSE(@input_string))>0
THEN REVERSE(SUBSTRING(REVERSE(@input_string),0,CHARINDEX(@delimiter,REVERSE(@input_string))))
ELSE @input_string END
OPTION (MAXRECURSION 0)
This split method doesn't use any loops, so it will be fast. You can then integrate it with your query as below:
DECLARE @delimiter VARCHAR(10)=','
DECLARE @input_string VARCHAR(200)='R,D,C'
;WITH CTE AS
(
SELECT
SUBSTRING(@input_string,0,CHARINDEX(@delimiter,@input_string)) AS ExtractedString,
SUBSTRING(@input_string,CHARINDEX(@delimiter,@input_string) + 1,LEN(@input_string)) AS PartString
WHERE CHARINDEX(@delimiter,@input_string)>0
UNION ALL
SELECT
SUBSTRING(PartString,0,CHARINDEX(@delimiter,PartString)) AS ExtractedString,
SUBSTRING(PartString,CHARINDEX(@delimiter,PartString)+1,LEN(PartString)) AS PartString
FROM CTE WHERE CHARINDEX(@delimiter,PartString)>0
)
SELECT * FROM [YourTableName] WHERE Type IN
(SELECT ExtractedString FROM CTE
UNION ALL
SELECT
CASE WHEN CHARINDEX(@delimiter,REVERSE(@input_string))>0
THEN REVERSE(SUBSTRING(REVERSE(@input_string),0,CHARINDEX(@delimiter,REVERSE(@input_string))))
ELSE @input_string END
)OPTION (MAXRECURSION 0)
If possible add a new table and then join to it in all your queries:
CREATE TABLE SelectedType
(
[Type] CHAR(1) PRIMARY KEY
)
INSERT INTO SelectedType
VALUES ('R'), ('D'), ('C')
Then your queries become:
SELECT *
FROM MyTable MT
INNER JOIN SelectedType [ST]
ON ST.[Type] = MT.[Type]
If you need to add, update or delete types then update the rows in SelectedType table.
This has the benefit of using set-based queries, is easy to understand, and makes it easy to add, update, or delete the required types.
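For example, to stop including type C you would just run:
DELETE FROM SelectedType WHERE [Type] = 'C'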

T-SQL Pivot Row to Column

I am using SQL Server 2012 and have a table that has the following columns:
ID, Date, CustomFieldName, CustomFieldValue
The CustomFieldName column has 100 distinct values (I know how stupid this sounds), but for the sake of simplicity let's say they are CustomField1, CustomField2, CustomField3.
I would like to create a pivot where the output looks like:
ID, Date, CustomField1, CustomField2, CustomField3, where the max Date of the CustomFieldValues is aggregated.
I have failed horribly at this, but have made some progress (though my max isn't right and I'm getting a lot of wrong data).
Any help would be appreciated!
SELECT [date],[id], [CustomField1], [CustomField2], [CustomField3]
from
(
SELECT [date], [id], [CustomFieldValue], [CustomFieldName],
row_number() over(partition by [CustomFieldName] order by [CustomFieldValue]) rn
from CustomTable
) as st
pivot
(
max([CustomFieldValue])
FOR [CustomFieldName] in ([CustomField1], CustomField2, [CustomField3])
) as pivottable
order by [id]
Hope I got it right: you want to pivot the rows (CustomFieldName1, 2, ... etc.) into columns, so I've made a little script that's ready to run.
I recommend CTEs when it comes to pivoting; it makes things easier. If you want to see the whole structure of the query, just do a select @xSqlString.
set nocount on;
create table
#testTable
(
ID int identity(1,1),
[Date] datetime default getdate(),
CustomFieldName nvarchar(50),
CustomFieldValue date
);
declare
@i int = 0,
@xSqlStringPivot nvarchar(max) = '',
@xSqlString nvarchar(max) = '';
while(@i<=100)
begin
set
@xSqlStringPivot += concat('CustomFieldName',cast(@i as nvarchar(50)),char(13), case when @i<100 then ', ' else '' end);
insert into #testTable
(
CustomFieldName,
CustomFieldValue
)
values
(
concat('CustomFieldName', cast(@i as nvarchar(50))),
dateAdd(day,-@i,getdate())
);
set
@i += 1;
end;
select * from
#testTable
set
@xSqlString =
(
'with ctePiv as
(
select
t.CustomFieldName,
t.CustomFieldValue
from
#testTable t
)
select
*
from
ctePiv
pivot
(
max(customFieldValue) for customFieldName in
(
'+ @xSqlStringPivot +'
)
)p'
);
exec sp_executeSQL @xSqlString
drop table #testTable;
Edit 1
I am populating the test table in the while block; basically I'm iterating from 0 to 100 to fill it with rows. This is just to simulate your case.
while(@i<=100)
begin
set
@xSqlStringPivot += concat('CustomFieldName',cast(@i as nvarchar(50)),char(13), case when @i<100 then ', ' else '' end);
insert into #testTable
(
CustomFieldName,
CustomFieldValue
)
values
(
concat('CustomFieldName', cast(@i as nvarchar(50))),
dateAdd(day,-@i,getdate())
);
set
@i += 1;
end;
@xSqlStringPivot is just a small trick to build the list of elements (CustomFieldName0, CustomFieldName1, etc.) and concatenate it into a dynamic SQL string. Notice that I'm doing this in the while block: I just concatenate 'CustomFieldName' with the current iteration number and a carriage return (char(13)).
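For illustration, @xSqlStringPivot ends up holding a list shaped like:
CustomFieldName0
, CustomFieldName1
, CustomFieldName2
...
, CustomFieldName100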

Compare two list items

I am trying to compare a database field which stores list items (comma separated) with, unfortunately, a variable which is also a list of items.
Example:
In this case, a user can belong to multiple groups, and content access is also allocated to multiple groups.
contentid | group
1         | c,d
2         | a,c
3         | b
So I need to select all content where the user is in groups (a,c). In this case, contentid 1 and 2 should be returned.
Here's a safe but slow solution for SQL 2008
BEGIN
-- setup
DECLARE @tbl TABLE (
[contentid] INT
,[group] VARCHAR(MAX)
)
INSERT INTO @tbl VALUES
(1, 'c,d')
,(2, 'a,c')
,(3, 'd')
-- send your request as simple xml
DECLARE @param XML
SET @param = '<g>a</g><g>c</g>'
-- query
SELECT DISTINCT contentid
FROM @tbl t
INNER JOIN @param.nodes('/g') AS t2(g)
ON ',' + t.[group] + ',' LIKE '%,' + t2.g.value('.', 'varchar(max)') + ',%'
END
You just pass your query in as an XML snippet instead of a comma separated list.
If your group names are single characters, or you can be sure the names are not character subsets of each other (i.e. GroupA, GroupAB), then the query can be optimized to:
ON t.[group] LIKE '%' + t2.g.value('.', 'varchar(max)') + '%'
If you're using an RDBMS without XML parsing capability, you'll have to split the string into a temp table and work with it that way.
You really should not be using comma separated values inside your columns. It would be much better if the [group] column only contained one value and you had repeated entries with a UNIQUE constraint on the composite (contentid, group).
You might find this question and answer useful: How do I split a string so I can access item x?
Or you could always use something like this :
create function SplitString(
@string varchar(max),
@delimiter char(1)
)
returns @items table (item varchar(max))
as
begin
declare @index int set @index = 0
if (@delimiter is null) set @delimiter = ','
declare @prevdelimiter int set @prevdelimiter = 0
while (@index < len(@string)) begin
if (substring(@string, @index, 1) = @delimiter) begin
insert into @items
select substring(@string, @prevdelimiter, @index-@prevdelimiter)
set @prevdelimiter = @index + 1
end
set @index = @index + 1
end
--last item (or only if there were no delimiters)
insert into @items
select substring(@string, @prevdelimiter, @index - @prevdelimiter + 1)
return
end
go
declare @content table(contentid int, [group] varchar(max))
insert into @content
select 1, 'c,d'
union
select 2, 'a,c'
union
select 3, 'b'
declare @groups varchar(max) set @groups = 'a,c'
declare @grouptable table(item varchar(max))
insert into @grouptable
select * from dbo.SplitString(@groups, ',')
select * From @content
where (select count(*) from @grouptable g1 join dbo.SplitString([group], ',') g2 on g1.item = g2.item) > 0
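For the sample data this returns contentid 1 and 2, which matches the expected result from the question.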