SQL query construction as json object from multiple column - sql

I am trying to combine multiple columns into a single nested column in the result, but I have not had success.
How do I get the expected result?
query:
-- NOTE(review): the parenthesized column list below is not valid T-SQL;
-- a nested JSON object requires a correlated subquery with FOR JSON PATH
-- (see the answer that follows).
select SchemaName,([create],[read],[update],[delete]) as [Permissions] from ApplicationRoles for json path;
Expected Result:
{
"SchemaName": "TestSchema",
"Permissions": {
"create": true,
"read": true,
"update": true,
"delete": true
}
}
Thanks in advance.

Put the values you want in an additional array in a subquery:
-- Demo setup: one row with the schema name and individual permission flags.
CREATE TABLE dbo.YourTable(SchemaName sysname,
[create] bit,
[read] bit)
INSERT INTO dbo.YourTable
VALUES ('TestSchema',1,1);
GO
-- The correlated subquery renders the permission columns as a nested JSON
-- object under "Permissions"; the outer FOR JSON emits one object per row.
SELECT YT.SchemaName,
(SELECT YT.[create],
YT.[read]
FOR JSON PATH) AS Permissions --No WITHOUT_ARRAY_WRAPPER as this will result in "Permissions": "{\"create\":true,\"read\":true}"
FROM dbo.YourTable YT
FOR JSON AUTO, WITHOUT_ARRAY_WRAPPER;
GO
DROP TABLE dbo.YourTable;

Related

How to get data from json column in SQL Server that starts with array element

I have a database table that I need to extract data from, where the column of interest has JSON in it. What makes this particularly difficult is that the outermost element of the JSON is '[' & ']', i.e. the parent element is an array. I need to get the value associated with the key 'Name' (which in this case is 'MS220'). However, I'm not able to path correctly to the key I want.
The below JData table is a duplicate copy of the data I need to perform the extract on. Between SELECT OPENJSON, JSON_VALUE, JSON_QUERY etc., how can I retrieve the value I'm looking for?
Below is a couple of selects I've tried but not quite getting it.
-- Repro table: a single NVARCHAR(MAX) column holding a JSON document whose
-- outermost element is an array.
CREATE TABLE JData
(
JsonData nvarchar(max)
)
INSERT INTO JData (JsonData)
VALUES
('[
{
"Categories": [
{
"QuerySourceNames": [
"QAsset"
],
"Id": "eceae85a-ffc6-49f4-8f6a-78ce2b4b274e",
"Name": "emsdba"
}
],
"Id": "525b4f07-0f67-43ac-8070-a0e6c1ceb1b9",
"Name": "MS220"
}
]')
-- Fixed: OPENJSON must be given the JSON text (the JsonData column), not the
-- table name, so it has to be applied per row via CROSS APPLY; the first
-- array element is addressed with the path '$[0]' (not '$.[0]').
SELECT j.[Name]
FROM JData
CROSS APPLY OPENJSON (JsonData,'$[0]')
WITH (
[Name] varchar(10) '$.Name'
) AS j
-- JSON_VALUE returns a scalar, so it needs a path down to one:
-- '$[0].Name' extracts "MS220" directly.
SELECT
JSON_VALUE(JsonData,'$[0].Name') as v
FROM JData
#AaronBertrand: I had to modify the answer a little since the table also has a column labeled [name] as well. Is there a way to UPDATE ParamName to a new value?
-- The table's own [Name] column collides with the JSON 'Name' property, so
-- the JSON value is surfaced under a different alias via an explicit path.
SELECT
t.[Name],
ParamName
FROM
[myDB].[dbo].[myTable] t
CROSS APPLY
OPENJSON (t.params)
WITH
(
Categories nvarchar(max) AS json,
Id uniqueidentifier,
ParamName varchar(10) '$.Name'
);
-- OPENJSON over a JSON array iterates its elements, so each top-level
-- object's Name is projected without needing an explicit '$[0]' path.
SELECT Name FROM dbo.JData
CROSS APPLY OPENJSON (JsonData)
WITH
(
Categories nvarchar(max) AS json,
Id uniqueidentifier,
[Name] varchar(10)
);
Example db<>fiddle

JSON_Query with For Json Path

Please see the table given below. The table contains the json string and need to create a json array with those json string. But When I use JSON_Query and For Json Path it adds additional header. (Alias name or the source column name). How to generate the json array without alias name or source column name.
Please see the example given below.
-- T-SQL table variables are prefixed with @ (# denotes a temp table, which
-- requires CREATE TABLE, not DECLARE); the # here is a formatting artifact.
DECLARE @jsonTbl TABLE (id INT,json VARCHAR(MAX))
INSERT INTO @jsonTbl (id,json) VALUES (1,'{"id":"1A", "names":{"firstname":"Name1"}}')
INSERT INTO @jsonTbl (id,json) VALUES (1,'{"id":"2A", "names":{"firstname":"Name2"}}')
-- JSON_QUERY keeps the stored JSON as JSON (not an escaped string), but
-- FOR JSON AUTO wraps each document under the column alias 'someName'.
SELECT JSON_QUERY(json) AS 'someName'
FROM @jsonTbl
FOR JSON AUTO
--When I use the above select query it returns the data as
[{"SomeName":{"id":"1A", "names":{"firstname":"Name1"}}},{"SomeName":{"id":"2A", "names":
{"firstname":"Name2"}}}]
Formatted JSON
```[
{
"someName":{
"id":"1A",
"names":{"firstname":"Name1"}
}
},
{
"someName":{
"id":"2A",
"names":{
"firstname":"Name2"
}
}
}
]
--But need the result as follows. Do not need someName
[
{
"id":"1A",
"names":{
"firstname":"Name1"
}
},
{
"id":"2A",
"names":{
"firstname":"Name2"
}
}
]```
You can use OPENJSON() together with CROSS APPLY
-- OPENJSON with an explicit schema re-projects each stored JSON document as
-- real columns, so FOR JSON AUTO re-assembles the array without the extra
-- wrapper name. (Variable prefix corrected from # to @.)
SELECT j.[id], j.[names]
FROM @jsonTbl t
CROSS APPLY OPENJSON(t.json, '$') WITH ([id] VARCHAR(100),
[names] NVARCHAR(MAX) AS JSON) j
FOR JSON AUTO
Demo

Saving JSON file to SQL Server Database tables

I am having a nested JSON file as shown below (where condition and rules can be nested to multiple levels)
{
"condition": "and",
"rules": [
{
"field": "26",
"operator": "=",
"value": "TEST1"
},
{
"field": "36",
"operator": "=",
"value": "TEST2"
},
{
"condition": "or",
"rules": [
{
"field": "2",
"operator": "=",
"value": 100
},
{
"field": "3",
"operator": "=",
"value": 12
},
{
"condition": "or",
"rules": [
{
"field": "12",
"operator": "=",
"value": "CA"
},
{
"field": "12",
"operator": "=",
"value": "AL"
}
]
}
]
}
]
}
I want to save this JSON (the condition and rules fields can be nested to multiple levels) into SQL Server tables, and later I want to reconstruct the same JSON from those tables. How can I do this? From these tables I am also planning to produce other JSON formats, which is why I decided to split the JSON into table columns.
I think I need to create a recursive SQL function to do this.
I have created the following tables to save this JSON.
-- Ruleset rows form the recursive hierarchy: each nested "condition" object
-- points at its parent via ParentRuleSetID (0 = root).
CREATE TABLE [Ruleset]
([RulesetID] [BIGINT] IDENTITY(1, 1) NOT NULL PRIMARY KEY,
[Condition] [VARCHAR](50) NOT NULL,
[ParentRuleSetID] [BIGINT] NULL
);
GO
-- Rules rows are the leaf field/operator/value triples of one ruleset.
CREATE TABLE [Rules]
([RuleID] [BIGINT] IDENTITY(1, 1) NOT NULL PRIMARY KEY,
[Fields] [VARCHAR](MAX) NOT NULL,
[Operator] [VARCHAR](MAX) NOT NULL,
[Value] [VARCHAR](MAX) NOT NULL,
[RulesetID] [BIGINT] NULL
FOREIGN KEY REFERENCES [Ruleset](RulesetID)
);
insert script as follows,
-- Seed values corrected to match the source JSON exactly: JSON comparison is
-- case-sensitive ('and'/'or', not 'AND'/'OR'), and the second rule's field
-- is '36', not '364'.
INSERT INTO [Ruleset] values
('and',0),
('or',1),
('or',2)
INSERT INTO [Rules] values
('26','=','TEST1',1),
('36','=','TEST2',1),
('2','=','100',2),
('3','=','12',2),
('12','=','CA',3),
('12','=','AL',3)
Are these tables enough? Will I be able to save all the details?
Attaching the values that i have added to these tables manually.
How can i save this JSON to these table and later will construct the same JSON from these tables via stored procedure or queries ?
please provide suggestions and samples!
Actually, you can declare the column type as NVARCHAR(MAX) and save the JSON string into it.
As JSON is case-sensitive, please check your schema definition and sample data. I see a discrepancy between the definition of the tables, their contents, and your JSON.
All scripts tested on MS SQL Server 2016
I used a temporary table variable in this script, but you can do without it. See an example in SQL Fiddle
-- JSON -> hierarchy table
-- T-SQL variables are prefixed with @; the # prefixes in the original post
-- are a formatting artifact and do not parse.
DECLARE @ExpectedJSON NVARCHAR(MAX) = '
{
"condition": "and",
"rules": [
{
"field": "26",
"operator": "=",
"value": "TEST1"
},
{
"field": "36",
"operator": "=",
"value": "TEST2"
},
{
"condition": "or",
"rules": [
{
"field": "2",
"operator": "=",
"value": 100
},
{
"field": "3",
"operator": "=",
"value": 12
},
{
"condition": "or",
"rules": [
{
"field": "12",
"operator": "=",
"value": "CA"
},
{
"field": "12",
"operator": "=",
"value": "AL"
}
]
}
]
}
]
}
'
-- Staging copy of the parsed rulesets; RulesJSON keeps each level's raw
-- "rules" array for the second pass that extracts the leaf rules.
DECLARE @TempRuleset AS TABLE
(RulesetID BIGINT NOT NULL PRIMARY KEY,
condition VARCHAR(50) NOT NULL,
ParentRuleSetID BIGINT NOT NULL,
RulesJSON NVARCHAR(MAX)
)
-- Recursive CTE: the anchor reads the root object; the recursive member
-- re-opens each nested "rules" array and keeps only the elements that are
-- themselves rulesets (rules IS NOT NULL).
-- NOTE(review): RulesetID + 1 assumes at most one nested ruleset per level;
-- sibling rulesets at the same depth would collide on the PRIMARY KEY.
;WITH ParseRuleset AS (
SELECT 1 AS RulesetID,
p.condition,
p.rules,
0 AS ParentRuleSetID
FROM OPENJSON(@ExpectedJSON, '$') WITH (
condition VARCHAR(50),
rules NVARCHAR(MAX) AS JSON
) AS p
UNION ALL
SELECT RulesetID + 1,
p.condition,
p.rules,
c.RulesetID AS ParentRuleSetID
FROM ParseRuleset AS c
CROSS APPLY OPENJSON(c.rules) WITH (
condition VARCHAR(50),
rules NVARCHAR(MAX) AS JSON
) AS p
where
p.rules IS NOT NULL
)
INSERT INTO @TempRuleset (RulesetID, condition, ParentRuleSetID, RulesJSON)
SELECT RulesetID,
condition,
ParentRuleSetID,
rules
FROM ParseRuleset
-- INSERT INTO Ruleset ...
SELECT RulesetID,
condition,
ParentRuleSetID,
RulesJSON
FROM @TempRuleset
-- INSERT INTO Rules ...
SELECT RulesetID,
field,
operator,
value
FROM @TempRuleset tmp
CROSS APPLY OPENJSON(tmp.RulesJSON) WITH (
field VARCHAR(MAX),
operator VARCHAR(MAX),
value VARCHAR(MAX)
) AS p
WHERE p.field IS NOT NULL
SQL Fiddle
Hierarchy tables -> JSON:
-- Hierarchy tables matching the JSON structure; condition values are stored
-- lower-case because the JSON they must round-trip uses 'and'/'or'.
CREATE TABLE Ruleset
(RulesetID BIGINT IDENTITY(1, 1) NOT NULL PRIMARY KEY,
condition VARCHAR(50) NOT NULL,
ParentRuleSetID BIGINT NULL
);
GO
CREATE TABLE Rules
(RuleID BIGINT IDENTITY(1, 1) NOT NULL PRIMARY KEY,
field VARCHAR(MAX) NOT NULL,
operator VARCHAR(MAX) NOT NULL,
value VARCHAR(MAX) NOT NULL,
RulesetID BIGINT NULL FOREIGN KEY REFERENCES Ruleset(RulesetID)
);
-- Seed data mirroring the sample JSON (note '36', not '364' as in the
-- question's script).
INSERT INTO Ruleset values
('and',0),
('or',1),
('or',2)
INSERT INTO Rules values
('26','=','TEST1',1),
('36','=','TEST2',1),
('2','=','100',2),
('3','=','12',2),
('12','=','CA',3),
('12','=','AL',3)
-- hierarchy table -> JSON
-- GetLeafLevel walks the Ruleset tree top-down from the root, carrying the
-- depth (lvl) and pre-rendering each ruleset's own leaf rules as a JSON
-- fragment via FOR JSON AUTO.
;WITH GetLeafLevel AS
(
SELECT Ruleset.RulesetID,
Ruleset.condition,
Ruleset.ParentRuleSetID,
1 AS lvl,
( SELECT field,
operator,
value
FROM Rules
WHERE Rules.RulesetID = Ruleset.RulesetID
FOR JSON AUTO, WITHOUT_ARRAY_WRAPPER
) AS JSON_Rules
FROM Ruleset
WHERE ParentRuleSetID = 0
UNION ALL
SELECT Ruleset.RulesetID,
Ruleset.condition,
Ruleset.ParentRuleSetID,
GetLeafLevel.lvl + 1,
( SELECT field,
operator,
value
FROM Rules
WHERE Rules.RulesetID = Ruleset.RulesetID
FOR JSON AUTO, WITHOUT_ARRAY_WRAPPER
)
FROM Ruleset
INNER JOIN GetLeafLevel ON Ruleset.ParentRuleSetID = GetLeafLevel.RulesetID
),
-- SELECT * FROM GetLeafLevel -- debug
-- ConcatReverseOrder stitches the fragments back together bottom-up: it
-- starts at the deepest level and nests each level's JSON inside its
-- parent's "rules" array by string concatenation.
-- NOTE(review): the CONCAT-based assembly assumes a single chain of nested
-- rulesets (one child per parent) — confirm before reusing on wider trees.
ConcatReverseOrder AS
(
SELECT GetLeafLevel.*,
CONCAT('{"condition":"',
GetLeafLevel.condition,
'","rules":[',
GetLeafLevel.JSON_Rules,
']}'
) AS js
FROM GetLeafLevel
WHERE GetLeafLevel.lvl = (SELECT MAX(lvl) FROM GetLeafLevel)
UNION ALL
SELECT GetLeafLevel.*,
CONCAT('{"condition":"',
GetLeafLevel.condition,
'","rules":[',
GetLeafLevel.JSON_Rules,
',',
ConcatReverseOrder.js,
']}'
) AS js
FROM GetLeafLevel
INNER JOIN ConcatReverseOrder ON GetLeafLevel.RuleSetID = ConcatReverseOrder.ParentRuleSetID
)
-- SELECT * FROM ConcatReverseOrder -- debug
SELECT js
FROM ConcatReverseOrder
WHERE ParentRuleSetID = 0
SQL Fiddle
I feel like I would need to know more about how you plan to use the data to answer this. My heart is telling me that there is something wrong about storing this information in MSSQL, if not wrong, problematic.
If i had to do it, I would convert these conditions into a matrix lookup table of rotatable events within your branch, so for each conceivable logic branch you could create a row in a lookup to evaluate this.
Depending out on your required output / feature set you can either do something like the above or just throw everything in a NVARCHAR as suggested by rkortekaas.
Your use case really does seem a perfect match for a NoSql Option such as MongoDb, Azure Table storage, or CosmosDB (CosmosDB can be pricey if you don't know your way round it).
Extract from MongoDB page:
In MongoDB, data is stored as documents. These documents are stored in
MongoDB in JSON (JavaScript Object Notation) format. JSON documents
support embedded fields, so related data and lists of data can be
stored with the document instead of an external table.
However, from here on I'm going to assume you are tied to SQL Server for other reasons.
You have stated that you are going to are just putting the document in and getting the same document out, so it doesn't make sense to go to the effort of splitting out all the fields.
SQL Server is much better at handling text fields than it used to be IMO.
Systems I've worked on before have had the following columns (I would write the sql, but I'm not at my dev machine):
Id [Primary Key, Integer, Incrementing index]
UserId [a Foreign Key to what this relates to - probably not 'user' in your case!]
Value [nvarchar(1000) contains the json as a string]
The lookup is easily done based on the foreign key.
However, suppose you want it to be more NoSql like, you could have:
Id [Primary Key, Integer, Incrementing index]
Key [nvarchar(100) a string key that you make, and can easily re-make for looking up the value (e.g. User_43_Level_6_GameData - this column should have an index]
Value [nvarchar(1000) contains the json as a string]
The reason I've kept to having an integer ID is to avoid fragmentation. You could obviously make the Value column bigger.
Json can easily be converted between a json object and a json string. In Javascript, you would use Json Parse and Stringify. If you are using C# you could use the following snippets, though there are many ways to do this task (the objects can be nested as deep as you like)
.NET Object to Json
// Serialize a .NET object to a JSON string (System.Text.Json).
Weather w = new Weather("rainy", "windy", "32");
var jsonString = JsonSerializer.Serialize(w);
Json to .NET Object (C#)
// Deserialize requires the target type: use the generic overload
// (or pass a Type argument) — the original call would not compile.
var w = JsonSerializer.Deserialize<Weather>(jsonString);
UPDATE
Although this is the way I've done things in the past, it looks like there are new options in sql server to handle JSON - OPENJSON and JSONQUERY could be potential options, though I haven't used them myself - they still use nvarchar for the JSON column.

Reading JSON array as one of the SQL table column

I am trying to read a json array into a table, one of the nodes (Languages) in the array is an array in itself, and I am getting null for this particular column (Languages).
Below is the sample json:
-- T-SQL variables use the @ prefix (# would denote a temp table and cannot
-- be DECLAREd as a scalar); the # in the original post is a formatting artifact.
DECLARE @json NVARCHAR(MAX) = '[
{
"Id":1,
"Name":"Test1",
"Languages":["L1", "L2"]
},
{
"Id":2,
"Name":"Test2",
"Languages":["L3", "L4"]
},
{
"Id":3,
"Name":"Test2",
"Languages":["L5", "L6"]
}]'
Below is the query I am using:
-- Languages is an array, so targeting it with a scalar type (VARCHAR) makes
-- OPENJSON return NULL in lax mode — that is why this query "loses" the
-- column; the answer below declares it NVARCHAR(MAX) AS JSON instead.
-- (Variable prefix corrected from # to @.)
SELECT Id
, Name
, Languages
FROM OPENJSON(@json)
WITH (Id INT '$.Id'
, Name VARCHAR(20) '$.Name'
, Languages VARCHAR(200) '$.Languages')
Below is the current result:
However I need the result as below
What am I doing wrong? Please help.
You can use NVARCHAR(max) as json for Language item inside WITH clause.
From Microsoft docs (all the details can be found here):
If you don't specify AS JSON for a column, the function returns a
scalar value (for example, int, string, true, false) from the
specified JSON property on the specified path. If the path represents
an object or an array, and the property can't be found at the
specified path, the function returns null in lax mode or returns an
error in strict mode. This behavior is similar to the behavior of the
JSON_VALUE function.
So your query should look like this:
-- AS JSON makes OPENJSON return the raw array text for Languages instead of
-- NULL. (Variable prefix corrected from # to @.)
SELECT Id
, Name
, Languages
FROM OPENJSON(@json)
WITH (Id INT '$.Id'
, Name VARCHAR(20) '$.Name'
, Languages NVARCHAR(max) as json)
Results:
I hope this query will help you. The result is a little bit different from what you want.
-- Variable prefix corrected from # to @. The original SELECT also had the
-- Name/Languages aliases swapped; they are fixed below so each alias labels
-- the value it actually carries.
DECLARE @json NVARCHAR(MAX) = '{"UserLang":[
{
"Id":1,
"Name":"Test1",
"Languages":["L1", "L2"]
},
{
"Id":2,
"Name":"Test2",
"Languages":["L3", "L4"]
},
{
"Id":3,
"Name":"Test2",
"Languages":["L5", "L6"]
}]}'
-- The outer OPENJSON iterates the UserLang array; the CROSS APPLY fans each
-- element's Languages array out to one row per language.
SELECT
JSON_VALUE(d.value,'$.Id') AS Id,
JSON_VALUE(d.value,'$.Name') AS Name,
l.value AS Languages
FROM OPENJSON(@json,'$.UserLang') AS d CROSS APPLY OPENJSON (d.value,'$.Languages') AS l

SQL Server: Update table based on JSON

I am trying to update rows in table based on JSON I have. JSON has the following structure:
"sensors": [
{
"id": "5afd7160f16819f11814f6e2",
"num": 0,
"name": "AC01",
"enabled": true,
"unit": "Volt AC Phase 1",
"desc": "NAMsdafE",
"lt_disaster": 1,
"gt_disaster": 1,
"lt_high": 1,
"gt_high": 1,
"lt_average": 1,
"gt_average": 1
},...
Table dbo.sensors has same structure + few more columns. To insert such JSON object, not array, into table, I would do it this way:
-- Skeleton: OPENJSON shreds the JSON array into rows that feed the INSERT.
-- (Variable prefix corrected from # to @; '.......' are the poster's
-- placeholders for the remaining columns.)
INSERT INTO dbo.sensors (.......)
SELECT .......
FROM OPENJSON(@json)
WITH (
id varchar(200),
....
);
So I have 2 questions: how do I iterate over each element in the JSON array, and how do I update each row with the same id? Any help would be appreciated :)
1) Once you turn the JSON into a SELECT statement, you can iterate over it using a cursor.
2) You can treat the JSON SELECT statement as a table. That said, you can do insert, update, and delete operations exactly as you would with two tables. For the update case you can use code like this:
-- Skeleton: the CTE wraps the OPENJSON rowset so it can be joined in the
-- UPDATE. Note the CTE needs its own closing parenthesis after the OPENJSON
-- WITH clause, and variables use the @ prefix (both fixed vs. the original).
With Json_data as
( SELECT .......
FROM OPENJSON(@json)
WITH (
id varchar(200),
....
)
)
update S set ....
from dbo.sensors as S
inner join Json_data as JD on JD.id = S.id
First, read the OPENJSON documentation. This feature is available starting with SQL Server 2016.
Next, apply your new knowledge.
--truncated for shortness
--note: wrap JSON string in curly brackets {}
--(variable prefix corrected from # to @)
declare @json nvarchar(max)='{"sensors":[
{
"id": "5afd7160f16819f11814f6e2",
"num": 0,
"name": "AC01",
"more": "unused"
},
{ "id": "5afd7160f16819f11814f6e3",
"num": 0,
"name": "AC02"
}]}
'
--insert...
select * from
openjson(@json,'$.sensors') --note the "path" argument here
with(
id varchar(200),
num int,
name varchar(10)
) json --alias sometimes required.
You can use result (rowset) as it is a table.
-- Shred the incoming JSON once in a CTE, then join it to the target table so
-- all matching rows are updated in a single set-based statement.
-- (All variable prefixes corrected from # to @ — # does not parse in T-SQL.)
;With Json_data as
( SELECT
Evaluation_IDNO,
Rating_IDNO,
Notes_TEXT,
NextSteps_TEXT,
EvaluationCategory_CODE,
EvalType_ID
FROM OPENJSON(@As_EvaluationCategory_Json) WITH
(
Evaluation_IDNO INT N'$.matrixId',
Rating_IDNO VARCHAR(150) N'$.ratingValue',
Notes_TEXT VARCHAR(MAX) N'$.notesText',
NextSteps_TEXT VARCHAR(MAX) N'$.nextStepsText',
EvaluationCategory_CODE VARCHAR(50) N'$.ratingData',
EvalType_ID VARCHAR(4) N'$.evalTypeId'
)
AS EvaluationCategories
)
UPDATE EvaluationRatings_T1 SET
UserCreatedBy_ID=@As_SignedOnWorker_ID,
User_ID=@Ls_User_Id,
WorkspaceCreatedBy_ID=@Ls_WorkspaceCreatedBy_Id,
BeginValidity_DTTM=@Ls_Evaluation_DTTM,
EndValidity_DTTM=@Ld_HighValidity_DTTM,
TransactionEvenSeq_NUMB=@An_TransactionEventSeq_NUMB,
Update_DTTM=@Ld_BeginValiditiy_DTTM,
WorkspaceUpdatedBy_ID=@Ls_WorkspaceUpdatedBy_ID,
Evaluation_IDNO=c1.Evaluation_IDNO,
Rating_IDNO=c1.Rating_IDNO,
Notes_TEXT=c1.Notes_TEXT,
NextSteps_TEXT=c1.NextSteps_TEXT,
EvaluationCategory_CODE=c1.EvaluationCategory_CODE,
EvalType_ID=c1.EvalType_ID
FROM Json_data c1
inner JOIN EvaluationRatings_T1 e1 on e1.Evaluation_IDNO=c1.Evaluation_IDNO
WHERE e1.Evaluation_IDNO=@AS_Evaluation_IDNO;