I want to display a string in a table format as shown below:
For a string like 'hi,is,1,question,thanks,.,.,n'
I need this result:
column1 column2 column3 column4 ..... column
hi is 1 question ..... n
DECLARE @string VARCHAR(MAX);
SET @string = 'hi,is,1,question,thanks,.,.,n';
DECLARE @SQL VARCHAR(MAX);
SET @SQL = 'SELECT ''' + REPLACE(@string, ',', ''',''') + '''';
EXEC (@SQL);
Result:
Add SELECT ' at the beginning and ' at the end of the string.
Replace every , inside the string with ','.
So the string 'hi,is,1,question,thanks,.,.,n' is replaced by SELECT 'hi','is','1','question','thanks','.','.','n'.
That statement is then executed as a SQL query.
PS: If you want to use it on a column you will have to combine it with a CURSOR.
Update
DECLARE @table TABLE
(
    ID INT IDENTITY,
    string VARCHAR(MAX)
);
INSERT INTO @table
VALUES
('This,is,a,string,,n,elements,..');
INSERT INTO @table
VALUES
('And,one,more');
INSERT INTO @table
VALUES
('Ugly,but,works,,,Yay!,..,,,10,11,12,13,14,15,16,17,18,19,..');
SELECT * FROM @table;
DECLARE @string_to_split VARCHAR(MAX);
DECLARE @sql_query_to_execute VARCHAR(MAX);
DECLARE @max_elements INT, @id INT, @i INT;
SET @i = 1;
DECLARE string_cursor CURSOR FOR SELECT ID, string FROM @table;
SELECT @max_elements = MAX(LEN(string) - LEN(REPLACE(string, ',', ''))) + 1 -- find the max number of elements
FROM @table;
IF OBJECT_ID('tempdb..##my_temp_table_for_splitted_columns') IS NOT NULL -- create a new temp table with the right number of columns
    DROP TABLE ##my_temp_table_for_splitted_columns;
SET @sql_query_to_execute = 'create table ##my_temp_table_for_splitted_columns ( ID int,';
WHILE @i <= @max_elements
BEGIN
    SET @sql_query_to_execute = @sql_query_to_execute + ' Col' + CAST(@i AS VARCHAR(MAX)) + ' varchar(25), ';
    SET @i = @i + 1;
END;
SELECT @sql_query_to_execute = SUBSTRING(@sql_query_to_execute, 1, LEN(@sql_query_to_execute) - 1) + ')';
EXEC (@sql_query_to_execute);
/* Split the string for each row */
OPEN string_cursor;
FETCH NEXT FROM string_cursor
INTO @id,
     @string_to_split;
WHILE @@FETCH_STATUS = 0
BEGIN
    SET @i = LEN(@string_to_split) - LEN(REPLACE(@string_to_split, ',', '')) + 1; -- number of elements in the current string
    WHILE @i < @max_elements
    BEGIN
        SET @string_to_split = @string_to_split + ','; -- pad the missing columns
        SET @i = @i + 1;
    END;
    SET @sql_query_to_execute = 'SELECT ' + CAST(@id AS VARCHAR(MAX)) + ',''' + REPLACE(@string_to_split, ',', ''',''') + '''';
    INSERT INTO ##my_temp_table_for_splitted_columns -- insert the result into the temp table
    EXEC (@sql_query_to_execute);
    FETCH NEXT FROM string_cursor
    INTO @id,
         @string_to_split;
END;
CLOSE string_cursor;
DEALLOCATE string_cursor;
SELECT *
FROM ##my_temp_table_for_splitted_columns;
This is not trivial. You will find a lot of examples of how to split a string into a set of fragments, and a lot of examples of how to pivot a row set into a single row. But - and this adds quite some difficulty - you have an unknown count of columns. There are three approaches:
Split this and return your set with a known maximum of columns
Use a dynamically created statement and use EXEC. But this will not work in VIEWs or iTVFs, nor will it work against a table.
Instead of a column list you return a generic container like XML
With a known maximum of columns
One example of the first approach was this:
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT('Column',A.[key]+1) AS ColumnName
FROM OPENJSON('["' + REPLACE(@str,',','","') + '"]') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(Column1,Column2,Column3,Column4,Column5,Column6,Column7,Column8,Column9 /*add as many as you need*/)
) p
Hint: My approach to split the string uses OPENJSON, not available before version 2016. But there are many other approaches you'll find easily. It's just an example to show you the combination of a splitter with PIVOT using a running index to build up a column name.
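For servers older than 2016, here is a minimal sketch of the same idea with an XML-based splitter instead of OPENJSON. This is only an assumption-laden example: it relies on the list containing no characters that need XML escaping, and on ROW_NUMBER returning the fragments in document order (which is the usual behaviour but is not guaranteed).
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
SELECT p.*
FROM
(
    SELECT n.x.value('.', 'varchar(1000)') AS [value]
          ,CONCAT('Column', ROW_NUMBER() OVER (ORDER BY (SELECT NULL))) AS ColumnName
    FROM (SELECT CAST('<x>' + REPLACE(@str, ',', '</x><x>') + '</x>' AS XML)) AS src(casted)
    CROSS APPLY src.casted.nodes('/x') AS n(x)
) t
PIVOT
(
    MAX(t.[value]) FOR ColumnName IN(Column1,Column2,Column3,Column4,Column5,Column6,Column7,Column8,Column9 /*add as many as you need*/)
) p;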
Unknown count of columns
And the same example with a dynamically created column list was this:
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
DECLARE @CountElements INT=LEN(@str)-LEN(REPLACE(@str,',',''))+1;
DECLARE @columnList NVARCHAR(MAX)=
STUFF((
SELECT TOP(@CountElements)
CONCAT(',Column',ROW_NUMBER() OVER(ORDER BY (SELECT 1)))
FROM master..spt_values /*has a lot of rows*/
FOR XML PATH('')
),1,1,'');
DECLARE @Command NVARCHAR(MAX)=
N'SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT(''Column'',A.[key]+1) AS ColumnName
FROM OPENJSON(''["'' + REPLACE(''' + @str + ''','','',''","'') + ''"]'') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(' + @columnList + ')
) p;';
EXEC(@Command);
Hint: The statement created is exactly the same as above. But the column list in the pivot's IN is created dynamically. This will work with (almost) any count of words generically.
If you need more help, please use the edit option of your question and provide some more details.
An inlineable approach for a table returning a generic container
If you need this against a table, you might try something along these lines:
DECLARE @tbl TABLE(ID INT IDENTITY, YourList NVARCHAR(MAX));
INSERT INTO @tbl VALUES('This,is,a,string,with,n,elements,...')
                      ,('And,one,more');
SELECT *
      ,CAST('<x>' + REPLACE((SELECT t.YourList AS [*] FOR XML PATH('')),',','</x><x>') + '</x>' AS XML) AS Splitted
FROM @tbl t;
This will return your list as an XML like
<x>This</x>
<x>is</x>
<x>a</x>
<x>string</x>
<x>with</x>
<x>n</x>
<x>elements</x>
<x>...</x>
You can grab - if needed - each element by its index like here
TheXml.value('/x[1]','nvarchar(max)') AS Element1
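A minimal sketch putting this together against the table variable from above (the alias Splitted, the column names Element1..Element3, and the three-element cut-off are just for illustration):
DECLARE @tbl TABLE(ID INT IDENTITY, YourList NVARCHAR(MAX));
INSERT INTO @tbl VALUES('This,is,a,string,with,n,elements,...')
                      ,('And,one,more');
SELECT t.ID
      ,s.Splitted.value('/x[1]','nvarchar(max)') AS Element1
      ,s.Splitted.value('/x[2]','nvarchar(max)') AS Element2
      ,s.Splitted.value('/x[3]','nvarchar(max)') AS Element3
FROM @tbl t
CROSS APPLY (SELECT CAST('<x>' + REPLACE((SELECT t.YourList AS [*] FOR XML PATH('')),',','</x><x>') + '</x>' AS XML)) AS s(Splitted);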
Related
I am trying to write a SQL query that adds a certain number of empty columns, based on the number of rows in a related table (t1), for a Crystal Report. These columns should have the name of the dataset as their header.
So it should look something like this:
However I would need to change the script each time a new row gets added (e.g. opening a store - not very often, but it does happen).
I thought about using the pivot function, but I believe the number of rows must be defined - plus, there is no calculation / aggregation happening.
Does anybody have an idea on how to solve this?
As Larnu already mentioned, dynamic SQL would be one way to go. I would suggest using a combination of XML PATH and dynamic SQL. Here is an example:
DECLARE @colList VARCHAR(MAX) = (SELECT STUFF((SELECT ',NULL as t1_row' + CAST(col1 AS VARCHAR(3))
                                               FROM MyTable
                                               FOR XML PATH('')), 1, 1, '') AS Txt
                                );
DECLARE @stmt VARCHAR(MAX) = 'SELECT Col1, Col2, Col3, ' + @colList + ' FROM MyTable';
EXEC (@stmt);
I was able to achieve the result using dynamic SQL.
The Script looks something like this:
DECLARE @STRSQL NVARCHAR(MAX) = 'WITH a AS (SELECT ';
DECLARE @Kst nvarchar(6);
DECLARE @Markt NVARCHAR(30);
DECLARE @SCHEMA_NAME VARCHAR(50) = 'XTRADE';
DECLARE C1 CURSOR FOR
    SELECT NUMMER, BEZEICHNUNG
    from XTRADE.KUNDE
    where NUMMER > 99 and NUMMER not in (194, 196, 198)
      and (DATUM_SCHLIESSUNG > GETDATE() or DATUM_SCHLIESSUNG is null)
    order by BEZEICHNUNG
OPEN C1
PRINT @Kst + ' ' + @Markt
FETCH NEXT
FROM C1 into @Kst, @Markt
while @@FETCH_STATUS = 0
BEGIN
    SET @STRSQL = @STRSQL + 'null as [' + @Markt + '], '
    FETCH NEXT
    FROM C1 into @Kst, @Markt
END
CLOSE C1
DEALLOCATE C1;
SET @STRSQL = left(@STRSQL, len(@STRSQL) - 1) + ')'
DECLARE @Statement nvarchar(max) = ', b as (select 1 as Col1, 1 as Col2, 5 as Col3 union all select 2,2,12 union all select 3, 3, 42)';
DECLARE @Exec nvarchar(max) = @STRSQL + @Statement + 'select * from b cross join a';
print @Exec;
exec sp_executesql @Exec
I am trying to write a stored procedure in SQL Server which will:
Take a list of integers as input (let's assume these integers are "profile_id" values)
pick up all the table names that have a column named "profile_id" into a cursor
loop through the cursor and print the profile_id value when it matches one of the values in the input list of params.
Now the problem is: I am executing the procedure like this:
EXEC dbo.de_dup '1234,2345';
and getting a syntax error when trying to execute the commented out line below (Please see the procedure):
set @id = (select profile_id from @tname where profile_id in @a_profile_id );
Questions:
What would be the right way of executing and setting the value inside a cursor?
What is way (in our case) to pass a list of integers to this procedure?
This is my procedure:
ALTER PROCEDURE dbo.de_dup
(@a_profile_id nvarchar(MAX))
AS
DECLARE @tname VARCHAR(max),
        @id int;
DECLARE tables_cursor CURSOR FOR
    SELECT
        a.TABLE_CATALOG + '.' + a.TABLE_SCHEMA + '.' + a.TABLE_NAME AS table_name
    FROM
        JobApp.INFORMATION_SCHEMA.COLUMNS a
    LEFT OUTER JOIN
        JobApp.INFORMATION_SCHEMA.VIEWS b ON a.TABLE_CATALOG = b.TABLE_CATALOG
                                         AND a.TABLE_SCHEMA = b.TABLE_SCHEMA
                                         AND a.TABLE_NAME = b.TABLE_NAME
    WHERE
        a.COLUMN_NAME = 'profile_id'
    GROUP BY
        a.TABLE_CATALOG, a.TABLE_SCHEMA, a.TABLE_NAME, a.COLUMN_NAME;
OPEN tables_cursor;
FETCH NEXT FROM tables_cursor INTO @tname;
WHILE @@FETCH_STATUS = 0
BEGIN
    PRINT @a_profile_id;
    PRINT @tname;
    --set @id = (select profile_id from @tname where profile_id in @a_profile_id);
    --PRINT 'id : ' + @id;
    FETCH NEXT FROM tables_cursor INTO @tname;
END;
CLOSE tables_cursor;
DEALLOCATE tables_cursor;
GO
Please let me know if I should provide more clarification. Thanks in advance.
This solution uses dynamic SQL; as far as I know, dynamic SQL is needed when the table name is held in a variable.
DBFIDDLE working code
Query:
CREATE PROCEDURE dbo.de_dup (@a_profile_id NVARCHAR(MAX))
AS
BEGIN
    DECLARE @tname VARCHAR(max)
           ,@id INT
           ,@dynamicSQL NVARCHAR(MAX);
    DECLARE @matched_tables TABLE (Name NVARCHAR(255));
    DECLARE @matched_profileIds TABLE (profile_id INT);
    DECLARE @profile_ids NVARCHAR(MAX) = @a_profile_id;
    INSERT INTO @matched_tables
    SELECT DISTINCT a.TABLE_SCHEMA + '.' + a.TABLE_NAME AS table_name
    FROM INFORMATION_SCHEMA.COLUMNS a
    WHERE a.COLUMN_NAME = 'profile_id';
    WHILE EXISTS (
            SELECT 1
            FROM @matched_tables
            )
    BEGIN
        SELECT TOP 1 @tname = [Name]
        FROM @matched_tables;
        SET @dynamicSQL = CONCAT (
                'select profile_id from '
                ,@tname
                ,' WHERE '
                ,''','
                ,@profile_ids
                ,','''
                ,' LIKE '
                ,'''%,'
                ,''''
                ,' + CAST(profile_id AS NVARCHAR(MAX)) + '
                ,''',%'
                ,''''
                );
        PRINT @dynamicSQL;
        INSERT INTO @matched_profileIds
        EXEC (@dynamicSQL);
        DELETE
        FROM @matched_tables
        WHERE [Name] = @tname;
    END
    SELECT *
    FROM @matched_profileIds;
END
The dynamic SQL that gets formed is:
SELECT profile_id
FROM dbo.TestTable
WHERE ',123,456,789,1011,1213,' LIKE '%,' + CAST(profile_id AS NVARCHAR(MAX)) + ',%'
So I have solved a similar issue with a table-valued function called Split. It splits a delimited list into rows in a table, which you can then JOIN or use as a subquery in your code.
CREATE FUNCTION [dbo].[Split]
(
    @char_array varchar(500), @delimiter char(1)
)
RETURNS
@parsed_array table
(
    Parsed varchar(50)
)
AS
BEGIN
    DECLARE @parsed varchar(50), @pos int
    SET @char_array = LTRIM(RTRIM(@char_array)) + @delimiter
    SET @pos = CHARINDEX(@delimiter, @char_array, 1)
    IF REPLACE(@char_array, @delimiter, '') <> ''
    BEGIN
        WHILE @pos > 0
        BEGIN
            SET @parsed = LTRIM(RTRIM(LEFT(@char_array, @pos - 1)))
            IF @parsed <> ''
            BEGIN
                INSERT INTO @parsed_array (Parsed)
                VALUES (@parsed)
            END
            SET @char_array = RIGHT(@char_array, LEN(@char_array) - @pos)
            SET @pos = CHARINDEX(@delimiter, @char_array, 1)
        END
    END
    RETURN
END
GO
You would use it like so
SELECT f.Parsed INTO #s FROM dbo.Split(@a_profile_id, ',') f;
Then in your query (only the relevant part for brevity)
select profile_id from @tname where profile_id in (select Parsed from #s);
I left out the set @id = because that will produce unpredictable results for the value of @id if the select statement returns multiple results. But you indicated this is not the actual code anyway so...
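Since @tname only holds the table's name, the query itself still has to be built as dynamic SQL; a minimal sketch of how the Split output could plug into the cursor loop from the question (names follow the question's procedure, the local variable @dyn is just for illustration, and #s is assumed to have been filled as shown above, before the loop starts):
-- inside the WHILE @@FETCH_STATUS = 0 loop of dbo.de_dup
DECLARE @dyn NVARCHAR(MAX) =
    N'SELECT profile_id FROM ' + @tname +
    N' WHERE profile_id IN (SELECT CAST(Parsed AS INT) FROM #s);';
EXEC (@dyn);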
Disclaimer: I got the meat of the Split function from someone else online. If I could remember who I would attribute it properly.
I am trying to convert a string such as '1,2,3' to 'a,b,c' with this answer:
select stuff(
(
select ',' + realname from sys_user
where ','+'1,2,3'+',' like '%,'+cast(u_id as varchar(10))+',%' for xml path('')
),1,1,'')
CHARINDEX does the job well, but I want to create a more general function so that I can do this conversion for any such relation.
I tried a function:
create function [dbo].[fn_enum2str]
(
    @enum as varchar(1000),
    @table_name as varchar(100),
    @origin_field as varchar(100),
    @target_field as varchar(100)
)
returns varchar(1000)
as
begin
    declare @result varchar(1000)
    declare @sqlstr nvarchar(1000)
    set @sqlstr = 'set @result = ('
    set @sqlstr = @sqlstr + 'select stuff('
    set @sqlstr = @sqlstr + '(select '','' + ' + @target_field + ' from ' + @table_name
    set @sqlstr = @sqlstr + ' where '',' + @enum + ','' like ''%,''+cast(' + @origin_field + ' as varchar)+'',%'' for xml path(''''))'
    set @sqlstr = @sqlstr + ',1,1,''''))'
    exec(@sqlstr)
    return @result
end
It failed with an error; as you know, it is not allowed to exec dynamic SQL inside a function.
I want to be able to write:
select dbo.fn_enum2str(a.uids,'sys_user','u_id', 'realname') from my_table a
--output 'a,b,c'
So, for my question: how can I create a function or a proc to handle this?
Assuming you have SQL Server 2016, you can use STRING_SPLIT like this:
Test data
CREATE TABLE [dbo].[stringlist]([Numbers] [nvarchar](50) NULL)
Insert into dbo.Stringlist(numbers)
values('1,2,3,4,5,10')
SQL Function
alter function dbo.HinkyBase26( @Value as varchar(250) ) returns VarChar(250) as
begin
    --declare @Value as varchar(50) = '13,14,1,2,5,14'
    -- Notes: 'A' = 0. Negative numbers are not handled.
    declare @Result as VarChar(250) = '';
    declare @stringsplit table (numbers nvarchar(50), Letters varchar(1))
    insert into @stringsplit(numbers, Letters)
    select numbers = @Value, CHAR(64 + value) as Letters from string_split(@Value, ',')
    select @Result = Letter from (
        select numbers, Letter = STUFF((Select ', ' + Letters
                                        from @stringsplit b
                                        where b.numbers = a.numbers
                                        FOR XML PATH('')), 1, 2, '')
        from @stringsplit a
        group by numbers
    ) z
    return @Result
end
Execution of function
SELECT TOP (1000) [Numbers],dbo.HinkyBase26(Numbers)
FROM [LegOgSpass].[dbo].[stringlist]
SQL Stored Proc
Create PROC dbo.usp_convertnumberstostring
@stringvalue nvarchar(250)
AS
BEGIN
Create table #stringsplit (numbers nvarchar(50),Letters varchar(1))
insert into #stringsplit(numbers,Letters)
SELECT Numbers = @stringvalue, CHAR(64 + value) as Letters
from string_split(@stringvalue, ',')
select numbers,Letter = STUFF((Select DISTINCT ', ' + Letters
from #stringsplit b
where b.numbers = a.numbers
FOR XML PATH('')),1,2,'')
from #stringsplit a
group by numbers
drop table #stringsplit
END
Execute SP
DECLARE @RC int
DECLARE @stringvalue nvarchar(250) = '1,5,6'
-- TODO: Set parameter values here.
EXECUTE @RC = [dbo].[usp_convertnumberstostring]
    @stringvalue
GO
Result
SQL Script
Create table #stringsplit (numbers nvarchar(50),Letters varchar(1))
insert into #stringsplit(numbers,Letters)
SELECT Numbers,CHAR(64 + value) as Letters
FROM [LegOgSpass].[dbo].[stringlist] a
cross apply string_split(numbers,',')
select numbers,Letter = STUFF((Select DISTINCT ', ' + Letters
from #stringsplit b
where b.numbers = a.numbers
FOR XML PATH('')),1,2,'')
from #stringsplit a
group by numbers
Drop table #stringsplit
CREATE function [dbo].[fn_enum2str]
(
    @enum as varchar(1000),
    @table_name as varchar(100)
)
returns varchar(1000)
as
begin
    declare @result varchar(1000)
    if @enum is null
        return ''
    if @table_name = 'sys_user'
        set @result = (
            select stuff(
                (
                    select ',' + realname from sys_user
                    where ',' + @enum + ',' like '%,' + cast(u_id as varchar(10)) + ',%' for xml path('')
                ), 1, 1, ''
            )
        )
    if @table_name = 'sys_attachment'
        set @result = (
            select stuff(
                (
                    select ',/' + filepath from sys_attachment
                    where ',' + @enum + ',' like '%,' + cast(aid as varchar(10)) + ',%' for xml path('')
                ), 1, 1, ''
            )
        )
    return @result
end
GO
The only way to deal with it that I can think of is to switch which SQL will be executed using a flag; when another relation appears, add it to the switch list.
select
dbo.fn_enum2str(a.uids, 'sys_user') as names,
dbo.fn_enum2str(a.attachids, 'sys_attachment') as filepaths
from my_table a
So the calls can be combined. Yes, it is difficult to remember STUFF or FOR XML PATH or LISTAGG (Oracle), and it results in a long SQL statement, and I am lazy. 😄
If you have a better answer, please tell me. Thanks.
I have a table sdata with 35 columns (id, name, TRx1, TRx2, TRx3, TRx4, ..., TRx30, city, score, total).
I want to fetch data from the TRx1, ..., TRx30 columns.
Can I use a loop here?
I did following code:
DECLARE @flag INT
DECLARE @sel varchar(255)
DECLARE @frm varchar(255)
SET @flag = 1;
SET @sel = 'select TRx';
SET @frm = ' from sdata';
exec(@sel +
    (WHILE @flag <= 5
        @flag
        SET @flag = @flag + 1)
    + @frm)
What am I doing wrong? And how can I resolve this?
If your table name is sdata, this code should work for you:
-- Grab the names of all the remaining columns
DECLARE @sql nvarchar(MAX);
DECLARE @columns nvarchar(MAX);
SELECT @columns = STUFF ( ( SELECT N'], [' + name
                            FROM sys.columns
                            WHERE object_id = (select top 1 object_id FROM sys.objects where name = 'sdata')
                              AND name LIKE 'TRx%' -- To limit which columns
                            ORDER BY column_id
                            FOR XML PATH('')), 1, 2, '') + ']';
PRINT @columns
SELECT @sql = 'SELECT ' + @columns + ' FROM sdata';
PRINT @sql;
EXEC (@sql);
Note I included PRINT statements so you could see what's going on. You might want to comment out the EXEC while testing.
This would be much easier to do by just copy/pasting the column names and editing them to be the correct ones. However, if you must do it this way, I do not advise using a loop at all. This method uses a tally table to generate the columns you want to select (in this example, columns 1 through 30, but that can be changed), then generates a dynamic SQL statement to execute against the SData table:
Declare @From Int = 1,
        @To Int = 30,
        @Sql NVarchar (Max)
Declare @Columns Table (Col Varchar (255))
;With Nums As
(
    Select *
    From (Values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9)) As V(N)
), Tally As
(
    Select Row_Number() Over (Order By (Select Null)) As N
    From Nums A --10
    Cross Join Nums B --100
    Cross Join Nums C --1000
)
Insert @Columns
Select 'TRx' + Cast(N As Varchar)
From Tally
Where N Between @From And @To
;With Cols As
(
    Select (
        Select QuoteName(Col) + ',' As [text()]
        From @Columns
        For Xml Path ('')
    ) As Cols
)
Select @Sql = 'Select ' + Left(Cols, Len(Cols) - 1) + ' From SData'
From Cols
--Select @Sql
Execute (@Sql)
Note: The --Select @Sql section is there to preview the generated query before executing it.
You can select the column names like this:
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'my name here'
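If you are on SQL Server 2017 or later (an assumption), a minimal sketch that turns that column list into a dynamic SELECT restricted to the TRx columns (the variable names @cols and @sql are just for illustration):
DECLARE @cols NVARCHAR(MAX), @sql NVARCHAR(MAX);
SELECT @cols = STRING_AGG(QUOTENAME(COLUMN_NAME), ', ') WITHIN GROUP (ORDER BY ORDINAL_POSITION)
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'sdata'
  AND COLUMN_NAME LIKE 'TRx%';
SET @sql = N'SELECT ' + @cols + N' FROM sdata;';
EXEC sys.sp_executesql @sql;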
After a lot of searching and piecing together the very excellent techniques for converting result sets using the FOR XML and .nodes() commands that are around the web, I was able to create this single query (not a stored procedure) which does a reasonably good job of converting any arbitrary SQL query to a JSON array.
The query will encode each data row as a single JSON object with a leading comma.
The data rows are wrapped by brackets and the whole result set is then expected to be exported to a file.
I'd like to see if anyone out there can see ways to improve its performance?
Here's the query with a sample table:
declare @xd table (col1 varchar(max), col2 int, col3 real, colNull int)
insert into @xd
select '', null, null, null
UNION ALL select 'ItemA', 123, 123.123, null
UNION ALL select 'ItemB', 456, 456.456, null
UNION ALL select '7890', 789, 789.789, null
select '[{}'
UNION ALL
select ',{' + STUFF((
(select ','
+ '"' + r.value('local-name(.)', 'varchar(max)') + '":'
+ case when r.value('./@xsi:nil', 'varchar(max)') = 'true' then 'null'
when isnumeric(r.value('.', 'varchar(max)')) = 1
then r.value('.', 'varchar(max)')
else '"' + r.value('.', 'varchar(max)') + '"'
end
from rows.nodes('/row/*') as x(r) for xml path(''))
), 1, 1, '') + '}'
from (
-- Arbitrary query goes here, (fields go where t.* is, table where #xd t is)
select (select t.* for xml raw,type,elements XSINIL) rows
from @xd t
) xd
UNION ALL
select ']'
My biggest critique of it is that it's insanely slow.
It currently takes about 3:30 for ~42,000 rows.
My other big critique is that it currently assumes that everything that looks like a number is a number. It doesn't try to discover column type in the least (and I'm not even sure if it can).
A final minor critique is that the first data row will have a comma up front and technically it shouldn't. To compensate for that it requires that empty JSON object in the first row that starts the JSON array.
Other critiques (preferably with solutions) invited, the only real limitation I have is that the solution be decently repeatable on many arbitrary SQL queries without having to explicitly identify the column names.
I'm using SQL Server 2012.
Thanks and to anyone else like me who was looking for a generalized SQL Results -> JSON Array converter, ENJOY!
I say if you really want to kick up performance, use metaprogramming. The example below tries this with 40,000 rows and returns results in less than a second (not counting inserting the initial 40k rows, which in this example only takes about 2 seconds). It also takes into account your data types to not enclose numbers in quotes.
declare @xd table (col1 varchar(max), col2 int, col3 real, colDate datetime, colNull int);
declare @i int = 0;
while @i < 10000 begin
    set @i += 1;
    insert into @xd
    select '', null, null, null, null
    union all select 'ItemA', 123, 123.123, getDate(), null
    union all select 'ItemB', 456, 456.456, getDate(), null
    union all select '7890', 789, 789.789, getDate(), null;
end;
select *
into #json_base
from (
-- Insert SQL Statement here
select * from @xd
) t;
declare @columns table (
id int identity primary key,
name sysname,
datatype sysname,
is_number bit,
is_date bit);
insert into @columns(name, datatype, is_number, is_date)
select columns.name, types.name,
case when number_types.name is not NULL
then 1 else 0
end as is_number,
case when date_types.name is not NULL
then 1 else 0
end as is_date
from tempdb.sys.columns
join tempdb.sys.types
on (columns.system_type_id = types.system_type_id)
left join (values ('int'), ('real'), ('numeric'),
('decimal'), ('bigint'), ('tinyint')) as number_types(name)
on (types.name = number_types.name)
left join (values ('date'), ('datetime'), ('datetime2'),
('smalldatetime'), ('time'), ('datetimeoffset')) as date_types(name)
on (types.name = date_types.name)
where object_id = OBJECT_ID('tempdb..#json_base');
declare @field_list varchar(max) = STUFF((
select '+'',''+' + QUOTENAME(QUOTENAME(name, '"') + ':', '''')
+ '+' + case when is_number = 1
then 'COALESCE(LTRIM('
+ QUOTENAME(name) + '),''null'')'
when is_date = 1
then 'COALESCE(QUOTENAME(LTRIM(convert(varchar(max), '
+ QUOTENAME(name) + ', 126)),''"''),''null'')'
else 'COALESCE(QUOTENAME('
+ QUOTENAME(name) + ',''"''),''null'')'
end
from @columns
for xml path('')),
1, 5, '');
create table #json_result (
id int identity primary key,
line varchar(max));
declare @sql varchar(max) = REPLACE(
'insert into #json_result '
+ 'select '',{''+{f}+''}'' '
+ 'from #json_base', '{f}', #field_list);
exec(@sql);
update #json_result
set line = STUFF(line, 1, 1, '')
where id = 1;
select '['
UNION ALL
select line
from #json_result
UNION ALL
select ']';
drop table #json_base;
drop table #json_result;
From Firoz Ansari:
CREATE PROCEDURE [dbo].[GetJSON] (
    @ParameterSQL AS VARCHAR(MAX)
)
AS
BEGIN
    DECLARE @SQL NVARCHAR(MAX)
    DECLARE @XMLString VARCHAR(MAX)
    DECLARE @XML XML
    DECLARE @Paramlist NVARCHAR(1000)
    SET @Paramlist = N'@XML XML OUTPUT'
    SET @SQL = 'WITH PrepareTable (XMLString) '
    SET @SQL = @SQL + 'AS ( '
    SET @SQL = @SQL + @ParameterSQL + ' FOR XML RAW, TYPE, ELEMENTS '
    SET @SQL = @SQL + ') '
    SET @SQL = @SQL + 'SELECT @XML = XMLString FROM PrepareTable '
    EXEC sp_executesql @SQL, @Paramlist, @XML = @XML OUTPUT
    SET @XMLString = CAST(@XML AS VARCHAR(MAX))
    DECLARE @JSON VARCHAR(MAX)
    DECLARE @Row VARCHAR(MAX)
    DECLARE @RowStart INT
    DECLARE @RowEnd INT
    DECLARE @FieldStart INT
    DECLARE @FieldEnd INT
    DECLARE @Key VARCHAR(MAX)
    DECLARE @Value VARCHAR(MAX)
    DECLARE @StartRoot VARCHAR(100); SET @StartRoot = '<row>'
    DECLARE @EndRoot VARCHAR(100); SET @EndRoot = '</row>'
    DECLARE @StartField VARCHAR(100); SET @StartField = '<'
    DECLARE @EndField VARCHAR(100); SET @EndField = '>'
    SET @RowStart = CharIndex(@StartRoot, @XMLString, 0)
    SET @JSON = ''
    WHILE @RowStart > 0
    BEGIN
        SET @RowStart = @RowStart + Len(@StartRoot)
        SET @RowEnd = CharIndex(@EndRoot, @XMLString, @RowStart)
        SET @Row = SubString(@XMLString, @RowStart, @RowEnd - @RowStart)
        SET @JSON = @JSON + '{'
        -- for each row
        SET @FieldStart = CharIndex(@StartField, @Row, 0)
        WHILE @FieldStart > 0
        BEGIN
            -- parse node key
            SET @FieldStart = @FieldStart + Len(@StartField)
            SET @FieldEnd = CharIndex(@EndField, @Row, @FieldStart)
            SET @Key = SubString(@Row, @FieldStart, @FieldEnd - @FieldStart)
            SET @JSON = @JSON + '"' + @Key + '":'
            -- parse node value
            SET @FieldStart = @FieldEnd + 1
            SET @FieldEnd = CharIndex('</', @Row, @FieldStart)
            SET @Value = SubString(@Row, @FieldStart, @FieldEnd - @FieldStart)
            SET @JSON = @JSON + '"' + @Value + '",'
            SET @FieldStart = CharIndex(@StartField, @Row, @FieldEnd + 1)
        END
        IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
        SET @JSON = @JSON + '},'
        --/ for each row
        SET @RowStart = CharIndex(@StartRoot, @XMLString, @RowEnd)
    END
    IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
    SET @JSON = '[' + @JSON + ']'
    SELECT @JSON
END
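A hypothetical usage example (dbo.MyTable is only a placeholder): the procedure takes the SELECT as a string, wraps it with FOR XML RAW, TYPE, ELEMENTS, and returns the parsed rows as one JSON array string.
EXEC [dbo].[GetJSON] 'SELECT TOP (100) * FROM dbo.MyTable';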