After a lot of searching and piecing together the excellent FOR XML and .nodes() techniques found around the web, I was able to create this single query (not a stored procedure) that does a reasonably good job of converting any arbitrary SQL query to a JSON array.
The query encodes each data row as a single JSON object with a leading comma.
The data rows are wrapped in brackets, and the whole result set is then expected to be exported to a file.
I'd like to see if anyone out there can suggest ways to improve its performance.
Here's the query with a sample table:
declare @xd table (col1 varchar(max), col2 int, col3 real, colNull int)
insert into @xd
select '', null, null, null
UNION ALL select 'ItemA', 123, 123.123, null
UNION ALL select 'ItemB', 456, 456.456, null
UNION ALL select '7890', 789, 789.789, null
select '[{}'
UNION ALL
select ',{' + STUFF((
(select ','
+ '"' + r.value('local-name(.)', 'varchar(max)') + '":'
+ case when r.value('./@xsi:nil', 'varchar(max)') = 'true' then 'null'
when isnumeric(r.value('.', 'varchar(max)')) = 1
then r.value('.', 'varchar(max)')
else '"' + r.value('.', 'varchar(max)') + '"'
end
from rows.nodes('/row/*') as x(r) for xml path(''))
), 1, 1, '') + '}'
from (
-- Arbitrary query goes here, (fields go where t.* is, table where @xd t is)
select (select t.* for xml raw,type,elements XSINIL) rows
from @xd t
) xd
UNION ALL
select ']'
My biggest critique of it is that it's insanely slow.
It currently takes about 3 minutes 30 seconds for ~42,000 rows.
My other big critique is that it currently assumes that everything that looks like a number is a number. It doesn't try to discover column type in the least (and I'm not even sure if it can).
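As an illustration of that risk, ISNUMERIC returns 1 for several strings that are not valid JSON numbers, so those values would be emitted without quotes and break the output. A quick check (not part of the original query):
-- all four of these return 1, even though none of them is a valid JSON number
select isnumeric('$') as currency_symbol, isnumeric('-') as lone_sign, isnumeric('.') as lone_dot, isnumeric(',') as lone_comma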
A final minor critique is that the first data row has a leading comma when technically it shouldn't. To compensate for that, the query emits an empty JSON object as the first element of the array.
Other critiques (preferably with solutions) are invited; the only real limitation I have is that the solution must be reasonably repeatable across many arbitrary SQL queries without having to explicitly identify the column names.
I'm using SQL Server 2012.
Thanks and to anyone else like me who was looking for a generalized SQL Results -> JSON Array converter, ENJOY!
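(Side note for anyone reading this later: if you can use SQL Server 2016 or newer, the built-in FOR JSON clause does this conversion natively and none of the workarounds below are needed. Roughly, reusing the sample table above:)
-- SQL Server 2016+ only; not an option on the 2012 instance in the question
select col1, col2, col3, colNull
from @xd
for json path, include_null_values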
I say if you really want to kick up performance, use metaprogramming. The example below tries this with 40,000 rows and returns results in less than a second (not counting the insertion of the initial 40k rows, which in this example only takes about 2 seconds). It also takes your data types into account so that numbers are not enclosed in quotes.
declare @xd table (col1 varchar(max), col2 int, col3 real, colDate datetime, colNull int);
declare @i int = 0;
while @i < 10000 begin
set @i += 1;
insert into @xd
select '', null, null, null, null
union all select 'ItemA', 123, 123.123, getDate(), null
union all select 'ItemB', 456, 456.456, getDate(), null
union all select '7890', 789, 789.789, getDate(), null;
end;
select *
into #json_base
from (
-- Insert SQL Statement here
select * from @xd
) t;
declare @columns table (
id int identity primary key,
name sysname,
datatype sysname,
is_number bit,
is_date bit);
insert into @columns(name, datatype, is_number, is_date)
select columns.name, types.name,
case when number_types.name is not NULL
then 1 else 0
end as is_number,
case when date_types.name is not NULL
then 1 else 0
end as is_date
from tempdb.sys.columns
join tempdb.sys.types
on (columns.system_type_id = types.system_type_id)
left join (values ('int'), ('real'), ('numeric'),
('decimal'), ('bigint'), ('tinyint')) as number_types(name)
on (types.name = number_types.name)
left join (values ('date'), ('datetime'), ('datetime2'),
('smalldatetime'), ('time'), ('datetimeoffset')) as date_types(name)
on (types.name = date_types.name)
where object_id = OBJECT_ID('tempdb..#json_base');
declare @field_list varchar(max) = STUFF((
select '+'',''+' + QUOTENAME(QUOTENAME(name, '"') + ':', '''')
+ '+' + case when is_number = 1
then 'COALESCE(LTRIM('
+ QUOTENAME(name) + '),''null'')'
when is_date = 1
then 'COALESCE(QUOTENAME(LTRIM(convert(varchar(max), '
+ QUOTENAME(name) + ', 126)),''"''),''null'')'
else 'COALESCE(QUOTENAME('
+ QUOTENAME(name) + ',''"''),''null'')'
end
from @columns
for xml path('')),
1, 5, '');
create table #json_result (
id int identity primary key,
line varchar(max));
declare @sql varchar(max) = REPLACE(
'insert into #json_result '
+ 'select '',{''+{f}+''}'' '
+ 'from #json_base', '{f}', @field_list);
exec(@sql);
update #json_result
set line = STUFF(line, 1, 1, '')
where id = 1;
select '['
UNION ALL
select line
from #json_result
UNION ALL
select ']';
drop table #json_base;
drop table #json_result;
From Firoz Ansari:
CREATE PROCEDURE [dbo].[GetJSON] (
@ParameterSQL AS VARCHAR(MAX)
)
AS
BEGIN
DECLARE @SQL NVARCHAR(MAX)
DECLARE @XMLString VARCHAR(MAX)
DECLARE @XML XML
DECLARE @Paramlist NVARCHAR(1000)
SET @Paramlist = N'@XML XML OUTPUT'
SET @SQL = 'WITH PrepareTable (XMLString) '
SET @SQL = @SQL + 'AS ( '
SET @SQL = @SQL + @ParameterSQL + ' FOR XML RAW, TYPE, ELEMENTS '
SET @SQL = @SQL + ') '
SET @SQL = @SQL + 'SELECT @XML = XMLString FROM PrepareTable '
EXEC sp_executesql @SQL, @Paramlist, @XML=@XML OUTPUT
SET @XMLString = CAST(@XML AS VARCHAR(MAX))
DECLARE @JSON VARCHAR(MAX)
DECLARE @Row VARCHAR(MAX)
DECLARE @RowStart INT
DECLARE @RowEnd INT
DECLARE @FieldStart INT
DECLARE @FieldEnd INT
DECLARE @Key VARCHAR(MAX)
DECLARE @Value VARCHAR(MAX)
DECLARE @StartRoot VARCHAR(100); SET @StartRoot = '<row>'
DECLARE @EndRoot VARCHAR(100); SET @EndRoot = '</row>'
DECLARE @StartField VARCHAR(100); SET @StartField = '<'
DECLARE @EndField VARCHAR(100); SET @EndField = '>'
SET @RowStart = CharIndex(@StartRoot, @XMLString, 0)
SET @JSON = ''
WHILE @RowStart > 0
BEGIN
SET @RowStart = @RowStart+Len(@StartRoot)
SET @RowEnd = CharIndex(@EndRoot, @XMLString, @RowStart)
SET @Row = SubString(@XMLString, @RowStart, @RowEnd-@RowStart)
SET @JSON = @JSON+'{'
-- for each row
SET @FieldStart = CharIndex(@StartField, @Row, 0)
WHILE @FieldStart > 0
BEGIN
-- parse node key
SET @FieldStart = @FieldStart+Len(@StartField)
SET @FieldEnd = CharIndex(@EndField, @Row, @FieldStart)
SET @Key = SubString(@Row, @FieldStart, @FieldEnd-@FieldStart)
SET @JSON = @JSON+'"'+@Key+'":'
-- parse node value
SET @FieldStart = @FieldEnd+1
SET @FieldEnd = CharIndex('</'+@Key+'>', @Row, @FieldStart)
SET @Value = SubString(@Row, @FieldStart, @FieldEnd-@FieldStart)
SET @JSON = @JSON+'"'+@Value+'",'
-- go to next node
SET @FieldStart = CharIndex(@StartField, @Row, @FieldEnd)
END
IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
SET @JSON = @JSON+'},'
--/ for each row
SET @RowStart = CharIndex(@StartRoot, @XMLString, @RowEnd)
END
IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
SET @JSON = '[' + @JSON + ']'
SELECT @JSON
END
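A hypothetical call, just to show the shape of the usage (the table name here is only a placeholder):
-- GetJSON wraps the query in FOR XML RAW and parses the XML back out as a JSON array
EXEC dbo.GetJSON 'SELECT col1, col2 FROM dbo.SomeTable'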
Related
I want to display a string in a table format as shown below:
For a string like 'hi,is,1,question,thanks,.,.,n'
I need this result:
column1  column2  column3  column4   .....  columnN
hi       is       1        question  .....  n
DECLARE @string VARCHAR(MAX);
SET @string = 'hi,is,1,question,thanks,.,.,n';
DECLARE @SQL VARCHAR(MAX);
SET @SQL = 'SELECT ''' + REPLACE(@string, ',', ''',''') + '''';
EXEC (@SQL);
Result:
Add SELECT ' at the beginning and ' at the end of the string.
Replace every , inside the string with ','.
So the string 'hi,is,1,question,thanks,.,.,n' becomes SELECT 'hi','is','1','question','thanks','.','.','n'.
This is then executed as a SQL query.
PS: If you want to use it on a column, you will have to combine it with a CURSOR.
Update
DECLARE @table TABLE
(
ID INT IDENTITY,
string VARCHAR(MAX)
);
INSERT INTO @table
VALUES
('This,is,a,string,,n,elements,..');
INSERT INTO @table
VALUES
('And,one,more');
INSERT INTO @table
VALUES
('Ugly,but,works,,,Yay!,..,,,10,11,12,13,14,15,16,17,18,19,..');
SELECT * FROM @table
DECLARE @string_to_split VARCHAR(MAX);
DECLARE @sql_query_to_execute VARCHAR(MAX);
DECLARE @max_elements INT, @id INT, @i INT;
SET @i = 1;
DECLARE string_cursor CURSOR FOR SELECT ID, string FROM @table;
SELECT @max_elements = MAX(LEN(string) - LEN(REPLACE(string, ',', ''))) + 1 -- Find max number of elements
FROM @table;
IF OBJECT_ID('tempdb..##my_temp_table_for_splitted_columns') IS NOT NULL -- Create new temp table with valid amount of columns
DROP TABLE ##my_temp_table_for_splitted_columns;
SET @sql_query_to_execute = 'create table ##my_temp_table_for_splitted_columns ( ID int,';
WHILE @i <= @max_elements
BEGIN
SET @sql_query_to_execute = @sql_query_to_execute + ' Col' + CAST(@i AS VARCHAR(max)) + ' varchar(25), ';
SET @i = @i + 1;
END;
SELECT @sql_query_to_execute = SUBSTRING(@sql_query_to_execute, 1, LEN(@sql_query_to_execute) - 1) + ')';
EXEC (@sql_query_to_execute);
/* Split string for each row */
OPEN string_cursor;
FETCH NEXT FROM string_cursor
INTO @id,
@string_to_split
WHILE @@FETCH_STATUS = 0
BEGIN
SET @i = LEN(@string_to_split) - LEN(REPLACE(@string_to_split, ',', '')) + 1; -- check amount of columns for current string
WHILE @i < @max_elements
BEGIN
SET @string_to_split = @string_to_split + ','; -- add missing columns
SET @i = @i + 1;
END;
SET @sql_query_to_execute = 'SELECT ' + CAST(@id AS VARCHAR(MAX)) + ',''' + REPLACE(@string_to_split, ',', ''',''') + '''';
INSERT INTO ##my_temp_table_for_splitted_columns --insert result to temp table
EXEC (@sql_query_to_execute);
FETCH NEXT FROM string_cursor
INTO @id,
@string_to_split;
END;
CLOSE string_cursor;
DEALLOCATE string_cursor;
SELECT *
FROM ##my_temp_table_for_splitted_columns;
This is not trivial. You will find a lot of examples of how to split your string into a set of fragments, and a lot of examples of how to pivot a row set into a single row. But - adding quite some difficulty - you have an unknown count of columns. There are three approaches:
Split this and return your set with a known maximum of columns
Use a dynamically created statement and use EXEC. But this will not work in VIEWs or iTVFs, nor will it work against a table.
Instead of a column list you return a generic container like XML
With a known maximum of columns
One example for the first was this
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT('Column',A.[key]+1) AS ColumnName
FROM OPENJSON('["' + REPLACE(@str,',','","') + '"]') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(Column1,Column2,Column3,Column4,Column5,Column6,Column7,Column8,Column9 /*add as many as you need*/)
) p
Hint: My approach to split the string uses OPENJSON, not available before version 2016. But there are many other approaches you'll find easily. It's just an example to show you the combination of a splitter with PIVOT using a running index to build up a column name.
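For instance, on versions before 2016 a commonly used alternative is an XML-based splitter along these lines (a rough sketch; it assumes the list contains no characters that need XML escaping, such as & or <):
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
SELECT ROW_NUMBER() OVER(ORDER BY (SELECT NULL)) AS Position
      ,x.i.value('text()[1]','varchar(100)')    AS Element
FROM (SELECT CAST('<x>' + REPLACE(@str,',','</x><x>') + '</x>' AS XML) AS AsXml) AS src
CROSS APPLY src.AsXml.nodes('/x') AS x(i);
The Position column can then play the same role as A.[key]+1 does in the OPENJSON version above.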
Unknown count of columns
And the same example with a dynamically created column list was this:
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
DECLARE @CountElements INT=LEN(@str)-LEN(REPLACE(@str,',',''))+1;
DECLARE @columnList NVARCHAR(MAX)=
STUFF((
SELECT TOP(@CountElements)
CONCAT(',Column',ROW_NUMBER() OVER(ORDER BY (SELECT 1)))
FROM master..spt_values /*has a lot of rows*/
FOR XML PATH('')
),1,1,'');
DECLARE @Command NVARCHAR(MAX)=
N'SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT(''Column'',A.[key]+1) AS ColumnName
FROM OPENJSON(''["'' + REPLACE(''' + @str + ''','','',''","'') + ''"]'') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(' + @columnList + ')
) p;';
EXEC(@Command);
Hint: The statement created is exactly the same as above. But the column list in the pivot's IN is created dynamically. This will work with (almost) any count of words generically.
If you need more help, please use the edit option of your question and provide some more details.
An inlineable approach for a table returning a generic container
If you need this against a table, you might try something along this:
DECLARE @tbl TABLE(ID INT IDENTITY,YourList NVARCHAR(MAX));
INSERT INTO @tbl VALUES('This,is,a,string,with,n,elements,...')
,('And,one,more');
SELECT *
,CAST('<x>' + REPLACE((SELECT t.YourList AS [*] FOR XML PATH('')),',','</x><x>') + '</x>' AS XML) AS Splitted
FROM @tbl t
This will return your list as an XML like
<x>This</x>
<x>is</x>
<x>a</x>
<x>string</x>
<x>with</x>
<x>n</x>
<x>elements</x>
<x>...</x>
You can grab - if needed - each element by its index like here
TheXml.value('/x[1]','nvarchar(max)') AS Element1
I'm trying to convert a string such as '1,2,3' to 'a,b,c' using this answer:
select stuff(
(
select ',' + realname from sys_user
where ','+'1,2,3'+',' like '%,'+cast(u_id as varchar(10))+',%' for xml path('')
),1,1,'')
CHARINDEX works well here, but I want to create a more general function so that I can do this conversion for any such relation.
I tried a function:
create function [dbo].[fn_enum2str]
(
@enum as varchar(1000),
@table_name as varchar(100),
@origin_field as varchar(100),
@target_field as varchar(100)
)
returns varchar(1000)
as
begin
declare @result varchar(1000)
declare @sqlstr nvarchar(1000)
set @sqlstr = 'set @result = ('
set @sqlstr = @sqlstr + 'select stuff('
set @sqlstr = @sqlstr + '(select '','' + ' + @target_field + ' from ' + @table_name
set @sqlstr = @sqlstr + ' where '','+@enum+','' like ''%,''+cast('+@origin_field+' as varchar)+'',%'' for xml path(''''))'
set @sqlstr = @sqlstr + ',1,1,''''))'
exec(@sqlstr)
return @result
end
It fails with an error; as you know, executing dynamic SQL inside a function is not allowed.
I want to be able to write:
select dbo.fn_enum2str(a.uids,'sys_user','u_id', 'realname') from my_table a
--output 'a,b,c'
So, my question is: how can I create a function or a proc to handle this?
Assuming you have SQL Server 2016, you can use STRING_SPLIT like this:
Test data
CREATE TABLE [dbo].[stringlist]([Numbers] [nvarchar](50) NULL)
Insert into dbo.Stringlist(numbers)
values('1,2,3,4,5,10')
SQL Function
alter function dbo.HinkyBase26( @Value as varchar(250) ) returns VarChar(250) as
begin
--declare @Value as varchar(50) = '13,14,1,2,5,14'
-- Notes: 'A' = 0. Negative numbers are not handled.
declare @Result as VarChar(250) = '';
declare @stringsplit table (numbers nvarchar(50),Letters varchar(1))
insert into @stringsplit(numbers,Letters)
select numbers = @Value ,CHAR(64 + value) as Letters from string_split(@Value,',')
select @Result = Letter from (
select numbers,Letter = STUFF((Select ', ' + Letters
from @stringsplit b
where b.numbers = a.numbers
FOR XML PATH('')),1,2,'')
from @stringsplit a
group by numbers
)z
return @Result
end
Execution of function
SELECT TOP (1000) [Numbers],dbo.HinkyBase26(Numbers)
FROM [LegOgSpass].[dbo].[stringlist]
SQL Stored Proc
Create PROC dbo.usp_convertnumberstostring
@stringvalue nvarchar(250)
AS
BEGIN
Create table #stringsplit (numbers nvarchar(50),Letters varchar(1))
insert into #stringsplit(numbers,Letters)
SELECT Numbers = @stringvalue,CHAR(64 + value) as Letters
from string_split(@stringvalue,',')
select numbers,Letter = STUFF((Select DISTINCT ', ' + Letters
from #stringsplit b
where b.numbers = a.numbers
FOR XML PATH('')),1,2,'')
from #stringsplit a
group by numbers
drop table #stringsplit
END
Execute SP
DECLARE @RC int
DECLARE @stringvalue nvarchar(250) = '1,5,6'
-- TODO: Set parameter values here.
EXECUTE @RC = [dbo].[usp_convertnumberstostring]
@stringvalue
GO
Result
SQL Script
Create table #stringsplit (numbers nvarchar(50),Letters varchar(1))
insert into #stringsplit(numbers,Letters)
SELECT Numbers,CHAR(64 + value) as Letters
FROM [LegOgSpass].[dbo].[stringlist] a
cross apply string_split(numbers,',')
select numbers,Letter = STUFF((Select DISTINCT ', ' + Letters
from #stringsplit b
where b.numbers = a.numbers
FOR XML PATH('')),1,2,'')
from #stringsplit a
group by numbers
Drop table #stringsplit
CREATE function [dbo].[fn_enum2str]
(
@enum as varchar(1000),
@table_name as varchar(100)
)
returns varchar(1000)
as
begin
declare @result varchar(1000)
if @enum is null
return ''
if @table_name = 'sys_user'
set @result = (
select stuff(
(
select ',' + realname from sys_user
where ','+@enum+',' like '%,'+cast(u_id as varchar(10))+',%' for xml path('')
),1,1,''
)
)
if @table_name = 'sys_attachment'
set @result = (
select stuff(
(
select ',/' + filepath from sys_attachment
where ','+@enum+',' like '%,'+cast(aid as varchar(10))+',%' for xml path('')
),1,1,''
)
)
return @result
end
GO
The only way to deal with it that I can think of is to switch which SQL gets executed based on a flag; when another relation appears, add it to the switch list.
select
dbo.fn_enum2str(a.uids, 'sys_user') as names,
dbo.fn_enum2str(a.attachids, 'sys_attachment') as filepaths
from my_table a
That way the calls can be stacked. Yes, it is difficult to remember STUFF or FOR XML PATH or LISTAGG (Oracle), it results in long SQL, and I am lazy. 😄
If you have a better answer, please tell me, thanks.
Suppose I have the below table (TestTable):
ID, SystemID, UserID (all columns are of type int)
I want to write a stored procedure that accepts a string parameter whose value looks like ((5 and 6) or 7) and returns all users produced by the queries below:
Select * From TestTable Where SystemID = 5
Intersect
Select * From TestTable Where SystemID = 6
and the above result is then UNIONed with
Select * From TestTable Where SystemID = 7
The SP must accept any combination, like (((4 or 5) and 6) or 8), (((5 or 9) or 8) and 10), etc.
How can I implement that?
Update: my issue isn't how to split the string, but how to build dynamic SQL that implements its logical meaning.
DECLARE @param NVARCHAR(MAX) = N'4 or 5 and 6 or 8 and 10';
DECLARE
@sql NVARCHAR(MAX) = N'',
@q NVARCHAR(MAX) = N'SELECT UserID FROM dbo.TestTable WHERE SystemID = ';
SELECT @sql = @q + REPLACE(REPLACE(@param, ' or ', '
UNION ALL ' + @q),
' and ', '
INTERSECT ' + @q);
PRINT @sql;
-- EXEC sp_executesql @sql;
Results:
SELECT UserID FROM dbo.TestTable WHERE SystemID = 4
UNION ALL SELECT UserID FROM dbo.TestTable WHERE SystemID = 5
INTERSECT SELECT UserID FROM dbo.TestTable WHERE SystemID = 6
UNION ALL SELECT UserID FROM dbo.TestTable WHERE SystemID = 8
INTERSECT SELECT UserID FROM dbo.TestTable WHERE SystemID = 10
Now, whether this query yields the results you're actually after, I have no idea, but I believe it meets the requirements as stated.
Try this... I have slightly changed Aaron Bertrand's query.
DECLARE @param NVARCHAR(MAX) = N'(((4 or 5) and 6) or 8)';
DECLARE @sql NVARCHAR(MAX) = N'',
@q NVARCHAR(MAX) = N'SELECT * FROM dbo.TestTable WHERE SystemID = ',
@paranth NVARCHAR(100) = substring(@param,0,PATINDEX('%[0-9]%',@param));
set @param = substring(@param,PATINDEX('%[0-9]%',@param),len(@param)-PATINDEX('%[0-9]%',@param))
SELECT @sql = @q + REPLACE(REPLACE(@param, ' or ', '
UNION ALL ' + @q),
' and ', '
INTERSECT ' + @q);
set @sql = @paranth + @sql
if (isnull(@paranth,'')<>'')
set @sql = @sql + ')'
PRINT @sql;
You could use a CSV-to-integer table-valued function inside your SP. You have to create the CsvToInt function first, of course. Then you can use it inside your stored procedure to turn a parameter into an integer list. As pointed out, this only suits the "Or" component of your dynamic search criteria.
You can use this in conjunction with EXEC or sp_executesql, which will allow you to pass SQL as a parameter.
SET @myBaseQuery = 'SELECT * FROM TestTable WHERE SystemId = ' + @myParam
EXECUTE(@myBaseQuery)
or
SELECT * FROM TestTable WHERE SystemID IN (SELECT IntValue FROM dbo.CsvToInt('2,3,4,5,6'))
-- use your parameters
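For the sp_executesql route, a parameterized sketch (the local variable names here are just placeholders):
DECLARE @systemId int = 5;
EXEC sys.sp_executesql
    N'SELECT * FROM dbo.TestTable WHERE SystemID = @sid',
    N'@sid int',
    @sid = @systemId;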
CREATE FUNCTION [dbo].[CsvToInt] ( @Array VARCHAR(1000))
RETURNS @IntTable TABLE
(IntValue INT)
AS
BEGIN
DECLARE @separator CHAR(1)
SET @separator = ','
DECLARE @separator_position INT
DECLARE @array_value VARCHAR(1000)
SET @array = @array + ','
While patindex('%,%' , @array) <> 0
BEGIN
SELECT @separator_position = patindex('%,%' , @array)
SELECT @array_value = LEFT(@array, @separator_position - 1)
INSERT @IntTable
VALUES (CAST(@array_value AS INT))
SELECT @array = stuff(@array, 1, @separator_position, '')
END
RETURN
END
CsvToInt function taken from http://www.summit-pro.com/blog/2010/05/18/csv-list-to-int-sql-function/
Is there a way to query the parameters passed into a stored procedure and return them as XML, without creating a string of the parameters and then casting that as XML? I'm looking for something generic that will work for most SPs without having to physically code it each time.
I have a bunch of stored procedures that access, modify, and verify specific information. At the end of the SPs I want to insert into a logging table the name of the SP and the parameters (in XML) that were used to invoke it. I know how to get the name of the SP, and I know how to get a list of its parameters. What I want is a way to mash it all into XML along with the actual values of the parameters that were passed.
I'm looking for something that does this, without the manual coding of each parameter:
DECLARE @L_Data varchar(1500)
SET @L_Data = '<parms>' +
CASE WHEN @ParamRegStationID IS NULL THEN ''
ELSE ',@ParamRegStationID=''' + Convert(varchar, @ParamRegStationID) + '''' END +
CASE WHEN @ParamScheduleID IS NULL THEN ''
ELSE ',@ParamScheduleID=''' + Convert(varchar, @ParamScheduleID) + '''' END +
CASE WHEN @ParamPatientID IS NULL THEN ''
ELSE ',@ParamPatientID=''' + Convert(varchar, @ParamPatientID) + '''' END +
CASE WHEN @ParamHISPatientID IS NULL THEN ''
ELSE ',@ParamHISPatientID=''' + @ParamHISPatientID + '''' END +
CASE WHEN @ParamEvent IS NULL THEN ''
ELSE ',@ParamEvent=''' + @ParamEvent + '''' END +
'</parms>'
This doesn't work, and it isn't as elegant as what I'm hoping for. However, here is an example illustrating what I'm trying to ultimately get to. It creates the temp table, but doesn't add the parameters to it as columns, so I can later extract it as XML.
ALTER PROC uspTest
@ParamID as bigint=null,
@ParamXYZ as varchar(255)=null
as
-- PROC Does whatever it is going to do ....
DECLARE @ProcName varchar(128), @ParmName varchar(128), @ParmType varchar(128), @ParmLen int,
@ParmSQL varchar(1000)
select @ProcName=OBJECT_NAME(@@PROCID)
--select * from INFORMATION_SCHEMA.ROUTINES where ROUTINE_TYPE='PROCEDURE' and ROUTINE_NAME=@ProcName
DECLARE csrParms CURSOR
FOR
select PARAMETER_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH from INFORMATION_SCHEMA.PARAMETERS where SPECIFIC_NAME=@ProcName and PARAMETER_MODE='IN'
ORDER BY ORDINAL_POSITION
FOR READ ONLY
OPEN csrParms
FETCH NEXT FROM csrParms
INTO @ParmName, @ParmType, @ParmLen
CREATE TABLE #Parms(ID int identity(1,1), Created DateTime)
INSERT INTO #Parms select GETDATE()
WHILE @@FETCH_STATUS = 0
BEGIN
-- GET Parm value and format as xml attribute to save parm
SET @ParmSQL = 'ALTER TABLE #Parms add ' + @ParmName + ' varchar(' + CAST(ISNULL(@ParmLen, 128) as varchar(128)) + ') NULL '
print @ParmSQL
EXEC (@ParmSQL)
SET @ParmSQL = 'UPDATE #Parms SET ' + @ParmName + ' = ''????'''
print @ParmSQL
--EXEC (@ParmSQL)
FETCH NEXT FROM csrParms
INTO @ParmName, @ParmType, @ParmLen
END
SET @ParmSQL = CAST((select * from #Parms FOR XML RAW) as varchar(1000))
select @ParmSQL
CLOSE csrParms
DEALLOCATE csrParms
This is close to what I'm looking for; I just need to know how to dynamically replace the ??? with the current value of each parameter.
ALTER PROC uspTest
@ParamID as bigint=null,
@ParamXYZ as varchar(255)=null
as
-- PROC Does whatever it is going to do ....
DECLARE @ProcName varchar(128), @ParmName varchar(128), @ParmType varchar(128), @ParmLen int,
@ParmSQL varchar(1000)
select @ProcName=OBJECT_NAME(@@PROCID)
--select * from INFORMATION_SCHEMA.ROUTINES where ROUTINE_TYPE='PROCEDURE' and ROUTINE_NAME=@ProcName
set @ParmSQL =
' CREATE TABLE #Parms(ID int identity(1,1), Created DateTime, ' +
STUFF((select (', ' + REPLACE(PARAMETER_NAME,'@','') + ' varchar(' + CAST(ISNULL(CHARACTER_MAXIMUM_LENGTH, 128) as varchar(128)) + ') NULL ')
from INFORMATION_SCHEMA.PARAMETERS where SPECIFIC_NAME='uspTest' and PARAMETER_MODE='IN'
order by ORDINAL_POSITION for XML path(''), type).value('.', 'varchar(max)'), 1, 2, '')
+ ');
' + 'INSERT INTO #Parms (Created) select GETDATE(); ' + STUFF((select (';
UPDATE #Parms SET ' + REPLACE(PARAMETER_NAME,'@','') + ' = ''???''')
from INFORMATION_SCHEMA.PARAMETERS where SPECIFIC_NAME='uspTest' and PARAMETER_MODE='IN'
order by ORDINAL_POSITION for XML path(''), type).value('.', 'varchar(max)'), 1, 2, '')
+ ';
select CAST((select * from #Parms FOR XML RAW) as varchar(1000));'
print @ParmSQL
EXEC (@ParmSQL)
When I execute the proc as:
EXEC uspTest 1, 'test'
Returns:
<row ID="1" Created="2012-04-20T09:44:43.700" ParamID="???" ParamXYZ="???"/>
Prints out:
CREATE TABLE #Parms(ID int identity(1,1), Created DateTime, ParamID varchar(128) NULL , ParamXYZ varchar(255) NULL );
INSERT INTO #Parms (Created) select GETDATE();
UPDATE #Parms SET ParamID = '???';
UPDATE #Parms SET ParamXYZ = '???';
select CAST((select * from #Parms FOR XML RAW) as varchar(1000));
Is this SQL Server 2000 or later? If so you could use the FOR XML clause:
DECLARE @p1 varchar(100) = 'blah'
, @p2 int = 1
, @p3 datetime2(7) = '2011-01-01 13:41'
;
SELECT @p1 StringParm
, @p2 IntParm
, @p3 DateParm
FOR XML RAW
returns:
<row StringParm="blah" IntParm="1" DateParm="2011-01-01T13:41:00"/>
Edit
Ah, the problem there is that you need to parse out the parameter list as well as the values (which are local) into dynamic SQL (where they'd be out of scope).
I suppose you could use INFORMATION_SCHEMA.PARAMETERS to dynamically list the parameters and DBCC INPUTBUFFER to get the actual values passed. Something like:
create procedure junk
( @int INT
, @string VARCHAR(20)
, @date DATE
)
AS
BEGIN
DECLARE @tmp TABLE
( EventType NVARCHAR(30)
, PARMS INT
, Info NVARCHAR(2000)
);
DECLARE @object NVARCHAR(200);
INSERT INTO @tmp
EXEC('DBCC INPUTBUFFER(@@SPID) WITH NO_INFOMSGS');
SELECT INFO
, 'Call' lType
FROM @tmp
UNION
SELECT STUFF(
( SELECT ', ' + parameter_name
FROM INFORMATION_SCHEMA.PARAMETERS
WHERE SPECIFIC_NAME = OBJECT_NAME(@@procid)
ORDER BY ORDINAL_POSITION
FOR XML PATH('')
)
, 1
, 2
, ''
)
, 'Parms';
END
That now makes it so that:
exec dbo.junk @int = 3, @string = 'hoo', @date = '2/2/2002';
Returns:
exec dbo.junk @int = 3, @string = 'hoo', @date = '2/2/2002'; Call
@int, @string, @date Parms
Which should get you a ways along. The tricky bit is that DBCC INPUTBUFFER returns the EXACT call string. So you'd need to write code to parse the call in order to match the input line to the parameter list. If you go that route, you'll likely want a stored function that does the parsing. It would likely take the call string and parameter list, something like the return values above, match them, and use the FOR XML clause to return the format you want.
You could also parse the call string in a cursor tied to the parameter list. Then you'd pull the parameters in order and look for the commas and @'s. You could still have trouble with parameter values that included those characters if you didn't take that into account.
IMHO, getting that squared away seems like a lot of work compared to a simple select which can almost be copied/pasted from the function header. Of course, if you're talking about a large volume of procedures then it might be worth it. Either way, good luck and thanks for a thought-provoking question.
Consider a situation where we have two variables in a SQL Server 2005 SP, as below:
@string1 = 'a,b,c,d'
@string2 = 'c,d,e,f,g'
Is there a solution to get a new string out of that, like (@string1 U @string2), without using any loops? I.e. the final string should be like:
@string3 = 'a,b,c,d,e,f,g'
In case you need to do this as a set and not one row at a time. Given the following split function:
USE tempdb;
GO
CREATE FUNCTION dbo.SplitStrings(@List nvarchar(max))
RETURNS TABLE
AS
RETURN ( SELECT Item FROM
( SELECT Item = x.i.value(N'./text()[1]', N'nvarchar(max)')
FROM ( SELECT [XML] = CONVERT(xml, '<i>'
+ REPLACE(@List,',', '</i><i>') + '</i>').query('.')
) AS a CROSS APPLY [XML].nodes('i') AS x(i) ) AS y
WHERE Item IS NOT NULL
);
GO
Then with the following table and sample data, and string variable, you can get all of the results this way:
DECLARE @foo TABLE(ID INT IDENTITY(1,1), col NVARCHAR(MAX));
INSERT @foo(col) SELECT N'c,d,e,f,g';
INSERT @foo(col) SELECT N'c,e,b';
INSERT @foo(col) SELECT N'd,e,f,x,a,e';
DECLARE @string NVARCHAR(MAX) = N'a,b,c,d';
;WITH x AS
(
SELECT f.ID, c.Item FROM @foo AS f
CROSS APPLY dbo.SplitStrings(f.col) AS c
), y AS
(
SELECT ID, Item FROM x
UNION
SELECT x.ID, s.Item
FROM dbo.SplitStrings(@string) AS s
CROSS JOIN x
)
SELECT ID, Items = STUFF((SELECT ',' + Item
FROM y AS y2 WHERE y2.ID = y.ID
FOR XML PATH(''), TYPE).value(N'./text()[1]', N'nvarchar(max)'), 1, 1, N'')
FROM y
GROUP BY ID;
Results:
ID Items
-- ----------
1 a,b,c,d,e,f,g
2 a,b,c,d,e
3 a,b,c,d,e,f,x
On newer versions (SQL Server 2017+), the query is much simpler, and you don't need to create your own custom string-splitting function:
;WITH x AS
(
SELECT f.ID, c.value FROM @foo AS f
CROSS APPLY STRING_SPLIT
(
CONCAT(f.col, N',', @string), N','
) AS c GROUP BY f.ID, c.value
)
SELECT ID, STRING_AGG(value, N',')
WITHIN GROUP (ORDER BY value)
FROM x GROUP BY ID;
Example db<>fiddle
Now, all that said, what you really should do is follow the previous advice and store these things in a related table in the first place. You can use the same type of splitting methodology to store the strings separately whenever an insert or update happens, instead of just dumping the CSV into a single column, and your applications shouldn't really have to change the way they're passing data into your procedures. But it sure will be easier to get the data out!
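A rough sketch of that normalization (the table and column names here are made up for illustration; STRING_SPLIT needs 2016+, but the dbo.SplitStrings function above works the same way on older versions):
-- assumed: dbo.Foo(ID, col) currently holds the CSV; dbo.FooItem stores one value per row
CREATE TABLE dbo.FooItem (FooID int NOT NULL, Item nvarchar(100) NOT NULL);
INSERT dbo.FooItem (FooID, Item)
SELECT f.ID, s.value
FROM dbo.Foo AS f
CROSS APPLY STRING_SPLIT(f.col, N',') AS s;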
EDIT
Adding a potential solution for SQL Server 2008 that is a bit more convoluted but gets things done with one less loop (using a massive table scan and replace instead). I don't think this is any better than the solution above, and it is certainly less maintainable, but it is an option to test out should you find you are able to upgrade to 2008 or better (and also for any 2008+ users who come across this question).
SET NOCOUNT ON;
-- let's pretend this is our static table:
CREATE TABLE #x
(
ID int IDENTITY(1,1),
col nvarchar(max)
);
INSERT #x(col) VALUES(N'c,d,e,f,g'), (N'c,e,b'), (N'd,e,f,x,a,e');
-- and here is our parameter:
DECLARE @string nvarchar(max) = N'a,b,c,d';
The code:
DECLARE @sql nvarchar(max) = N'DECLARE @src TABLE(ID INT, col NVARCHAR(32));
DECLARE @dest TABLE(ID int, col nvarchar(32));';
SELECT @sql += '
INSERT @src VALUES(' + RTRIM(ID) + ','''
+ REPLACE(col, ',', '''),(' + RTRIM(ID) + ',''') + ''');'
FROM #x;
SELECT @sql += '
INSERT @dest VALUES(' + RTRIM(ID) + ','''
+ REPLACE(@string, ',', '''),(' + RTRIM(ID) + ',''') + ''');'
FROM #x;
SELECT @sql += '
WITH x AS (SELECT ID, col FROM @src UNION SELECT ID, col FROM @dest)
SELECT DISTINCT ID, Items = STUFF((SELECT '','' + col
FROM x AS x2 WHERE x2.ID = x.ID FOR XML PATH('''')), 1, 1, N'''')
FROM x;'
EXEC sys.sp_executesql @sql;
GO
DROP TABLE #x;
This is much trickier to do in 2005 (though not impossible) because you need to change the VALUES() clauses to UNION ALL...
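That is, for 2005 the generated inserts would have to be built like this instead (sketch):
-- 2005 has no multi-row VALUES constructor, so each batch of rows becomes a UNION ALL chain
INSERT @src SELECT 1, 'c' UNION ALL SELECT 1, 'd' UNION ALL SELECT 1, 'e';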
Two ways you can do that:
Build a CLR function to do the job for you. Move the logic back to .NET code, which is a much easier platform for string manipulation.
If you have to use SQL Server, then you will need to:
"explode" the two strings into two tables, this function might help: http://blog.logiclabz.com/sql-server/split-function-in-sql-server-to-break-comma-separated-strings-into-table.aspx
Get a unique list of strings from the two tables. (simple query)
"implode" the two string tables into a variable (http://stackoverflow.com/questions/194852/concatenate-many-rows-into-a-single-text-string)
Found this function dbo.Split in a related answer, which you can use like this:
declare @string1 nvarchar(50) = 'a,b,c,d'
declare @string2 nvarchar(50) = 'c,d,e,f,g'
select * from dbo.split(@string1, ',')
select * from dbo.split(@string2, ',')
declare @data nvarchar(100) = ''
select @data = @data + ',' + Data from (
select Data from dbo.split(@string1, ',')
union
select Data from dbo.split(@string2, ',')
) as d
select substring(@data, 2, LEN(@data))
The last SELECT returns
a,b,c,d,e,f,g
How about
set @string3 = @string1+','+@string2
Sorry, wasn't clear you wanted only unique occurrences. What version of SQL server are you using? String manipulation functions vary per version.
If you don't mind a UDF to split the string, try this:
CREATE FUNCTION dbo.Split
(
@RowData nvarchar(2000),
@SplitOn nvarchar(5)
)
RETURNS @RtnValue table
(
Id int identity(1,1),
Data nvarchar(100)
)
AS
BEGIN
Declare @Cnt int
declare @data varchar(100)
Set @Cnt = 1
While (Charindex(@SplitOn,@RowData)>0)
Begin
Insert Into @RtnValue (data)
Select ltrim(rtrim(Substring(@RowData,1,Charindex(@SplitOn,@RowData)-1)))
Set @RowData = Substring(@RowData,Charindex(@SplitOn,@RowData)+1,len(@RowData))
Set @Cnt = @Cnt + 1
End
Insert Into @RtnValue (data)
Select Data = ltrim(rtrim(@RowData))
Return
END
and the code to use the UDF
go
declare @string1 varchar(50)
declare @string2 varchar(50)
set @string1 = 'a,b,c,d'
set @string2 = 'c,d,e,f,g'
declare @string3 varchar(200)
set @string3 = ''
select @string3 = @string3+data+','
from ( select data,min(id) as Id from dbo.split(@string1+','+@string2,',')
group by data ) xx
order by xx.id
print left(@string3,len(@string3)-1)
The following SQL function will convert a comma-separated list to a table variable...
CREATE FUNCTION [dbo].[udfCsvToTable]( @CsvString VARCHAR( 8000))
-- Converts a comma separated value into a table variable
RETURNS @tbl TABLE( [Value] VARCHAR( 100) COLLATE DATABASE_DEFAULT NOT NULL)
AS BEGIN
DECLARE @Text VARCHAR( 100)
SET @CsvString = RTRIM( LTRIM( @CsvString))
SET @CsvString = REPLACE( @CsvString, CHAR( 9), '')
SET @CsvString = REPLACE( @CsvString, CHAR( 10), '')
SET @CsvString = REPLACE( @CsvString, CHAR( 13), '')
IF LEN( @CsvString) < 1 RETURN
WHILE LEN( @CsvString) > 0 BEGIN
IF CHARINDEX( ',', @CsvString) > 0 BEGIN
SET @Text = LEFT( @CsvString, CHARINDEX( ',', @CsvString) - 1)
SET @CsvString = LTRIM( RTRIM( RIGHT( @CsvString, LEN( @CsvString) - CHARINDEX( ',', @CsvString))))
END
ELSE BEGIN
SET @Text = @CsvString
SET @CsvString = ''
END
INSERT @tbl VALUES( LTRIM( RTRIM( @Text)))
END
RETURN
END
You can then union the two tables together, like so...
SELECT * FROM udfCsvToTable('a,b,c,d')
UNION
SELECT * FROM udfCsvToTable('c,d,e,f,g')
Which will give you a result set of:
a
b
c
d
e
f
g