Saving the FOR XML AUTO results to a variable in SQL

select @result = @input.query('*')
for xml raw, type
The statement above generates the following error:
Msg 6819, Level 16, State 3, Line 2
The FOR XML clause is not allowed in a ASSIGNMENT statement.

For example:
DECLARE @xml_var XML
SET @xml_var =
(
SELECT *,
(
SELECT *
FROM Orders
WHERE Orders.CustomerID = Customers.CustomerID
FOR XML AUTO, TYPE
)
FROM Customers WHERE CustomerID = 'ALFKI'
FOR XML AUTO, TYPE
)
Refer to:
http://blogs.msdn.com/sqlprogrammability/articles/576095.aspx

DECLARE @RESULTS XML
SET @RESULTS = (SELECT * FROM Table_Name FOR XML AUTO)
SELECT @RESULTS

For those using SQL Server 2000, FOR XML is next to useless: you cannot assign the generated XML to a variable, insert it into a table, etc. I had to build it manually when required; here's my sample:
SET NOCOUNT ON
create table #parcelData (
parcelID int
, [description] varchar(20)
)
-- insert sample data
insert into #parcelData
select 1, 'apples' UNION
select 3, 'oranges' UNION
select 25, 'bananas' UNION
select 69, 'maracuja'
print '-- Example 1: FOR XML RAW --'
select parcelID as pID
from #parcelData
ORDER BY parcelID
FOR XML RAW
--> <row pID="1"/><row pID="3"/><row pID="25"/><row pID="69"/>
print '-- Example 2: Build XML Manually --'
declare @parcelRow varchar(50), @dummy int
, @xmlRowStr varchar(8000)
set @xmlRowStr = '' -- initialize.
DECLARE parcel_cursor CURSOR FOR
select DISTINCT '<row pID="' + cast(parcelID as varchar(10)) + '"/>', parcelID as parcelRow
from #parcelData
ORDER BY parcelID
OPEN parcel_cursor
FETCH NEXT FROM parcel_cursor
INTO @parcelRow, @dummy
WHILE @@FETCH_STATUS = 0 BEGIN
-- build the xml row by row.
set @xmlRowStr = @xmlRowStr + @parcelRow
-- get the next row
FETCH NEXT FROM parcel_cursor
INTO @parcelRow, @dummy
END
CLOSE parcel_cursor
DEALLOCATE parcel_cursor
--print @xmlRowStr
-- wrap a root element around it
if @xmlRowStr != '' begin
set @xmlRowStr = '<ROOT>' + @xmlRowStr + '</ROOT>'
end
select @xmlRowStr as XmlOut
DROP TABLE #parcelData
SET NOCOUNT OFF

Related

how to find all strings in between commas in sql server

I want to display a string in a table format as shown below:
For a string like 'hi,is,1,question,thanks,.,.,n'
I need this result:
column1 column2 column3 column4 ..... column
hi is 1 question ..... n
DECLARE @string VARCHAR(MAX);
SET @string = 'hi,is,1,question,thanks,.,.,n';
DECLARE @SQL VARCHAR(MAX);
SET @SQL = 'SELECT ''' + REPLACE(@string, ',', ''',''') + '''';
EXEC (@SQL);
Result: a single row with the values hi, is, 1, question, thanks, ., ., n in separate (unnamed) columns.
How it works:
Add SELECT ' at the beginning and ' at the end of the string.
Replace every , inside the string with ','.
So the string 'hi,is,1,question,thanks,.,.,n' is replaced by SELECT 'hi','is','1','question','thanks','.','.','n'
which is then executed as a SQL query.
PS: If you want to use it on a column you will have to combine it with a CURSOR; see the update below.
Update
DECLARE @table TABLE
(
ID INT IDENTITY,
string VARCHAR(MAX)
);
INSERT INTO @table
VALUES
('This,is,a,string,,n,elements,..');
INSERT INTO @table
VALUES
('And,one,more');
INSERT INTO @table
VALUES
('Ugly,but,works,,,Yay!,..,,,10,11,12,13,14,15,16,17,18,19,..');
SELECT * FROM @table
DECLARE @string_to_split VARCHAR(MAX);
DECLARE @sql_query_to_execute VARCHAR(MAX);
DECLARE @max_elements INT, @id INT, @i INT;
SET @i = 1;
DECLARE string_cursor CURSOR FOR SELECT ID, string FROM @table;
SELECT @max_elements = MAX(LEN(string) - LEN(REPLACE(string, ',', ''))) + 1 -- find the max number of elements
FROM @table;
IF OBJECT_ID('tempdb..##my_temp_table_for_splitted_columns') <> 0 -- create a new temp table with the right number of columns
DROP TABLE ##my_temp_table_for_splitted_columns;
SET @sql_query_to_execute = 'create table ##my_temp_table_for_splitted_columns ( ID int,';
WHILE @i <= @max_elements
BEGIN
SET @sql_query_to_execute = @sql_query_to_execute + ' Col' + CAST(@i AS VARCHAR(max)) + ' varchar(25), ';
SET @i = @i + 1;
END;
SELECT @sql_query_to_execute = SUBSTRING(@sql_query_to_execute, 1, LEN(@sql_query_to_execute) - 1) + ')';
EXEC (@sql_query_to_execute);
/* Split string for each row */
OPEN string_cursor;
FETCH NEXT FROM string_cursor
INTO @id,
@string_to_split
WHILE @@FETCH_STATUS = 0
BEGIN
SET @i = LEN(@string_to_split) - LEN(REPLACE(@string_to_split, ',', '')) + 1; -- number of elements in the current string
WHILE @i < @max_elements
BEGIN
SET @string_to_split = @string_to_split + ','; -- pad missing columns
SET @i = @i + 1;
END;
SET @sql_query_to_execute = 'SELECT ' + CAST(@id AS VARCHAR(MAX)) + ',''' + REPLACE(@string_to_split, ',', ''',''') + '''';
INSERT INTO ##my_temp_table_for_splitted_columns -- insert the result into the temp table
EXEC (@sql_query_to_execute);
FETCH NEXT FROM string_cursor
INTO @id,
@string_to_split;
END;
CLOSE string_cursor;
DEALLOCATE string_cursor;
SELECT *
FROM ##my_temp_table_for_splitted_columns;
This is not trivial. You will find a lot of examples of how to split a string into a set of fragments, and a lot of examples of how to pivot a row set into a single row. But - adding quite some difficulty - you have an unknown count of columns. There are three approaches:
Split the string and return the set with a known maximum of columns
Use a dynamically created statement and EXEC. But this will not work in VIEWs or iTVFs, nor will it work against a table.
Instead of a column list, return a generic container such as XML
With a known maximum of columns
One example of the first approach:
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT('Column',A.[key]+1) AS ColumnName
FROM OPENJSON('["' + REPLACE(@str,',','","') + '"]') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(Column1,Column2,Column3,Column4,Column5,Column6,Column7,Column8,Column9 /*add as many as you need*/)
) p
Hint: My approach to splitting the string uses OPENJSON, which is not available before version 2016, but there are many other approaches you'll find easily (one is sketched below). It's just an example to show you the combination of a splitter with PIVOT, using a running index to build up the column names.
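For instance, a minimal pre-2016 sketch of the same idea, replacing OPENJSON with an XML-based splitter (the CTE name and aliases are mine, not part of the original answer):
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
WITH Casted AS
(
SELECT CAST('<x>' + REPLACE(@str,',','</x><x>') + '</x>' AS XML) AS SplitMe
)
SELECT p.*
FROM
(
SELECT x.value('.','varchar(1000)') AS [value]
,'Column' + CAST(ROW_NUMBER() OVER(ORDER BY (SELECT NULL)) AS VARCHAR(10)) AS ColumnName
FROM Casted
CROSS APPLY Casted.SplitMe.nodes('/x') A(x)
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(Column1,Column2,Column3,Column4,Column5,Column6,Column7,Column8,Column9 /*add as many as you need*/)
) p;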
Unknown count of columns
And the same example with a dynamically created column list:
DECLARE @str VARCHAR(1000)='This,is,a,string,with,n,elements,...';
DECLARE @CountElements INT=LEN(@str)-LEN(REPLACE(@str,',',''))+1;
DECLARE @columnList NVARCHAR(MAX)=
STUFF((
SELECT TOP(@CountElements)
CONCAT(',Column',ROW_NUMBER() OVER(ORDER BY (SELECT 1)))
FROM master..spt_values /*has a lot of rows*/
FOR XML PATH('')
),1,1,'');
DECLARE @Command NVARCHAR(MAX)=
N'SELECT p.*
FROM
(
SELECT A.[value]
,CONCAT(''Column'',A.[key]+1) AS ColumnName
FROM OPENJSON(''["'' + REPLACE(''' + @str + ''','','',''","'') + ''"]'') A
) t
PIVOT
(
MAX(t.[value]) FOR ColumnName IN(' + @columnList + ')
) p;';
EXEC(@Command);
Hint: The statement created is exactly the same as above, but the column list in the PIVOT's IN clause is created dynamically. This will work generically with (almost) any number of words.
If you need more help, please use the edit option of your question and provide some more details.
An inlineable approach for a table returning a generic container
If you need this against a table, you might try something along these lines:
DECLARE @tbl TABLE(ID INT IDENTITY,YourList NVARCHAR(MAX));
INSERT INTO @tbl VALUES('This,is,a,string,with,n,elements,...')
,('And,one,more');
SELECT *
,CAST('<x>' + REPLACE((SELECT t.YourList AS [*] FOR XML PATH('')),',','</x><x>') + '</x>' AS XML) AS Splitted
FROM @tbl t
This will return your list as XML, like:
<x>This</x>
<x>is</x>
<x>a</x>
<x>string</x>
<x>with</x>
<x>n</x>
<x>elements</x>
<x>...</x>
You can grab - if needed - each element by its index, like here:
TheXml.value('/x[1]','nvarchar(max)') AS Element1
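For example, a minimal sketch pulling the first two elements per row (the CROSS APPLY and the column aliases are mine, reusing the cast shown above):
SELECT t.ID
,s.Splitted.value('(/x)[1]','nvarchar(max)') AS Element1
,s.Splitted.value('(/x)[2]','nvarchar(max)') AS Element2
FROM @tbl t
CROSS APPLY (SELECT CAST('<x>' + REPLACE((SELECT t.YourList AS [*] FOR XML PATH('')),',','</x><x>') + '</x>' AS XML)) s(Splitted);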

Is it possible to remove the cursor from this dynamic query?

Simplified example of what I have:
Two tables (table_1 and table_2) which have one common column (Id) but also several "payload" columns with different names (col_1_1, col_2_1, col_2_2). The number of "payload" columns differs from table to table.
I am interested in extracting the IDs from both tables into another table, for rows which have all "payload" columns empty.
There is a list of all "payload" columns for all tables which can be used (#temp).
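For reference, a minimal sketch of the assumed table shapes (the table and column names come from the question; the column types are my assumption):
CREATE TABLE table_1 (Id int, col_1_1 int);
CREATE TABLE table_2 (Id int, col_2_1 int, col_2_2 int);
CREATE TABLE #temp_master (ID int);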
This is how it is done with a cursor:
CREATE TABLE #temp (tab nvarchar(20) not null, col nvarchar(20) not null)
INSERT INTO #temp SELECT 'table_1','col_1_1' UNION SELECT 'table_2','col_2_1' UNION SELECT 'table_2','col_2_2'
DECLARE @table_name nvarchar(20)
DECLARE @sql nvarchar(max)
DECLARE curs CURSOR FOR (SELECT DISTINCT tab FROM #temp)
OPEN curs
FETCH NEXT FROM curs INTO @table_name
WHILE @@FETCH_STATUS = 0
BEGIN
SELECT @sql = ISNULL(@sql,'')+col+' IS NULL AND ' FROM #temp WHERE tab = @table_name
SET @sql += 'Id IS NOT NULL'
SET @sql = 'INSERT INTO #temp_master SELECT ID FROM '+@table_name+' WHERE '+@sql
print @sql
SET @sql = ''
FETCH NEXT FROM curs INTO @table_name
END
CLOSE curs
DEALLOCATE curs
This is the result:
INSERT INTO #temp_master SELECT ID FROM table_1 WHERE col_1_1 IS NULL AND Id IS NOT NULL
INSERT INTO #temp_master SELECT ID FROM table_2 WHERE col_2_1 IS NULL AND col_2_2 IS NULL AND Id IS NOT NULL
Is it possible to remove the cursor and still get the same resulting dynamic queries? The problem is that I am unable to build the dynamic "IS NULL AND" part for the different tables once I remove the cursor.
It's possible to get rid of that cursor. This is probably what you need:
CREATE TABLE #temp (tab nvarchar(20) not null, col nvarchar(20) not null)
INSERT INTO #temp SELECT 'table_1','col_1_1' UNION SELECT 'table_2','col_2_1' UNION SELECT 'table_2','col_2_2'
DECLARE @table_name nvarchar(20)
DECLARE @sql nvarchar(max) = ''
select @sql = 'INSERT INTO #temp_master SELECT ID FROM ' + t.tab + ' WHERE Id IS NOT NULL AND ' + substring(t.cols, 0, len(t.cols)-3) + '
' + @sql from
(
SELECT
distinct
t2.tab,
stuff(
(
select t1.col + cast(' IS NULL AND ' as varchar(max))
from #temp t1
WHERE t1.tab = t2.tab
order by t1.tab
for xml path('')
), 1, 0, '') AS cols
FROM
#temp t2
) as t
order by t.tab desc
print @sql
drop table #temp
That is a regular string-concatenation question; you can find many approaches to accomplish it without a cursor. One of the approaches is a cursor, by the way, and it's not that bad for such a task.
Another, more popular one is FOR XML, which can also guarantee row order if one is defined:
DECLARE @sql VARCHAR(MAX)
CREATE TABLE #temp (tab nvarchar(20) not null, col nvarchar(20) not null)
INSERT INTO #temp SELECT 'table_1','col_1_1' UNION SELECT 'table_2','col_2_1' UNION SELECT 'table_2','col_2_2'
SET @sql = (SELECT (
SELECT '
INSERT INTO #temp_master (ID) SELECT t.ID FROM '+t.tab +' t WHERE t.Id IS NOT NULL'
+ (select ' AND t.' + tt.col + ' is NULL' from #temp tt WHERE tt.tab = t.tab FOR XML PATH(''), TYPE).value('.', 'varchar(max)')
FROM #temp t
GROUP BY t.tab
FOR XML PATH(''), TYPE).value('.', 'varchar(max)'))
PRINT @sql
DROP TABLE #temp
A slightly "tricky" thing is that you have two things to collapse:
many queries (a separate one for each table)
many columns per table
So there is one inner FOR XML to collapse the columns per table and an outer one to combine all the queries into one big script. On SQL Server 2017+ the same double aggregation can also be written with STRING_AGG, as sketched below.
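A minimal sketch of that STRING_AGG alternative, assuming SQL Server 2017 or later (this variant is mine, not part of the original answer):
CREATE TABLE #temp (tab nvarchar(20) not null, col nvarchar(20) not null)
INSERT INTO #temp SELECT 'table_1','col_1_1' UNION SELECT 'table_2','col_2_1' UNION SELECT 'table_2','col_2_2'
DECLARE @sql VARCHAR(MAX)
SELECT @sql = STRING_AGG(CAST('INSERT INTO #temp_master (ID) SELECT t.ID FROM ' + x.tab + ' t WHERE t.Id IS NOT NULL AND ' + x.cols AS VARCHAR(MAX)), '; ')
FROM
(
SELECT tab, STRING_AGG(col + ' IS NULL', ' AND ') AS cols
FROM #temp
GROUP BY tab
) x
PRINT @sql
DROP TABLE #temp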

Improve SQL Server query to convert arbitrary table to JSON

After a lot of searching and piecing together the very excellent techniques for converting result sets using the FOR XML and .nodes() commands that are around the web, I was able to create this single query (not a stored procedure) which does a reasonably good job of converting any arbitrary SQL query to a JSON array.
The query will encode each data row as a single JSON object with a leading comma.
The data rows are wrapped by brackets and the whole result set is then expected to be exported to a file.
I'd like to see if anyone out there can see ways to improve its performance.
Here's the query with a sample table:
declare @xd table (col1 varchar(max), col2 int, col3 real, colNull int)
insert into @xd
select '', null, null, null
UNION ALL select 'ItemA', 123, 123.123, null
UNION ALL select 'ItemB', 456, 456.456, null
UNION ALL select '7890', 789, 789.789, null
select '[{}'
UNION ALL
select ',{' + STUFF((
(select ','
+ '"' + r.value('local-name(.)', 'varchar(max)') + '":'
+ case when r.value('./@xsi:nil', 'varchar(max)') = 'true' then 'null'
when isnumeric(r.value('.', 'varchar(max)')) = 1
then r.value('.', 'varchar(max)')
else '"' + r.value('.', 'varchar(max)') + '"'
end
from rows.nodes('/row/*') as x(r) for xml path(''))
), 1, 1, '') + '}'
from (
-- Arbitrary query goes here (fields go where t.* is, table where @xd t is)
select (select t.* for xml raw,type,elements XSINIL) rows
from @xd t
) xd
UNION ALL
select ']'
My biggest critique of it is that it's insanely slow: it currently takes about 3:30 (minutes:seconds) for ~42,000 rows.
My other big critique is that it currently assumes that everything that looks like a number is a number. It doesn't try to discover the column type in the least (and I'm not even sure if it can).
A final minor critique is that the first data row will have a comma up front, and technically it shouldn't. To compensate for that, it requires the empty JSON object in the first row that starts the JSON array.
Other critiques (preferably with solutions) are invited; the only real limitation I have is that the solution be decently repeatable on many arbitrary SQL queries without having to explicitly identify the column names.
I'm using SQL Server 2012.
Thanks, and to anyone else like me who was looking for a generalized SQL results -> JSON array converter: enjoy!
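(Side note for readers on SQL Server 2016 or later: the built-in FOR JSON clause does this natively, so no hand-rolled converter is needed there; the asker's 2012 instance predates it. A minimal sketch against the sample table above:)
SELECT col1, col2, col3, colNull
FROM @xd
FOR JSON PATH, INCLUDE_NULL_VALUES;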
I say if you really want to kick up performance, use metaprogramming. The example below tries this with 40,000 rows and returns results in less than a second (not counting inserting the initial 40k rows, which in this example only takes about 2 seconds). It also takes into account your data types to not enclose numbers in quotes.
declare @xd table (col1 varchar(max), col2 int, col3 real, colDate datetime, colNull int);
declare @i int = 0;
while @i < 10000 begin
set @i += 1;
insert into @xd
select '', null, null, null, null
union all select 'ItemA', 123, 123.123, getDate(), null
union all select 'ItemB', 456, 456.456, getDate(), null
union all select '7890', 789, 789.789, getDate(), null;
end;
select *
into #json_base
from (
-- Insert SQL Statement here
select * from @xd
) t;
declare @columns table (
id int identity primary key,
name sysname,
datatype sysname,
is_number bit,
is_date bit);
insert into @columns(name, datatype, is_number, is_date)
select columns.name, types.name,
case when number_types.name is not NULL
then 1 else 0
end as is_number,
case when date_types.name is not NULL
then 1 else 0
end as is_date
from tempdb.sys.columns
join tempdb.sys.types
on (columns.system_type_id = types.system_type_id)
left join (values ('int'), ('real'), ('numeric'),
('decimal'), ('bigint'), ('tinyint')) as number_types(name)
on (types.name = number_types.name)
left join (values ('date'), ('datetime'), ('datetime2'),
('smalldatetime'), ('time'), ('datetimeoffset')) as date_types(name)
on (types.name = date_types.name)
where object_id = OBJECT_ID('tempdb..#json_base');
declare @field_list varchar(max) = STUFF((
select '+'',''+' + QUOTENAME(QUOTENAME(name, '"') + ':', '''')
+ '+' + case when is_number = 1
then 'COALESCE(LTRIM('
+ QUOTENAME(name) + '),''null'')'
when is_date = 1
then 'COALESCE(QUOTENAME(LTRIM(convert(varchar(max), '
+ QUOTENAME(name) + ', 126)),''"''),''null'')'
else 'COALESCE(QUOTENAME('
+ QUOTENAME(name) + ',''"''),''null'')'
end
from @columns
for xml path('')),
1, 5, '');
create table #json_result (
id int identity primary key,
line varchar(max));
declare @sql varchar(max) = REPLACE(
'insert into #json_result '
+ 'select '',{''+{f}+''}'' '
+ 'from #json_base', '{f}', @field_list);
exec(@sql);
update #json_result
set line = STUFF(line, 1, 1, '')
where id = 1;
select '['
UNION ALL
select line
from #json_result
UNION ALL
select ']';
drop table #json_base;
drop table #json_result;
From Firoz Ansari:
CREATE PROCEDURE [dbo].[GetJSON] (
@ParameterSQL AS VARCHAR(MAX)
)
AS
BEGIN
DECLARE @SQL NVARCHAR(MAX)
DECLARE @XMLString VARCHAR(MAX)
DECLARE @XML XML
DECLARE @Paramlist NVARCHAR(1000)
SET @Paramlist = N'@XML XML OUTPUT'
SET @SQL = 'WITH PrepareTable (XMLString) '
SET @SQL = @SQL + 'AS ( '
SET @SQL = @SQL + @ParameterSQL + ' FOR XML RAW, TYPE, ELEMENTS '
SET @SQL = @SQL + ') '
SET @SQL = @SQL + 'SELECT @XML = XMLString FROM PrepareTable '
EXEC sp_executesql @SQL, @Paramlist, @XML=@XML OUTPUT
SET @XMLString = CAST(@XML AS VARCHAR(MAX))
DECLARE @JSON VARCHAR(MAX)
DECLARE @Row VARCHAR(MAX)
DECLARE @RowStart INT
DECLARE @RowEnd INT
DECLARE @FieldStart INT
DECLARE @FieldEnd INT
DECLARE @Key VARCHAR(MAX)
DECLARE @Value VARCHAR(MAX)
-- NOTE: the delimiter values were lost in the original post's formatting; the values below are assumed from the FOR XML RAW, ELEMENTS output (<row><field>value</field></row>)
DECLARE @StartRoot VARCHAR(100); SET @StartRoot = '<row>'
DECLARE @EndRoot VARCHAR(100); SET @EndRoot = '</row>'
DECLARE @StartField VARCHAR(100); SET @StartField = '<'
DECLARE @EndField VARCHAR(100); SET @EndField = '>'
SET @RowStart = CharIndex(@StartRoot, @XMLString, 0)
SET @JSON = ''
WHILE @RowStart > 0
BEGIN
SET @RowStart = @RowStart + Len(@StartRoot)
SET @RowEnd = CharIndex(@EndRoot, @XMLString, @RowStart)
SET @Row = SubString(@XMLString, @RowStart, @RowEnd - @RowStart)
SET @JSON = @JSON + '{'
-- for each row
SET @FieldStart = CharIndex(@StartField, @Row, 0)
WHILE @FieldStart > 0
BEGIN
-- parse node key
SET @FieldStart = @FieldStart + Len(@StartField)
SET @FieldEnd = CharIndex(@EndField, @Row, @FieldStart)
SET @Key = SubString(@Row, @FieldStart, @FieldEnd - @FieldStart)
SET @JSON = @JSON + '"' + @Key + '":'
-- parse node value (this part was garbled in the original; reconstructed: read up to the field's closing tag, emit it as a quoted JSON string, then move to the next field)
SET @FieldStart = @FieldEnd + 1
SET @FieldEnd = CharIndex('</' + @Key + '>', @Row, @FieldStart)
SET @Value = SubString(@Row, @FieldStart, @FieldEnd - @FieldStart)
SET @JSON = @JSON + '"' + @Value + '",'
SET @FieldStart = CharIndex(@StartField, @Row, @FieldEnd + Len('</' + @Key + '>'))
END
IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
SET @JSON = @JSON + '},'
--/ for each row
SET @RowStart = CharIndex(@StartRoot, @XMLString, @RowEnd)
END
IF LEN(@JSON) > 0 SET @JSON = SubString(@JSON, 0, LEN(@JSON))
SET @JSON = '[' + @JSON + ']'
SELECT @JSON
END
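A minimal usage sketch (the table name Customers is only illustrative):
EXEC [dbo].[GetJSON] 'SELECT TOP 10 * FROM Customers';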

SQL - fetch value from XML parent node (separately)

The query below returns the whole recordset as XML in a single variable. But I need each parent node's values separately, one per iteration of a WHILE loop. Just run the query below:
--------- Just declare the temp table -------------------------------------------------
IF OBJECT_ID('tempdb.dbo.##TestTable','U') IS NOT NULL DROP TABLE ##TestTable
CREATE TABLE ##TestTable(id int, Uname nvarchar(max), Uaddress nvarchar(max))
INSERT INTO ##TestTable values (1,'abc','NY')
INSERT INTO ##TestTable values (2,'def','WD')
INSERT INTO ##TestTable values (3,'','KL')
DECLARE @XML XML
DECLARE @WhereClause nvarchar(max)
DECLARE @CountVal int
SELECT @CountVal = count(*) from ##TestTable
SET @XML = (SELECT * FROM ##TestTable FOR XML PATH('ParentNode'), ELEMENTS XSINIL)
SELECT @XML
;with cte as
( select xr.value('fn:local-name(.)','nvarchar(max)') name, xr.value('.','nvarchar(max)') val from @XML.nodes('//.') xq(xr)
where xr.value('fn:local-name(.)','nvarchar(max)')<>''
)
SELECT @WhereClause = STUFF((select ' and ' + name + '='''+val+'''' from cte for xml path('')),1,4,'')
SELECT @WhereClause
WHILE (@CountVal>0)
BEGIN
SELECT @WhereClause
SET @CountVal=@CountVal-1
END
IF OBJECT_ID('tempdb.dbo.##TestTable','U') IS NOT NULL DROP TABLE ##TestTable
Current sample XML (in @XML):
<ParentNode xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><id>1</id><Uname>abc</Uname><Uaddress>NY</Uaddress></ParentNode><ParentNode xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><id>2</id><Uname>def</Uname><Uaddress>WD</Uaddress></ParentNode><ParentNode xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><id>3</id><Uname /><Uaddress>KL</Uaddress></ParentNode>
The current output of @WhereClause is (everything in a single @WhereClause variable):
ParentNode='1abcNY' and id='1' and Uname='abc' and Uaddress='NY' and ParentNode='2defWD' and id='2' and Uname='def' and Uaddress='WD' and ParentNode='3KL' and id='3' and Uname='' and Uaddress='KL'
But my expected output is:
First (in @WhereClause): id='1' and Uname='abc' and Uaddress='NY'
Second (in @WhereClause): id='2' and Uname='def' and Uaddress='WD'
Third (in @WhereClause): id='3' and Uname='' and Uaddress='KL'
.
.
How can I get this? Thanks in advance.
Try this:
declare @WhereClause nvarchar(max)
declare @CountVal int
select @CountVal = count(*)
from ##TestTable
while @CountVal > 0
begin
select @WhereClause =
(
select ' and '+T.N.value('local-name(.)', 'nvarchar(max)')+'='+T.N.value('.', 'nvarchar(max)')
from (
select *
from ##TestTable
where id = @CountVal
for xml path(''), type
) as C(X)
cross apply C.X.nodes('/*') as T(N)
for xml path(''), type
).value('substring((./text())[1], 6)', 'nvarchar(max)')
select @WhereClause
set @CountVal = @CountVal - 1
end
I seem to be late and to have misunderstood at first; this would have been my approach:
DECLARE @XML XML
DECLARE @WhereClause nvarchar(max)
DECLARE @CountVal int
SELECT @CountVal = count(*) from ##TestTable
SET @XML = (SELECT * FROM ##TestTable FOR XML PATH('ParentNode'), ELEMENTS XSINIL)
SELECT @XML
;with cte as
( select xr.value('fn:local-name(.)','nvarchar(max)') name, xr.value('.','nvarchar(max)') val from @XML.nodes('//.') xq(xr)
where xr.value('fn:local-name(.)','nvarchar(max)')<>'' and xr.value('fn:local-name(.)','nvarchar(max)')<>'ParentNode'
)
SELECT @WhereClause = SubString((select Case when Name ='id' then CHAR(10) +''+ name + '='''+val+'''' else ' and ' + name + '='''+val+'''' end from cte for xml path('')),2,100000000)
Print @WhereClause

Insert multiple rows into temp table with one command in SQL2005

I've got some data in the following format:
-1,-1,-1,-1,701,-1,-1,-1,-1,-1,304,390,403,435,438,439,442,455
I need to insert it into a temp table like this:
CREATE TABLE #TEMP
(
Node int
)
So that I can use it in a comparison with data in another table.
The data above represents separate rows of the "Node" column.
Is there an easy way to insert this data, all in one command?
Also, the data will actually be coming in as seen, as a string, so I need to be able to just concatenate it into the SQL query string. I can obviously modify it first if needed.
Try something like
CREATE TABLE #TEMP
(
Node int
)
DECLARE @textXML XML
DECLARE @data NVARCHAR(MAX),
@delimiter NVARCHAR(5)
SELECT @data = '-1,-1,-1,-1,701,-1,-1,-1,-1,-1,304,390,403,435,438,439,442,455 ',
@delimiter = ','
SELECT @textXML = CAST('<d>' + REPLACE(@data, @delimiter, '</d><d>') + '</d>' AS XML)
INSERT INTO #TEMP
SELECT T.split.value('.', 'nvarchar(max)') AS data
FROM @textXML.nodes('/d') T(split)
SELECT * FROM #TEMP
DROP TABLE #TEMP
You can create a query dynamically like this:
declare @sql varchar(1000)
set @sql = 'insert into #TEMP select ' + replace(@values, ',', ' union all select ')
exec (@sql)
As always when creating queries dynamically, you have to be careful so that you only use trusted data.
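A minimal end-to-end sketch using the sample string from the question (the variable name @values is just illustrative):
CREATE TABLE #TEMP (Node int)
DECLARE @values varchar(1000), @sql varchar(2000)
SET @values = '-1,-1,-1,-1,701,-1,-1,-1,-1,-1,304,390,403,435,438,439,442,455'
SET @sql = 'insert into #TEMP select ' + replace(@values, ',', ' union all select ')
EXEC (@sql)
SELECT * FROM #TEMP
DROP TABLE #TEMP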
I would create a function that returns a table variable and then join that function into the select.
Use:
select * from myTable a
inner join dbo.buildTableFromCSV('1,2,3') b on a.id = b.theData
Here is my function for doing this:
CREATE FUNCTION [dbo].[buildTableFromCSV] ( @csvString varchar(8000) ) RETURNS @myTable TABLE (ID int identity (1,1), theData varchar(100))
AS BEGIN
DECLARE @startPos Int -- position to chop the next block of chars from
DECLARE @currentPos Int -- position of the current character we're examining
DECLARE @strLen Int
DECLARE @c char(1) -- current character
-- variable initialization
-- -------------------------------------------------------------------------------------------------------------------------------------------------
SELECT @csvString = @csvString + ','
SELECT @startPos = 1
SELECT @currentPos = 1
SELECT @strLen = Len(@csvString)
-- loop over the string and build the result table
-- -------------------------------------------------------------------------------------------------------------------------------------------------
WHILE @currentPos <= @strLen BEGIN
SET @c = SUBSTRING(@csvString, @currentPos, 1 )
IF ( @c = ',' ) BEGIN
IF ( @currentPos - @startPos > 0 ) BEGIN
INSERT
INTO @myTable ( theData )
VALUES ( CAST( SUBSTRING ( @csvString, @startPos, @currentPos - @startPos) AS varchar ) )
END
ELSE
begin
INSERT
INTO @myTable ( theData )
VALUES ( null )
end
SELECT @startPos = @currentPos + 1
END
SET @currentPos = @currentPos + 1
END
delete from @myTable where theData is null
return
END