Dynamic SQL - Picking the values from the temp table

I have a temp table that holds the following four columns:
TableName, ColumnName, PrimaryKeyColumnName, Primarykey (a GUID)
I need to add one more column, Value, that should grab its data based on the condition below, for each row in the temp table:
select ColumnName from TableName where PrimaryKeyColumnName = Primarykey
To be even more precise, the query must retrieve the values from the underlying tables directly.
I am sure this can be achieved using a cursor, but it might hurt my query's performance.
Could this be achieved using sp_executesql? I tried building the query as a varchar, like this:
set @sql = 'select ' + #final.[primary field] + ' from ' + #final.tablename +
' where ' + #final.PrimaryKeyColumnName + ' = ' + #final.Primarykey
exec sp_executesql @sql

Perhaps this will be helpful for you:
IF OBJECT_ID('tempdb.dbo.#temp') IS NOT NULL
DROP TABLE #temp
GO
CREATE TABLE #temp
(
[primary field] SYSNAME,
tablename SYSNAME,
PrimaryKeyColumnName SYSNAME,
Primarykey SYSNAME
)
INSERT INTO #temp ([primary field], tablename, PrimaryKeyColumnName, Primarykey)
VALUES
('[column1]','[table1]','[column3]', '[column12]'),
('[column2]','[table2]','[column4]', '[column24]')
DECLARE @SQL NVARCHAR(MAX)
SET @SQL = (
    SELECT CHAR(13) + 'SELECT ' + [primary field] + ' FROM ' + tablename +
           ' WHERE ' + PrimaryKeyColumnName + ' = ' + Primarykey
    FROM #temp
    FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)')
PRINT @SQL
--EXEC sys.sp_executesql @SQL
Output -
SELECT [column1] FROM [table1] WHERE [column3] = [column12]
SELECT [column2] FROM [table2] WHERE [column4] = [column24]

I am not sure that I understood what you are trying to do, but try this:
declare @sql varchar(8000);
set @sql = null;
select @sql =
    coalesce(@sql + ' union all select ' + ColumnName + ' from ' + tablename,
             'select ' + ColumnName + ' from ' + tablename)
from #final where ColumnName = PrimaryKeyColumnName;
exec (@sql);
Notice that @sql is limited (to 8000 characters in my sample); if your temp table is too big you will have to split the work into separate executes, which needs some changes in the code.
You always have to set an initial value of NULL for @sql so the COALESCE picks the plain 'select' branch on the first row.
I think there is no need for a cursor; this can be solved with a few SQL tricks.
Hope this will help...
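One way around the 8000-character limit mentioned above (my variation, not part of the answer) is to build the string in an nvarchar(max) variable instead; a minimal sketch, assuming the same #final temp table and columns as the question:
-- Sketch only: same concatenation pattern, but nvarchar(max) avoids the 8000-character cap.
declare @sql nvarchar(max) = null;
select @sql = coalesce(@sql + N' union all select ' + ColumnName + N' from ' + tablename,
                       N'select ' + ColumnName + N' from ' + tablename)
from #final
where ColumnName = PrimaryKeyColumnName;
exec sys.sp_executesql @sql;  -- sp_executesql accepts nvarchar(max)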

Create Table #tempValueFetch
(
TableName sysname,
PrimaryKeyColumnName sysname,
AttributeColumnName sysname,
Primaryfield sysname,
PrimaryKey sysname,
Label nvarchar(max),
Value nvarchar(max)
)
This temp table has been populated in five of the columns, namely TableName, PrimaryKeyColumnName, AttributeColumnName, Primaryfield and PrimaryKey.
The columns Label and Value have to be updated dynamically. Note that PrimaryKey is a uniqueidentifier.
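Not an answer from the thread, but a sketch of how the Value column could be filled without a cursor, following the FOR XML pattern above. It assumes TableName holds a bare, unqualified table name and that one dynamic UPDATE per row is acceptable; Label would follow the same pattern. Because PrimaryKey is a uniqueidentifier, it has to be embedded as a quoted string literal:
-- Sketch only: build one UPDATE per row of #tempValueFetch and run the whole batch.
DECLARE @sql nvarchar(max);
SELECT @sql = (
    SELECT CHAR(13) +
        'UPDATE t SET Value = src.' + QUOTENAME(AttributeColumnName) +
        ' FROM #tempValueFetch t JOIN ' + QUOTENAME(TableName) + ' src' +
        ' ON src.' + QUOTENAME(PrimaryKeyColumnName) + ' = ''' + PrimaryKey + '''' +
        ' WHERE t.PrimaryKey = ''' + PrimaryKey + ''''
    FROM #tempValueFetch
    FOR XML PATH(''), TYPE).value('.', 'nvarchar(max)');
PRINT @sql
EXEC sys.sp_executesql @sql;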

Related

Verify all columns can convert from varchar to float

I have tried a bunch of different ways like using cursors and dynamic SQL, but is there a fast way to verify that all columns in a given table can convert from varchar to float (without altering the table)?
I want to get a print out of which columns fail and which columns pass.
I am trying the method below now, but it is slow and I cannot get the list of columns that pass or error out.
drop table users_1;
select *
into users_1
from users
declare @cols table (i int identity, colname varchar(100))
insert into @cols
select column_name
from information_schema.COLUMNS
where TABLE_NAME = 'users'
and COLUMN_NAME not in ('ID')
declare @i int, @maxi int
select @i = 1, @maxi = MAX(i) from @cols
declare @sql nvarchar(max)
while (@i <= @maxi)
begin
    select @sql = 'alter table users_1 alter column ' + colname + ' float NULL'
    from @cols
    where i = @i
    exec sp_executesql @sql
    select @i = @i + 1
end
I found this code on one of the SQL tutorials sites.
Why all the drop/create/alter nonsense? If you just want to know if a column could be altered, why leave your table in a wacky state, where the columns that can be altered are altered, and the ones that can't just raise errors?
Here's one way to accomplish this with dynamic SQL (and with some protections):
DECLARE @tablename nvarchar(513) = N'dbo.YourTableName';
IF OBJECT_ID(@tablename) IS NOT NULL
BEGIN
    DECLARE @sql  nvarchar(max) = N'SELECT ',
            @tmpl nvarchar(max) = N'[Can $colP$ be converted?]
        = CASE WHEN EXISTS
          (
            SELECT 1 FROM ' + @tablename + N'
              WHERE TRY_CONVERT(float, COALESCE($colQ$, N''0'')) IS NULL
          )
          THEN ''No, $colP$ cannot be converted''
          ELSE ''Yes, $colP$ CAN be converted'' END';
    SELECT @sql += STRING_AGG(
        REPLACE(REPLACE(@tmpl, N'$colQ$',
            QUOTENAME(name)), N'$colP$', name), N',')
    FROM sys.columns
    WHERE object_id = OBJECT_ID(@tablename)
      AND name <> N'ID';
    EXEC sys.sp_executesql @sql;
END
Working db<>fiddle
This is never going to be "fast" - there is no great shortcut to having to read and validate every value in the table.
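One caveat: STRING_AGG requires SQL Server 2017 or later. On older versions the same list can be built with the FOR XML PATH pattern used elsewhere on this page; a sketch of a drop-in replacement for the SELECT @sql += STRING_AGG(...) statement above (it relies on the @sql, @tmpl and @tablename variables already declared there):
-- Sketch for pre-2017 servers: aggregate the per-column CASE expressions with FOR XML PATH.
SELECT @sql += STUFF((
    SELECT N',' + REPLACE(REPLACE(@tmpl, N'$colQ$', QUOTENAME(name)), N'$colP$', name)
    FROM sys.columns
    WHERE object_id = OBJECT_ID(@tablename)
      AND name <> N'ID'
    FOR XML PATH(''), TYPE).value('.', 'nvarchar(max)'), 1, 1, N'');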

Iterate through temporary table columns to select them

I have a final temporary table (#tempTable) with an unknown number of columns.
My final select is like this, and it works:
SELECT temp.* FROM #tempTable temp
But instead of '*' I would like to list each column individually:
SELECT temp.col1, temp.col2 FROM #tempTable temp
To do so I need to iterate through my column names; I tried something like this:
DECLARE @ColName VARCHAR(255)
SELECT @ColName = min(name) FROM tempdb.sys.columns
WHERE object_id = Object_id('tempdb..#TEMPTABLE');
WHILE @ColName is not null
BEGIN
    -- I need to do it all at once, not one select each time....
    declare @sql varchar(max) = 'SELECT tp.' + @ColName + ' FROM #TEMPTABLE tp'
    exec(@sql)
    -- Increment the value; how do I go to the next column?
    select @ColName = min(name) FROM tempdb.sys.columns WHERE object_id =
        Object_id('tempdb..#TEMPTABLE') > @ColName -- does not work because it is a string (column name)
END
Try this:
DECLARE @ColName VARCHAR(2000) = 'select '
SELECT @ColName = @ColName + ' temp.' + name + ',' FROM tempdb.sys.columns
WHERE object_id = Object_id('tempdb..#TEMPTABLE')
--delete the last character, which is a comma, and append the table name
SET @ColName = substring(@ColName, 1, LEN(@ColName) - 1) + ' from #TEMPTABLE temp'
exec(@ColName)
This builds the whole column list into a single select ... from ... statement. I increased the size of the varchar variable so it can accommodate longer queries.
Also, IMO a variable name such as @sql or @query would be more meaningful.
A set based approach
IF OBJECT_ID('tempdb..#TEMPTABLE','U') IS NOT NULL
DROP TABLE #TEMPTABLE;
CREATE TABLE #TEMPTABLE (
Id INT IDENTITY(1,1)
,Col1 INT
,Col2 BIGINT
,Col3 BIGINT
,Col4 DATETIME
,Col5 DATETIME
) ;
DECLARE @SQL NVARCHAR(MAX)
SELECT @SQL = N'SELECT ' + SUBSTRING((
SELECT N', temp.' + S.name
FROM
tempdb.sys.columns S
WHERE
S.object_id = OBJECT_ID('tempdb..#TEMPTABLE')
ORDER BY
S.column_id
FOR XML PATH('')
)
,2
,200000
) + N' FROM #TEMPTABLE temp'
EXEC sys.sp_executesql @SQL
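On SQL Server 2017 and later, the FOR XML PATH / SUBSTRING trimming above can be replaced with STRING_AGG; a minimal sketch against the same #TEMPTABLE (my addition, not part of the answer):
-- Sketch (SQL Server 2017+): STRING_AGG builds the column list directly.
DECLARE @SQL2 NVARCHAR(MAX);
SELECT @SQL2 = N'SELECT ' +
       STRING_AGG(CAST(N'temp.' + QUOTENAME(S.name) AS NVARCHAR(MAX)), N', ')
           WITHIN GROUP (ORDER BY S.column_id) +
       N' FROM #TEMPTABLE temp'
FROM tempdb.sys.columns S
WHERE S.object_id = OBJECT_ID('tempdb..#TEMPTABLE');
EXEC sys.sp_executesql @SQL2;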

I want to fetch data from tables which are dynamic, meaning their names might change in the database from time to time

I am working on a query where I need to fetch data from tables which are dynamic in nature, meaning their names might change in the database from time to time.
So the first step is that, from the schema, I will find out the current table names under that schema from which I have to fetch the data. The second part is the difficult one for me: once I have the table names, how can I use them to fetch the data inside those tables?
You can use dynamic SQL for that
use tempdb
GO
CREATE TABLE dynamic_table
(
a INT,
b INT);
GO
INSERT INTO dynamic_table VALUES (1,1);
GO
DECLARE @table_name VARCHAR(200) = 'dynamic_table';
DECLARE @sql VARCHAR(MAX),
        @columns VARCHAR(MAX);
SELECT @columns =
    d FROM (
        SELECT DISTINCT SUBSTRING(
        (
            SELECT ',' + ds1.name AS [text()]
            FROM sys.columns ds1
            WHERE ds1.object_id = ds2.object_id
            ORDER BY ds1.name FOR XML PATH('')
        ), 2, 1000) AS d
        FROM sys.columns ds2 WHERE ds2.object_id = OBJECT_ID(@table_name)
    ) a;
SET @sql = 'SELECT ' + @columns + ' FROM ' + @table_name;
EXEC (@sql)
DROP TABLE dynamic_table
GO
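One caution on concatenating the table name straight into the statement: an unexpected or malicious value could break the query or inject SQL. A self-contained sketch of the same final step (my addition, not part of the answer) that validates the name against sys.tables and bracket-quotes it with QUOTENAME:
-- Sketch: only run the query if the name really is a table, and quote the identifier.
DECLARE @table_name sysname = 'dynamic_table';
DECLARE @safe_table sysname = (SELECT name FROM sys.tables WHERE name = @table_name);
DECLARE @sql varchar(max);
IF @safe_table IS NOT NULL
BEGIN
    SET @sql = 'SELECT * FROM ' + QUOTENAME(@safe_table);
    EXEC (@sql);
END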
Use the one below and change it as per your requirements; hope it's helpful.
Declare @Table varchar(100)
Declare @Sql nvarchar(max)
Set @Table = 'etl_run'
Set @Sql = 'Select * from ' + @Table
exec (@Sql)

Query every single database

I would like to search every database on my server (they all have the same table name) with the following:
SELECT DISTINCT NAME FROM TABLE WHERE NAME = 'blah'
I have tried this:
SET NOCOUNT ON;
IF OBJECT_ID (N'tempdb.dbo.#temp') IS NOT NULL
DROP TABLE #temp
CREATE TABLE #temp
(
[COUNT] INT
, DB VARCHAR(50)
)
DECLARE @TableName NVARCHAR(50)
SELECT @TableName = '[dbo].[TABLE]'
DECLARE @SQL NVARCHAR(MAX)
SELECT @SQL = STUFF((
    SELECT CHAR(13) + 'SELECT ''' + name + ''', COUNT(1) FROM [' + name + '].' + @TableName
    FROM sys.databases
    WHERE OBJECT_ID(name + '.' + @TableName) IS NOT NULL
    FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '')
INSERT INTO #temp (DB, [COUNT])
EXEC sys.sp_executesql @SQL
SELECT *
FROM #temp t
The easiest way is to use the (undocumented) sp_msforeachdb. Its usage is:
exec sp_msforeachdb @command1 = N'insert into #tmp select "?", * from [?].dbo.table';
Of note is the ?, which is a placeholder for the database name. Within the @command1 string, double-quotes are used in place of single-quotes.
This procedure will also enumerate the system databases; you can fairly easily sidestep that by adding if db_id("?") > 4 to the command to skip them.
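Putting those pieces together for this question's per-database count, a sketch that assumes the #temp table and the dbo.[TABLE] placeholder from the attempt above, and uses escaped single quotes rather than the double-quote shorthand:
-- Sketch: run the count in every user database that actually has dbo.[TABLE].
-- "?" is replaced with each database name; db_id('?') > 4 skips the system databases.
EXEC sp_msforeachdb @command1 = N'
    if db_id(''?'') > 4 and object_id(''[?].dbo.[TABLE]'') is not null
        insert into #temp (DB, [COUNT])
        select ''?'', count(1) from [?].dbo.[TABLE]';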

SQL Server: query to insert data into a table from another table with a different structure

I have two tables in two different databases.
My first table is an older version and has fewer columns than the second table.
I want to copy the contents of my old table to my new table.
There are several tables in this situation in each database.
How can I quickly copy the data from the old tables to the new ones without having to write out the column names manually for each table?
Thanks!
You can "avoid writing the column names manually" in SSMS by dragging and dropping the "Columns" folder under the table in the Object Explorer over to a query window (just hold the dragged item over whitespace or the character position where you want the names to appear). All the column names will be displayed separated by commas.
You could also try something like this to get just the list of columns that are common between two tables (then writing the INSERT statement is trivial).
SELECT
Substring((
SELECT
', ' + S.COLUMN_NAME
FROM
INFORMATION_SCHEMA.COLUMNS S
INNER JOIN INFORMATION_SCHEMA.COLUMNS D
ON S.COLUMN_NAME = D.COLUMN_NAME
WHERE
S.TABLE_SCHEMA = 'dbo'
AND S.TABLE_NAME = 'Source Table'
AND D.TABLE_SCHEMA = 'dbo'
AND D.TABLE_NAME = 'Destination Table'
FOR XML PATH(''), TYPE
).value('.[1]', 'nvarchar(max)'), 3, 2147483647)
;
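For example, if that query returned Col1, Col2, Col3, the resulting insert is simply (the table names below are the placeholders from the query above):
-- Hypothetical column list pasted between the two tables:
INSERT INTO dbo.[Destination Table] (Col1, Col2, Col3)
SELECT Col1, Col2, Col3
FROM dbo.[Source Table];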
You could also create an SSIS package that simply moves all the data from one table to the other. Column names that match would automatically be linked up. Depending on your familiarity with SSIS, this could take you 2 minutes, or it could take you 2 hours.
The following code should do the work.
Basically what it does is:
1. Collects the column names from both tables.
2. Intersects the column names in order to filter out columns that exist in only one table.
3. Gets a string of the column names delimited by commas.
4. Builds the insert command using the string from step 3.
5. Executes the command from step 4.
--BEGIN TRAN
DECLARE @oldName NVARCHAR(50) = 'OldTableName', @newName NVARCHAR(50) = 'newTableName'
DECLARE @oldDBName NVARCHAR(50) = '[OldDBName].[dbo].['+@oldName+']', @newDBName NVARCHAR(50) = '[newDBName].[dbo].['+@newName+']'
/*This table variable will hold the columns that exist in both tables*/
DECLARE @tCommonColumns TABLE(
ColumnsName NVARCHAR(max) NOT NULL
);
INSERT INTO @tCommonColumns
SELECT column_name --,*
FROM information_schema.columns
WHERE table_name = @oldName
AND COLUMNPROPERTY(object_id(@oldName), column_name, 'IsIdentity') = 0 --this makes sure you omit identity columns
INTERSECT
SELECT column_name --, *
FROM information_schema.columns
WHERE table_name = @newName
AND COLUMNPROPERTY(object_id(@newName), column_name, 'IsIdentity') = 0 --this makes sure you omit identity columns
--SELECT * FROM @tCommonColumns
/*Get the columns as a comma-separated string*/
DECLARE @columns NVARCHAR(max)
SELECT DISTINCT
@columns = STUFF((SELECT ', ' + cols.ColumnsName
FROM @tCommonColumns cols
FOR XML Path('')),1,1,'')
FROM @tCommonColumns
PRINT @columns
/*Create the insert command*/
DECLARE @InsertCmd NVARCHAR(max)
SET @InsertCmd =
'INSERT INTO '+@newDBName +' ('+@columns +')
SELECT '+@columns +' FROM '+@oldDBName
PRINT @InsertCmd
/*Execute the command*/
EXECUTE sp_executesql @InsertCmd
--ROLLBACK
Please note that this script might fail if you have FOREIGN KEY constraints that are fulfilled in the old table but not in the new table.
Edit:
The query was updated to omit identity columns.
Edit 2:
The query was updated to support the two tables living in different databases (make sure you set the @oldName, @newName, @oldDBName and @newDBName variables to match your actual table and database names).
Thanks all!
I propose this more generic version :)
--BEGIN TRAN
DECLARE @Tablename NVARCHAR(50)
SET @Tablename = 'tableName'
DECLARE @Schemaname NVARCHAR(50)
SET @Schemaname = 'schemaName'
DECLARE @Datasource NVARCHAR(50)
SET @Datasource = 'dataSource'
DECLARE @Datadest NVARCHAR(50)
SET @Datadest = 'dataDestination'
/*This table variable will hold the columns that exist in both tables*/
DECLARE @tCommonColumns TABLE(
ColumnsName NVARCHAR(max) NOT NULL
);
--INSERT INTO @tCommonColumns
DECLARE @sql NVARCHAR(max)
SET @sql = 'SELECT column_name
FROM ' + @Datasource + '.information_schema.columns
WHERE table_name = ''' + @Tablename + '''
AND COLUMNPROPERTY(object_id(''' + @Datasource + '.' + @Schemaname + '.' + @Tablename + '''), column_name, ''IsIdentity'') = 0' --this makes sure you omit identity columns
SET @sql = @sql + ' INTERSECT
SELECT column_name
FROM ' + @Datadest + '.information_schema.columns
WHERE table_name = ''' + @Tablename + '''
AND COLUMNPROPERTY(object_id(''' + @Datadest + '.' + @Schemaname + '.' + @Tablename + '''), column_name, ''IsIdentity'') = 0' --this makes sure you omit identity columns
INSERT INTO @tCommonColumns EXECUTE sp_executesql @sql
-- SELECT * FROM @tCommonColumns
/*Get the columns as a comma-separated string*/
DECLARE @columns NVARCHAR(max)
SELECT DISTINCT
@columns = STUFF((SELECT ', ' + cols.ColumnsName
FROM @tCommonColumns cols
FOR XML Path('')),1,1,'')
FROM @tCommonColumns
--PRINT @columns
/*Create the insert command*/
DECLARE @InsertCmd NVARCHAR(max)
SET @InsertCmd =
'INSERT INTO '+@Datadest+'.'+@Schemaname+'.'+@Tablename +' ('+@columns +')
SELECT '+@columns +' FROM '+@Datasource+'.'+@Schemaname+'.'+@Tablename
PRINT @InsertCmd
/*Execute the command*/
--EXECUTE sp_executesql @InsertCmd
--ROLLBACK
Something like this (note that a plain SELECT * insert only works when the column count and order match):
Insert into dbo.Newtbl
SELECT * FROM dbo.OldTbl