Asterisk in SQL script [duplicate]

I have a folder called "Dump". This folder contains various .CSV files.
The folder location is 'C:\Dump'.
I want to import the contents of these files into SQL Server.
I want the rough code along with proper comments so that I understand it.
I have tried a few scripts that I found on the net, but they haven't quite worked out for me for some strange reason.
The steps I would like to have are:
Step 1: Copy all the file names in the folder to a table.
Step 2: Iterate through the table and copy the data from the files using BULK INSERT.
Someone please do help me out on this one. Thanks a lot in advance :)

--BULK INSERT MULTIPLE FILES From a Folder

--a table to loop thru filenames (drop table ALLFILENAMES first if it already exists)
CREATE TABLE ALLFILENAMES(WHICHPATH VARCHAR(255), WHICHFILE VARCHAR(255))

--some variables
DECLARE @filename VARCHAR(255),
        @path     VARCHAR(255),
        @sql      VARCHAR(8000),
        @cmd      VARCHAR(1000)

--get the list of files to process:
SET @path = 'C:\Dump\'
SET @cmd = 'dir ' + @path + '*.csv /b'
INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC master..xp_cmdshell @cmd
UPDATE ALLFILENAMES SET WHICHPATH = @path WHERE WHICHPATH IS NULL

--cursor loop
DECLARE c1 CURSOR FOR SELECT WHICHPATH, WHICHFILE FROM ALLFILENAMES WHERE WHICHFILE LIKE '%.csv%'
OPEN c1
FETCH NEXT FROM c1 INTO @path, @filename
WHILE @@FETCH_STATUS <> -1
BEGIN
    --bulk insert won't take a variable name, so make a sql and execute it instead:
    SET @sql = 'BULK INSERT Temp FROM ''' + @path + @filename + ''' '
             + ' WITH (
                    FIELDTERMINATOR = '','',
                    ROWTERMINATOR = ''\n'',
                    FIRSTROW = 2
                 ) '
    PRINT @sql
    EXEC (@sql)
    FETCH NEXT FROM c1 INTO @path, @filename
END
CLOSE c1
DEALLOCATE c1

--Extras
--delete from ALLFILENAMES where WHICHFILE is NULL
--select * from ALLFILENAMES
--drop table ALLFILENAMES
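
Note that this script bulk inserts every file into a single table named Temp, which must already exist with columns that line up with the CSV layout, for example (the column names and types here are placeholders, not part of the original answer):

--hypothetical destination table; adjust the columns to match your CSV files
CREATE TABLE Temp
(
    Col1 VARCHAR(255),
    Col2 VARCHAR(255),
    Col3 VARCHAR(255)
)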

This will give you separate tables for each file.
--BULK INSERT MULTIPLE FILES From a Folder
DROP TABLE ALLFILENAMES

--a table to loop thru filenames
CREATE TABLE ALLFILENAMES(WHICHPATH VARCHAR(255), WHICHFILE VARCHAR(255))

--some variables
DECLARE @filename VARCHAR(255),
        @path     VARCHAR(255),
        @sql      VARCHAR(8000),
        @cmd      VARCHAR(1000)

--get the list of files to process:
SET @path = 'D:\Benihana\backup_csv_benihana_20191128032207_part_1\'
SET @cmd = 'dir ' + @path + '*.csv /b'
INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC master..xp_cmdshell @cmd
UPDATE ALLFILENAMES SET WHICHPATH = @path WHERE WHICHPATH IS NULL
DELETE FROM ALLFILENAMES WHERE WHICHFILE IS NULL
--SELECT replace(whichfile,'.csv',''),* FROM dbo.ALLFILENAMES

--cursor loop
DECLARE c1 CURSOR FOR SELECT WHICHPATH, WHICHFILE FROM ALLFILENAMES WHERE WHICHFILE LIKE '%.csv%' ORDER BY WHICHFILE DESC
OPEN c1
FETCH NEXT FROM c1 INTO @path, @filename
WHILE @@FETCH_STATUS <> -1
BEGIN
    --bulk insert won't take a variable name, so make a sql and execute it instead:
    SET @sql =
        'select * into ' + REPLACE(@filename, '.csv', '') + '
         from openrowset(''MSDASQL''
             ,''Driver={Microsoft Access Text Driver (*.txt, *.csv)}''
             ,''select * from ' + @path + @filename + ''')'
    PRINT @sql
    EXEC (@sql)
    FETCH NEXT FROM c1 INTO @path, @filename
END
CLOSE c1
DEALLOCATE c1
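
Before running the loop, it may help to test the OPENROWSET call against a single file to confirm that the Microsoft Access Text Driver is installed and Ad Hoc Distributed Queries are enabled. A rough single-file test that mirrors the dynamic SQL built above (the file name is a placeholder):

--one-off test; replace sample.csv with a real file from the folder
SELECT *
INTO TestImport
FROM OPENROWSET('MSDASQL'
    ,'Driver={Microsoft Access Text Driver (*.txt, *.csv)}'
    ,'select * from D:\Benihana\backup_csv_benihana_20191128032207_part_1\sample.csv')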

For Step 1, maybe you can look at:
http://www.sql-server-performance.com/forum/threads/copying-filenames-to-sql-table.11546/
or
How to list files inside a folder with SQL Server
and then for Step 2:
How to cast variables in T-SQL for bulk insert?
HTH

You might need to enable xp_cmdshell first:
sp_configure 'show advanced options', '1'
RECONFIGURE
go
sp_configure 'xp_cmdshell', '1'
RECONFIGURE
go
And, to enable Ad Hoc Distributed Queries:
sp_configure 'show advanced options', 1;
RECONFIGURE;
GO
sp_configure 'Ad Hoc Distributed Queries', 1;
RECONFIGURE;
GO
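
If you are not sure whether these options are already on, sys.configurations shows the current values (value_in_use = 1 means enabled):

SELECT name, value_in_use
FROM sys.configurations
WHERE name IN ('xp_cmdshell', 'Ad Hoc Distributed Queries', 'show advanced options')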

To solve Step 1, xp_dirtree can also be used to list all files and folders.
Keep in mind that it is an undocumented procedure, so security precautions must be considered; intentionally crafted filenames could be an intrusion vector.
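
A rough sketch of filling a staging table with xp_dirtree instead of xp_cmdshell (depth 1; the third argument of 1 asks for files as well as folders; the receiving column names are illustrative):

--list files without enabling xp_cmdshell
CREATE TABLE #DirTree (subdirectory NVARCHAR(255), depth INT, isfile BIT)
INSERT INTO #DirTree (subdirectory, depth, isfile)
EXEC master.sys.xp_dirtree 'C:\Dump\', 1, 1
SELECT subdirectory FROM #DirTree WHERE isfile = 1 AND subdirectory LIKE '%.csv'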

In Python you can use d6tstack, which makes this simple:
import d6tstack
import glob
c = d6tstack.combine_csv.CombinerCSV(glob.glob('*.csv'))
c.to_mssql_combine('mssql+pymssql://usr:pwd@localhost/db', 'tablename')
See the SQL examples. It also deals with data schema changes, creates the table, and allows you to preprocess data. It leverages BULK INSERT, so it should be just as fast.

To expand upon the answer by SarangArd, you can replace Temp with the following if your file name matches your table name:
' + LEFT(@filename, LEN(@filename) - 4) + '
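In context, the dynamic statement from the first answer would then look roughly like this (a sketch, assuming every target table already exists and is named after its file):

set @sql = 'BULK INSERT ' + LEFT(@filename, LEN(@filename) - 4) + ' FROM ''' + @path + @filename + ''' '
         + ' WITH (
                FIELDTERMINATOR = '','',
                ROWTERMINATOR = ''\n'',
                FIRSTROW = 2
             ) '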

This code will create a new table per CSV file that is imported.
It is best suited to populating an empty database from CSV files.
CREATE TABLE ALLFILENAMES
(
    WHICHPATH VARCHAR(255),
    WHICHFILE VARCHAR(255)
)

DECLARE @filename VARCHAR(255),
        @path     VARCHAR(255),
        @sql      VARCHAR(8000),
        @cmd      VARCHAR(1000)

SET @path = 'L:\DATA\SOURCE\CSV\' --PATH TO YOUR CSV FILES (CHANGE TO YOUR PATH)
SET @cmd = 'dir ' + @path + '*.csv /b'

INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC master..xp_cmdshell @cmd

UPDATE ALLFILENAMES
SET WHICHPATH = @path
WHERE WHICHPATH IS NULL

DECLARE c1 CURSOR
FOR SELECT WHICHPATH, WHICHFILE
    FROM ALLFILENAMES
    WHERE WHICHFILE LIKE '%.csv%'

OPEN c1
FETCH NEXT FROM c1 INTO @path, @filename

WHILE @@FETCH_STATUS <> -1
BEGIN
    CREATE TABLE #Header
    (
        HeadString NVARCHAR(MAX)
    )

    DECLARE @Columns NVARCHAR(MAX) = ''
    DECLARE @Query NVARCHAR(MAX) = ''
    DECLARE @Query2 NVARCHAR(MAX) = ''
    DECLARE @HeaderQuery NVARCHAR(MAX) = ''

    --read only the header row of the file into #Header
    SELECT @HeaderQuery = @HeaderQuery + 'bulk insert #Header from ''' + @path + @filename + '''
        with(firstrow=1,lastrow=1)'
    EXEC (@HeaderQuery)

    --turn the comma-separated header into a column list
    SELECT @Columns = (SELECT QUOTENAME(value) + ' nvarchar(max)' + ','
                       FROM #Header
                       CROSS APPLY STRING_SPLIT(HeadString, ',') FOR XML PATH(''))

    IF ISNULL(@Columns, '') <> ''
    BEGIN
        SET @Columns = LEFT(@Columns, LEN(@Columns) - 1)
        SELECT @Query = @Query + 'CREATE TABLE ' + REPLACE(@filename, '.csv', '') + ' (' + REPLACE(@Columns, '"', '') + ')'
        PRINT @Query
        EXEC (@Query)
    END

    --load the file into the newly created table
    SELECT @Query2 = @Query2 + 'bulk insert ' + REPLACE(REPLACE(@filename, '.csv', ''), '.TPS', '') + ' from ''' + @path + @filename + '''
        with(firstrow=2,FORMAT=''csv'',FIELDTERMINATOR='','',ROWTERMINATOR=''\n'')'
    EXEC (@Query2)

    DROP TABLE #Header

    FETCH NEXT FROM c1 INTO @path, @filename
END
CLOSE c1
DEALLOCATE c1
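
One caveat: file names containing spaces or extra dots will produce invalid table names when concatenated straight into the CREATE TABLE string; wrapping the generated name in QUOTENAME() is one way to guard against that. A minimal illustration with a made-up file name:

DECLARE @filename VARCHAR(255) = 'sales report 2019.v2.csv'  --hypothetical file name
SELECT QUOTENAME(REPLACE(@filename, '.csv', '')) AS SafeTableName  --returns [sales report 2019.v2]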

Related

Can't see the inserted files in my database using Cursor

I have this code and it executes with no issues, but I don't see the tables or files that are supposed to be in the 'Names' database. Here are the queries.
And this is the data I'm trying to insert in bulk.
Here is the code:
CREATE PROCEDURE import_txt_files12
AS
BEGIN
    DECLARE @file varchar(255)
    DECLARE @path varchar(255)
    DECLARE @sql varchar(8000)
    DECLARE @tablename varchar(255)
    SET @path = 'C:\Users\marti\OneDrive\Desktop\Data Analist course\Datasets\names\'
    DECLARE file_cursor CURSOR FOR
        SELECT physical_name FROM sys.master_files
        WHERE [name] LIKE 'yob%' AND physical_name LIKE @path + '%'
    OPEN file_cursor
    FETCH NEXT FROM file_cursor INTO @file
    WHILE @@FETCH_STATUS = 0
    BEGIN
        -- Extract table name from file name
        SET @tablename = SUBSTRING(@file, LEN(@path), LEN(@file) - LEN(@path) - 4)
        SET @sql = 'BULK INSERT [Names].[dbo].[' + @tablename + '] FROM ''' + @file + ''' WITH (FORMAT = ''txt'', FIRSTROW = 1, FIELDTERMINATOR = '','', ROWTERMINATOR = ''\n'')'
        EXEC (@sql)
        FETCH NEXT FROM file_cursor INTO @file
    END
    CLOSE file_cursor
    DEALLOCATE file_cursor
END
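Note that sys.master_files only lists the data and log files of the databases on the instance, not arbitrary text files in a Windows folder, so the cursor above may have nothing to fetch. A rough sketch of listing the files with xp_dirtree instead (path copied from the question; receiving column names are illustrative):

CREATE TABLE #NameFiles (FileName VARCHAR(255), Depth INT, IsFile BIT)
INSERT INTO #NameFiles (FileName, Depth, IsFile)
EXEC master.sys.xp_dirtree 'C:\Users\marti\OneDrive\Desktop\Data Analist course\Datasets\names\', 1, 1
SELECT FileName FROM #NameFiles WHERE IsFile = 1 AND FileName LIKE 'yob%.txt'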

WHILE EXIST NOT WORKING - To insert data to tables dynamically

I am trying to insert data from txt files into existing tables dynamically. There is no error shown; it just doesn't stop running. I am not sure what mistake I am making in the while loop.
IF OBJECT_ID('TEMPDB..#FILES') IS NOT NULL DROP TABLE #FILES
CREATE TABLE #FILES
(
    FileName VARCHAR(MAX),
    DEPTH VARCHAR(MAX),
    [FILE] VARCHAR(MAX)
)
INSERT INTO #FILES
EXEC master.dbo.xp_DirTree '\\Server1.newdevlab.local\SQLBackups_2012\Internal\Nimisha\',1,1
SELECT * FROM #FILES
DECLARE @FILENAME VARCHAR(MAX), @SQL VARCHAR(MAX)
DECLARE @TABLENAME VARCHAR(12)
SET NOCOUNT ON;
WHILE EXISTS (SELECT 1 FROM #FILES)
BEGIN
    BEGIN TRY
        SET @FILENAME = (SELECT TOP 1 FileName FROM #FILES)
        SET @TABLENAME = 'Buddy.' + RIGHT(@TABLENAME, LEN(@TABLENAME) - 6)
        SET @SQL = N'BULK INSERT ' + @TABLENAME + '' +
                   N' FROM ''\\Server1.newdevlab.local\SQLBackups_2012\Internal\Nimisha\' + @FILENAME + '''
                   WITH (FIRSTROW = 2, FIELDTERMINATOR = ''\t'', ROWTERMINATOR = ''\n'');'
        PRINT @SQL
        EXEC(@SQL)
        DELETE FROM #FILES WHERE FileName = @FILENAME
    END TRY
    BEGIN CATCH
        PRINT 'Failed processing : ' + @FILENAME
    END CATCH
END

USE xxxx
IF OBJECT_ID('TEMPDB..#FILES') IS NOT NULL DROP TABLE #FILES
CREATE TABLE #FILES
(
    FileName VARCHAR(100),
    DEPTH VARCHAR(100),
    [FILE] VARCHAR(MAX)
)
INSERT INTO #FILES
EXEC master.dbo.xp_DirTree '\\server.newdevlab.local\SQLBackups_2012\Internal\xxx\',1,1
SELECT * FROM #FILES
DECLARE @FILENAME VARCHAR(MAX), @SQLString VARCHAR(MAX)
DECLARE @TABLENAME VARCHAR(50)
DECLARE file_cursor CURSOR
FOR
    SELECT FileName FROM #FILES
OPEN file_cursor
FETCH NEXT FROM file_cursor INTO @FileName
WHILE @@FETCH_STATUS = 0
BEGIN
    BEGIN TRY
        SET @TABLENAME = SUBSTRING(('Buddy.' + SUBSTRING(@FILENAME, 7, LEN(@FILENAME) - 3)), 1, LEN(('Buddy.' + SUBSTRING(@FILENAME, 7, LEN(@FILENAME) - 3))) - 4)
        SET @SQLString = N'BULK INSERT ' + @TABLENAME + '' +
                         N' FROM ''\\server.newdevlab.local\SQLBackups_2012\Internal\xxx\' + @FILENAME + '''
                         WITH (FIRSTROW = 2, FIELDTERMINATOR = ''\t'', ROWTERMINATOR = ''\n'');'
        SELECT @SQLString
        EXEC(@SQLString)
    END TRY
    BEGIN CATCH
        SELECT ERROR_NUMBER(), ERROR_SEVERITY(), ERROR_STATE(), ERROR_PROCEDURE(), ERROR_LINE(), ERROR_MESSAGE(), USER, GETDATE()
    END CATCH
    FETCH NEXT FROM file_cursor INTO @FileName
END
CLOSE file_cursor
DEALLOCATE file_cursor
If your intention is to loop until you have processed all the records in the table, then I think you should get the count of records before the loop and loop until your counter exceeds the total number of records. I know I am not able to resolve your issue, but this is a workaround; hope it is helpful to you.
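A rough sketch of that counter-based approach, reusing the #FILES table and @FILENAME variable from the script above (illustrative only):

DECLARE @Total INT = (SELECT COUNT(*) FROM #FILES)
DECLARE @Counter INT = 1
WHILE @Counter <= @Total
BEGIN
    SET @FILENAME = (SELECT FileName
                     FROM (SELECT FileName, ROW_NUMBER() OVER (ORDER BY FileName) AS rn FROM #FILES) x
                     WHERE x.rn = @Counter)
    --build and EXEC the BULK INSERT string here, as in the original script
    SET @Counter = @Counter + 1
END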

MSSMS - CSV file contains data but returns NULL with stored procedures

I have been trying my darndest to get the code described below to work. I am very inexpert at MSSMS and SQL. That said, I love the efficiency of SQL databases and would really love to make this code work.
I have tested my CSV files with this code:
BULK INSERT BCPData
FROM 'D:\cheese\bcp_test.csv'
WITH (FIRSTROW = 2,
FIELDTERMINATOR = ','
,ROWTERMINATOR = '0x0a'
);
GO
They import easily and the data appears.
However, if I try to use the code shown below (I need a script that automatically imports multiple CSV files into my table) I only get "NULL" results in the columns.
My query is as follows:
exec ImportFiles 'd:\cheese\' , 'd:\cheese\Archive' , 'bcp*.csv' , 'MergeBCPData'
I run this query after using the following code to create the necessary stored procedures:
if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[ImportFiles]') and OBJECTPROPERTY(id, N'IsProcedure') = 1)
    drop procedure [dbo].[ImportFiles]
GO
create procedure ImportFiles
    @FilePath     varchar(1000) = 'd:\cheese\' ,
    @ArchivePath  varchar(1000) = 'd:\cheese\Archive\' ,
    @FileNameMask varchar(1000) = 'bcp*.csv' ,
    @MergeProc    varchar(128)  = 'MergeBCPData'
AS
set nocount on

declare @ImportDate datetime
select @ImportDate = getdate()

declare @FileName varchar(1000),
        @File     varchar(1000)
declare @cmd varchar(2000)

create table ##Import (s varchar(8000))
create table #Dir (s varchar(8000))

/*****************************************************************/
-- Import file
/*****************************************************************/
select @cmd = 'dir /B ' + @FilePath + @FileNameMask
delete #Dir
insert #Dir exec master..xp_cmdshell @cmd
delete #Dir where s is null or s like '%not found%'

while exists (select * from #Dir)
begin
    select @FileName = min(s) from #Dir
    select @File = @FilePath + @FileName
    select @cmd = 'bulk insert'
    select @cmd = @cmd + ' ##Import'
    select @cmd = @cmd + ' from'
    select @cmd = @cmd + ' ''' + replace(@File,'"','') + ''''
    select @cmd = @cmd + ' with (FIELDTERMINATOR = '','''
    select @cmd = @cmd + ',ROWTERMINATOR = ''0x0a''
    )'
    truncate table ##Import
    -- import the data
    exec (@cmd)
    -- remove filename just imported
    delete #Dir where s = @FileName
    exec @MergeProc
    -- Archive the file
    select @cmd = 'move ' + @FilePath + @FileName + ' ' + @ArchivePath + @FileName
    exec master..xp_cmdshell @cmd
end
drop table ##Import
drop table #Dir
go
if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[MergeBCPData]') and OBJECTPROPERTY(id, N'IsProcedure') = 1)
drop procedure [dbo].[MergeBCPData]
GO
create procedure MergeBCPData
AS
set nocount on
-- insert data to production table
insert BCPData
(
City ,
Visit_Duration_Seconds ,
Timezone ,
Most_Likely_Company
)
select
SUBSTRING('City', 1, 5),
SUBSTRING('Visit_Duration_Seconds', 1, 12),
SUBSTRING('Timezone', 1, 3),
SUBSTRING('Most_Likely_Company',1, 30)
from ##Import
go
Any help would be very much appreciated. I'm hopeful it is just an error that my inexperienced eyes are failing to catch. THANK YOU!
You stated 'I need a script that automatically imports multiple CSV files into my table'. Is there a pattern in the file names that you can exploit? Do the file names have dates in them, perchance? If there is some repeating pattern that you can exploit, like a series of dates, you can loop through all the files in your folder and append all files to one table, in one go. Check out the code below, and post back if you have questions.
DECLARE @intFlag INT
SET @intFlag = 1
WHILE (@intFlag <= 50) -- we are running 50 loops... change this as needed
BEGIN
    PRINT @intFlag
    DECLARE @fullpath1 varchar(1000)
    SELECT @fullpath1 = '''\\your_path_here\FTP\' + CONVERT(varchar, GETDATE() - @intFlag, 112) + '_Daily.csv'''
    DECLARE @cmd1 nvarchar(1000)
    SELECT @cmd1 = 'bulk insert [dbo].[Daily] from ' + @fullpath1 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 5, ROWTERMINATOR=''0x0a'')'
    EXEC (@cmd1)
    SET @intFlag = @intFlag + 1
END
Here are some common date formats.
http://www.sql-server-helper.com/tips/date-formats.aspx
Again, I'm assuming you have dates in your file names.
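For reference, style 112 produces the yyyymmdd format that the file name pattern above assumes; a quick check of what a given loop iteration will generate:

--e.g. run on 2019-11-29 with @intFlag = 1 this returns '20191128'
SELECT CONVERT(varchar(8), GETDATE() - 1, 112) AS FileDatePart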

Error "Declare Scalar" when Dynamic Exec Update inside two Cursor loops

Basically I'm looking to loop through a temp table which lists certain table names that need to be updated. I take each table name and use it to populate another temporary table of all the IDs which are to be updated.
I can select the data in each table needing to be updated using this structure, but I cannot seem to get the inner cursor to run, as it isn't picking up the temp table.
Any help would be greatly appreciated, as this has been doing my nut in for the past few hours.
Cheers,
DECLARE @table INT
DECLARE @prefix nvarchar(3)
DECLARE @TableName nvarchar(50)
DECLARE @TableIdName nvarchar(50)
DECLARE @getTable CURSOR

SET @getTable = CURSOR FOR
    SELECT DISTINCT(id)
    FROM #t
OPEN @getTable
FETCH NEXT FROM @getTable INTO @table
WHILE @@FETCH_STATUS = 0
BEGIN
    SELECT @TableName = name FROM #t WHERE id = @table
    SET @TableIdName = @TableName + 'Id'
    SELECT @prefix = prefix FROM #t WHERE name = @TableName
    --PRINT @table
    PRINT @TableName
    --PRINT @TableIdName
    --PRINT @prefix
    DECLARE @temptable table(rid int, rTableName nvarchar(50), rprefix nvarchar(3), rpk nvarchar(50))
    EXEC ('INSERT INTO ' + @temptable + ' SELECT ' + @TableIdName + ', ' + @TableName + ', ' + @prefix + @TableIdName + ' FROM ' + @TableName)
    DECLARE @rTableName nvarchar(50)
    DECLARE @rpk nvarchar(50)
    DECLARE @rprefix nvarchar(3)
    DECLARE @row INT
    DECLARE @getRow CURSOR
    SET @getRow = CURSOR FOR
        SELECT DISTINCT(rid)
        FROM @temptable
    OPEN @getRow
    FETCH NEXT FROM @getRow INTO @row
    WHILE @@FETCH_STATUS = 0
    BEGIN
        PRINT @row
        SELECT @rTableName = rTableName FROM @temptable WHERE rid = @row
        SELECT @rpk = rpk FROM @temptable WHERE rid = @row
        SELECT @rprefix = rprefix FROM @temptable WHERE rid = @row
        EXEC ('UPDATE ' + @rTableName + ' SET CoiRef = ' + @rprefix + '_' + @row + ' WHERE ' + @rpk + ' = ' + @row)
        FETCH NEXT FROM @getRow INTO @row
    END
    CLOSE @getRow
    DEALLOCATE @getRow
    FETCH NEXT FROM @getTable INTO @table
END
CLOSE @getTable
DEALLOCATE @getTable
I have also tried another solution using sp_executesql but get similar errors there; this was using the code below instead of the first EXEC.
DECLARE @sqlCommand nvarchar(500)
SET @sqlCommand = 'INSERT INTO @temptable SELECT TableIdName, TableName, prefix, TableIdName FROM ' + @TableName
EXECUTE sp_executesql @sqlCommand, N'@temptable nvarchar(50) output', @temptable OUTPUT
Again, any help would be greatly appreciated.
Thanks,
Gerry
I think the error is coming from the fact that you're trying to use a table variable inside of dynamic SQL. This is not supported because the table variable is out of scope for the dynamic SQL. You should make your @temptable into a temporary table using the CREATE TABLE command.
In the unsolicited advice category, I'd suggest attempting to recreate this without using cursors if at all possible, as cursors go against the concept of set-based processing, which is the foundation of SQL Server.
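A minimal sketch of that change, reusing the names from the question; a #temp table created in the outer batch stays visible inside EXEC(), unlike a table variable:

CREATE TABLE #temptable (rid int, rTableName nvarchar(50), rprefix nvarchar(3), rpk nvarchar(50))
EXEC ('INSERT INTO #temptable SELECT ' + @TableIdName + ', ''' + @TableName + ''', ''' + @prefix + ''', ''' + @TableIdName + ''' FROM ' + @TableName)
--the inner cursor can now read from #temptable; drop it at the end of each outer loop iteration
DROP TABLE #temptable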

SQL Server : how to dump/extract each row to separate file

I have a business requirement to take a table with columns PrimaryKey, A, B, C, D, E and dump each row to a file such as:
filename: Primarykey.txt
(row 1) A
(row 2) B
(row 3) C
etc.
Is there a good way to do this with SQL Server 2008 or should I write a C# program using a datatable? The table I am using has about 200k rows in it.
Thanks
The links below contain some previous posts and another link to a possible solution using SSIS.
You might have to play around until you get what you want.
Good luck.
Some clues here
or
SQL Server Forums - Create multiple files from SSIS
Using xp_cmdshell
If xp_cmdshell is permitted, you might be able to use something like this:
declare @path varchar(200)
declare @schema varchar(100)
declare @pk varchar(100)
declare @sql varchar(2000)
declare @cmd varchar(2500)

set @path = 'C:\your\file\path\'

declare rowz cursor for (select PrimaryKey from TableA)
open rowz
fetch next from rowz into @pk
while @@FETCH_STATUS = 0
begin
    set @sql = 'select A, B, C, D, E from TableA where PrimaryKey = ''' + @pk + ''''
    set @cmd = 'bcp "' + @sql + '" queryout "' + @path + @pk + '.txt" -T -c -t\n'
    exec xp_cmdshell @cmd
    fetch next from rowz into @pk
end
close rowz
deallocate rowz
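To sanity-check the generated command before letting xp_cmdshell run it, you can print one example (the key value and path below are made up):

DECLARE @pk varchar(100) = '12345', @path varchar(200) = 'C:\your\file\path\'
DECLARE @sql varchar(2000) = 'select A, B, C, D, E from TableA where PrimaryKey = ''' + @pk + ''''
PRINT 'bcp "' + @sql + '" queryout "' + @path + @pk + '.txt" -T -c -t\n'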
Using sqlcmd
Alternately, you can create a series of statements to run through sqlcmd to create the separate files.
First, create a temporary table and add some boilerplate entries:
create table #temp (tkey int, things varchar(max))
insert into #temp (tkey, things) values (0, 'SET NOCOUNT ON;
GO'),(2, ':out stdout')
Then, fill the temporary table with the queries that we need:
declare @dir varchar(250)
declare @sql varchar(5000)
declare @cmd varchar(5500)
declare @schema varchar(100)
declare @pk varchar(100)

set @schema = 'YourSchema'

declare rowz cursor for (select PrimaryKey from "YourSchema"..TableA)
open rowz
fetch next from rowz into @pk
while @@FETCH_STATUS = 0
begin
    set @dir = '"C:\your\file\path\' + @pk + '.txt"'
    set @sql = '
    SELECT A +''
    ''+ B +''
    ''+ C +''
    ''+ D +''
    ''+ E
    FROM "' + @schema + '"..TableA where PrimaryKey = ' + @pk
    set @cmd = ':out ' + @dir + @sql + '
    GO'
    insert into #temp (tkey, things) values (1, @cmd)
    fetch next from rowz into @pk
end
close rowz
deallocate rowz
Next, query the temporary table to get the generated queries:
select things from #temp order by tkey
Finally, copy the results to a file and send this file as input to sqlcmd.exe at the command prompt with the parameter -h -1:
C:\your\file\path>sqlcmd -h -1 -S YourServer -i "script.sql"