Error while moving files from one folder - sql-server

I have a query to move files from one folder(Source) to another(Destination)
I have given permission on both source and destination to network services and local services, I get an error
The system cannot find the file specified
I have tested the folders to see if I am able to access the files, and I was able to; getting the file list works. It is only when I try to move the files to the Destination folder that I get this error.
My query below is as follow, I create a temp table to link to my source folder and insert its contents into it, I then filter it out into another temp table, where I then get the files I want to move and then use a cursor to iterate through it and move the files.
I hope this can assist.
-- Move every file whose name matches '%_MyFiles%' from the Inbox source
-- folder to the ImportMac destination folder. The file list is obtained by
-- capturing the output of a DIR command run through xp_cmdshell.
CREATE TABLE #tmpDir
(
    ID INT IDENTITY(1, 1),
    fName VARCHAR(400)   -- one raw line of DIR output
)

DECLARE @dir VARCHAR(100)         -- the DIR command line to execute
DECLARE @folderPath VARCHAR(500)  -- UNC path of the source folder
SET @folderPath = '\\Server1\Documents\Ocs\Inbox\'
SET @dir = 'DIR ' + '"' + @folderPath + '"'
PRINT @dir

-- Capture the raw directory listing.
INSERT #tmpDir EXECUTE master.dbo.xp_cmdshell @dir
-- The first few rows of DIR output are header noise (volume info, etc.).
DELETE FROM #tmpDir WHERE ID < 6

-- The file name starts at a fixed column (40) of each DIR output line;
-- skip subdirectory rows, the trailing summary ("bytes") rows, and NULLs.
SELECT SUBSTRING(fName, 40, 100) AS fileName2
INTO #THIS
FROM #tmpDir
WHERE fName NOT LIKE '%<DIR>%'
  AND fName NOT LIKE '%bytes%'
  AND fName IS NOT NULL

-- Keep only the files we actually want to move.
DELETE #THIS WHERE fileName2 NOT LIKE '%_MyFiles%'

DECLARE @FileMove VARCHAR(100)
DECLARE @cmd VARCHAR(1000)

DECLARE cur CURSOR FOR
    SELECT fileName2 FROM #THIS
OPEN cur
FETCH NEXT FROM cur INTO @FileMove
WHILE @@FETCH_STATUS = 0
BEGIN
    -- BUG FIX: the source file name must be closed with a double quote
    -- before the destination path; without it MOVE fails with
    -- "The system cannot find the file specified".
    SET @cmd = 'Move /y "\\Server1\Documents\Ocs\Inbox\' + @FileMove + '" "\\Server1\ImportMac\Inbox"'
    EXECUTE master.dbo.xp_cmdshell @cmd
    FETCH NEXT FROM cur INTO @FileMove
END
CLOSE cur
DEALLOCATE cur

DROP TABLE #tmpDir, #THIS

You need to put a double quote at the end of the source file name for your move command:
-- Closing double quote added after the source file name:
set @cmd = 'Move /y "\\Server1\Documents\Ocs\Inbox\' + @FileMove + '" "\\Server1\ImportMac\Inbox"'

Related

Move file in SQL server based on the beginning of the name

I need to copy certain files from a folder A to a folder B, but I need to copy only files that satisfy a condition. The name of the file must start with the same value as a variable I have in my stored procedure.
Here is what I've got at the moment:
-- Copy the contents of the source folder to the destination folder via
-- xp_cmdshell. (@MessageId is declared but not yet used to filter names.)
DECLARE @SQLFile VARCHAR(1024)          -- the COPY command line to execute
DECLARE @MessageId INT = 3              -- copy files that start with this value
DECLARE @SourceFolderPath VARCHAR(1024)
DECLARE @DestinationFolderPath VARCHAR(1024)

SET @DestinationFolderPath = '\\mydestination'
SET @SourceFolderPath = '\\mysource'

SET @SQLFile = ' COPY /Y ' + @SourceFolderPath + ' /B ' + @DestinationFolderPath
EXEC master..xp_cmdshell @SQLFile
With this code I copy all the files but I don't know if there is a way to integrate the condition of beginning of name.
Thanks
I'm guessing a simple file glob will do what you want. But as people said in the comments using xp_cmdshell can create all kinds of security issues if used where untrusted data is kicking about. For this reason it is also often blocked by SQL Server security policy.
-- Copy only the files whose names start with the @MessageId value, by
-- appending a '*' glob to the source path (e.g. \\mysource\3*).
DECLARE @SQLFile VARCHAR(1024)          -- the COPY command line to execute
DECLARE @MessageId INT = 3              -- copy files that start with this value
DECLARE @SourceFolderPath VARCHAR(1024)
DECLARE @DestinationFolderPath VARCHAR(1024)

SET @DestinationFolderPath = '\\mydestination'
SET @SourceFolderPath = '\\mysource'

SET @SQLFile = ' COPY /Y ' + @SourceFolderPath + '\' + CAST(@MessageId AS varchar(max)) + '* /B ' + @DestinationFolderPath
EXEC master..xp_cmdshell @SQLFile

Import Multiple CSV Files to SQL Server from a Folder with additional Filename as additional column

I have a folder called "Data" This folder consists of various .CSV Files with same schema. The folder Location is 'C:\Data..'
I want to Import the contents of all files recursively into SQL Server in one table.
Append additional column in the table which consists for each file.
Have tried some solutions found it here
, but it didn't work.
-- Enumerate every *.csv file in @path (via xp_cmdshell DIR) into the
-- ALLFILENAMES table, then BULK INSERT each file into the Temp table.
-- NOTE(review): 'dir ... /b' is NOT recursive; add /s to descend into
-- subdirectories — but /b /s returns full paths, so the @path prefix in
-- the BULK INSERT statement must then be dropped.
DECLARE @filename VARCHAR(255),
        @path     VARCHAR(255),
        @sql      VARCHAR(8000),
        @cmd      VARCHAR(1000)

--get the list of files to process:
SET @path = 'C:\Data\'
SET @cmd = 'dir ' + @path + '*.csv /b'
INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC Master..xp_cmdShell @cmd
UPDATE ALLFILENAMES SET WHICHPATH = @path WHERE WHICHPATH IS NULL

--cursor loop
DECLARE c1 CURSOR FOR
    SELECT WHICHPATH, WHICHFILE FROM ALLFILENAMES WHERE WHICHFILE LIKE '%.csv%'
OPEN c1
FETCH NEXT FROM c1 INTO @path, @filename
WHILE @@fetch_status <> -1
BEGIN
    --bulk insert won't take a variable name, so make a sql and execute it instead:
    SET @sql = 'BULK INSERT Temp FROM ''' + @path + @filename + ''' '
             + ' WITH (
                    FIELDTERMINATOR = '','',
                    ROWTERMINATOR = ''\n'',
                    FIRSTROW = 2
                ) '
    PRINT @sql
    EXEC (@sql)
    FETCH NEXT FROM c1 INTO @path, @filename
END
CLOSE c1
DEALLOCATE c1
Two things which aren't working as expected:
1) It doesn't work for recursive files in the directory.
2) Not able to do the bulk insert to Temp file created.

Import Multiple Different .txt Files to SQL Server from a Folder

I found so many queries online to import multiple files to SQL Server into one single table from a folder like the one below but no help online on how to insert multiple different files as different tables in SQL Server. I have 21 files (21 for now, might increase with time) and its really very tedious to import each and every file from the folder.
-- Enumerate every *.txt file in @path into ALLFILENAMES, then BULK INSERT
-- each file into the 'test' table (same pattern as the CSV version, but
-- without FIRSTROW = 2, so header rows are loaded too).
CREATE TABLE ALLFILENAMES(WHICHPATH VARCHAR(255), WHICHFILE VARCHAR(255))

--some variables
DECLARE @filename VARCHAR(255),
        @path     VARCHAR(255),
        @sql      VARCHAR(8000),
        @cmd      VARCHAR(1000)

--get the list of files to process:
SET @path = 'C:\Users\atp1lip\Desktop\09242017\'
SET @cmd = 'dir ' + @path + '*.txt /b'
INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC Master..xp_cmdShell @cmd
UPDATE ALLFILENAMES SET WHICHPATH = @path WHERE WHICHPATH IS NULL

--cursor loop
DECLARE c1 CURSOR FOR
    SELECT WHICHPATH, WHICHFILE FROM ALLFILENAMES WHERE WHICHFILE LIKE '%.txt%'
OPEN c1
FETCH NEXT FROM c1 INTO @path, @filename
WHILE @@fetch_status <> -1
BEGIN
    --bulk insert won't take a variable name, so build and execute dynamic SQL:
    SET @sql = 'BULK INSERT test FROM ''' + @path + @filename + ''' '
             + ' WITH (
                    FIELDTERMINATOR = '','',
                    ROWTERMINATOR = ''\n''
                ) '
    PRINT @sql
    EXEC (@sql)
    FETCH NEXT FROM c1 INTO @path, @filename
END
CLOSE c1
DEALLOCATE c1
I was wondering if its possible to do this? Any help is appreciated. Thanks!
you also can use function xp_dirtree like this:
-- List a directory with xp_dirtree instead of xp_cmdshell.
-- Arguments: depth = 1 (top level only), files = 1 (include files;
-- the isFile column then distinguishes files from subdirectories).
CREATE TABLE #FilesList(ID INT IDENTITY(1,1), FileName VARCHAR(1000), Depth INT, isFile INT)
DECLARE @FilePath VARCHAR(500) = 'C:\Data\'  -- folder to list; set to your path
INSERT INTO #FilesList
EXEC xp_dirtree @FilePath, 1, 1

SQL BULK INSERT FROM CSV From a Folder

I have this SQL command which will be used in a stored procedure and will be scheduled:
-- Load C:\Bulk\samp.csv into Test101.dbo.Test102:
-- comma-separated columns, newline-terminated rows.
BULK INSERT Test101.dbo.Test102
FROM 'C:\Bulk\samp.csv'
WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\n')
It works well but what I want to do is to process all the .csv files from a folder (let's say Bulk folder) automatically. I mean, the user doesn't have to define the exact location with filename instead the stored procedure will process all the .CSV file from that folder and ignore the other files if there were.
Please help.
Thank you.
try these to make sure you have the right permissions and settings to do xp_cmdshell
One Time config change to enable here : https://stackoverflow.com/a/5131503/2628302
--To Test if that worked run these commands. There should be no errors and should return list of files under c:\
-- Smoke test: if xp_cmdshell is enabled and permitted, this returns the
-- bare listing of c:\ with no errors. (A table VARIABLE must use the '@'
-- sigil; 'declare #files table' is not valid T-SQL.)
DECLARE @files TABLE (ID INT IDENTITY, FileName VARCHAR(500))
INSERT INTO @files EXECUTE xp_cmdshell 'dir c:\ /b'
SELECT * FROM @files
--if no errors then proceed to create this SP as shown below. This is the one that does all the work
-- Bulk-load every *.csv file found in C:\Bulk\ into the Temp table.
-- NOTE(review): the 'sp_' prefix is reserved by convention for system
-- procedures; kept here only because callers already use this name.
CREATE PROCEDURE dbo.sp_BulkInsAllFilesInDirectory
AS
BEGIN
    --a table variable to hold filenames returned by DIR
    DECLARE @ALLFILENAMES TABLE (WHICHPATH VARCHAR(255), WHICHFILE VARCHAR(255))

    --some variables
    DECLARE @filename VARCHAR(255),
            @path     VARCHAR(255),
            @sql      VARCHAR(8000),
            @cmd      VARCHAR(1000)

    --get the list of files to process:
    SET @path = 'C:\Bulk\'
    SET @cmd = 'dir ' + @path + '*.csv /b'
    INSERT INTO @ALLFILENAMES(WHICHFILE)
    EXEC Master..xp_cmdShell @cmd
    UPDATE @ALLFILENAMES SET WHICHPATH = @path WHERE WHICHPATH IS NULL

    --cursor loop
    DECLARE c1 CURSOR FOR
        SELECT WHICHPATH, WHICHFILE FROM @ALLFILENAMES WHERE WHICHFILE LIKE '%.csv%'
    OPEN c1
    FETCH NEXT FROM c1 INTO @path, @filename
    WHILE @@fetch_status <> -1
    BEGIN
        --bulk insert won't take a variable name, so build and execute dynamic SQL:
        SET @sql = 'BULK INSERT Temp FROM ''' + @path + @filename + ''' '
                 + ' WITH (
                        FIELDTERMINATOR = '','',
                        ROWTERMINATOR = ''\n'',
                        FIRSTROW = 2
                    ) '
        PRINT @sql
        EXEC (@sql)
        FETCH NEXT FROM c1 INTO @path, @filename
    END
    CLOSE c1
    DEALLOCATE c1
END
--- TEST it by running it like so (start with just one csv file in C:\BULK\; if it works for one it will most likely work for more files):
EXEC dbo.sp_BulkInsAllFilesInDirectory
see if there are errors. Leave a message here and I will check tomorrow. Good luck.

How to create and populate a table in a single step as part of a CSV import operation?

I am looking for a quick-and-dirty way to import CSV files into SQL Server without having to create the table beforehand and define its columns.
Each imported CSV would be imported into its own table.
We are not concerned about data-type inferencing. The CSV vary in structure and layout, and all of them have many many columns, yet we are only concerned with a few of them: street addresses and zipcodes. We just want to get the CSV data into the SQL database quickly and extract the relevant columns.
I'd like to supply the FieldTerminator and RowTerminator, point it at the CSV, and have the utility do the rest. Is there any way to create the table and populate it, all in one step, using BULK INSERT and/or OpenRowset(BULK ... ) ?
Referencing SQLServerPedia, I think this will work:
-- Enable ad hoc distributed queries so OPENROWSET can read the CSV directly.
sp_configure 'show advanced options', 1;
RECONFIGURE;
GO
sp_configure 'Ad Hoc Distributed Queries', 1;
RECONFIGURE;
GO

-- Create and populate CSVImportTable in a single step from the CSV file,
-- using the Access Text driver to expose the file as a rowset.
SELECT
    TerritoryID,
    TotalSales,
    TotalCost
INTO CSVImportTable
FROM OPENROWSET(
    'MSDASQL',
    'Driver={Microsoft Access Text Driver (*.txt, *.csv)}',
    'select * from C:\csvtest.CSV'
)
Annoying, I don't have the rep points yet to just comment, so I'll add an answer based on TyT's (that handle looks terrible in possessive, btw ...)
The worker code needed a double "\" instead of a single for me to avoid a "file not found" error. And you don't have to specify the fields; they will be inferred from the first row of the file:
-- Import the whole file; column names are inferred from the first row.
-- The doubled backslash in the path avoids a "file not found" error.
SELECT *
INTO CsvImportTable
FROM OPENROWSET(
    'MSDASQL',
    'Driver={Microsoft Access Text Driver (*.txt, *.csv)}',
    'select * from C:\\csvtestfile.csv'
)
I had no problems with the Access driver.
UPDATE: If you have trouble with the types being inferred incorrectly, insert a few rows at the top of the file with data of the type you want in the table so you get, say text -> VARCHAR instead of text-> INT and then delete those rows after the import.
As the final icing, add a PK to the table so you can manipulate the data - delete the dummy rows, etc:
-- Add an identity column so individual rows (e.g. dummy header rows) can be
-- targeted for deletion after the import.
ALTER TABLE CsvImportTable ADD Id INT IDENTITY(1, 1)
Updated answer if you're using SQL Server Management Studio 17.
Right click on Database -> Tasks -> Import Flat File...
It will automatically infer the first row of the data as the column names. It should automatically pick up the terminators. You will get the option to set primary keys, allowing nulls, and specify data types for the columns as well.
Putting all .CSV files to a folder and running this is working well for me.
-- For every *.csv file in @path: read the file's header row, create a table
-- named after the file with one NVARCHAR(MAX) column per header field, then
-- bulk-load the remaining rows into that table.
IF OBJECT_ID('dbo.ConfigFile', 'u') IS NOT NULL DROP TABLE [dbo].[ConfigFile];
IF OBJECT_ID('tempdb..#columns', 'u') IS NOT NULL DROP TABLE #columns;

CREATE TABLE ConfigFile(Path VARCHAR(255), FileName VARCHAR(255));

DECLARE @filename VARCHAR(255)
      , @path VARCHAR(255)
      , @cmd VARCHAR(8000);

SET @path = 'C:\FTP_DATA\Netscout\test\'; --PATH TO YOUR CSV FILES (CHANGE TO YOUR PATH)
SET @cmd = 'dir ' + @path + '*.csv /b';

INSERT INTO ConfigFile(FileName)
EXEC Master..xp_cmdShell @cmd;

DELETE FROM ConfigFile WHERE FileName IS NULL;
UPDATE ConfigFile SET Path = @path WHERE Path IS NULL;

DECLARE cur CURSOR
FOR SELECT Path
         , FileName
    FROM ConfigFile
    WHERE FileName LIKE '%.csv%'

OPEN cur
FETCH NEXT FROM cur INTO @path
                       , @filename
WHILE @@fetch_status <> -1  -- BUG FIX: the '<>' operator was missing
BEGIN
    -- Scratch table for the file's header line.
    CREATE TABLE #columns(HeadString NVARCHAR(MAX))

    DECLARE @Columns NVARCHAR(MAX) = ''
    DECLARE @Query NVARCHAR(MAX) = ''
    DECLARE @QUERY2 NVARCHAR(MAX) = ''
    DECLARE @HeaderQuery NVARCHAR(MAX) = ''

    -- Load only line 1 (the header) of the file.
    SELECT @HeaderQuery = 'BULK INSERT #columns FROM ''' + @path + @filename + ''' WITH(firstrow=1,lastrow=1)';
    EXEC (@HeaderQuery);

    -- Turn the comma-separated header into '[col] nvarchar(max),' pairs.
    SELECT @Columns = (
        SELECT QUOTENAME(value) + ' nvarchar(max)' + ','
        FROM #columns
        CROSS APPLY STRING_SPLIT(HeadString, ',') FOR XML PATH('')
    )

    IF ISNULL(@Columns, '') <> ''  -- BUG FIX: the '<>' operator was missing
    BEGIN
        -- Drop the trailing comma, then (re)create the per-file table.
        SET @Columns = LEFT(@Columns, LEN(@Columns) - 1)
        SELECT @Query = 'IF OBJECT_ID(''dbo.[' + REPLACE(@filename, '.csv', '') + ']'', ''u'') IS NOT NULL DROP TABLE [' + REPLACE(@filename, '.csv', '') + ']'
        EXEC (@QUERY)
        SELECT @Query = 'CREATE TABLE [' + REPLACE(@filename, '.csv', '') + '] (' + REPLACE(@Columns, '"', '') + ')'
        EXEC (@QUERY)
    END

    -- Load the data rows (from line 2 on) into the per-file table.
    SELECT @QUERY2 = 'BULK INSERT [' + REPLACE(@filename, '.csv', '') + '] FROM ''' + @path + @filename + '''
        WITH(firstrow=2,CODEPAGE = ''65001'',FORMAT=''csv'',FIELDTERMINATOR='','',ROWTERMINATOR=''\n'')';
    EXEC (@QUERY2);

    DROP TABLE #columns

    FETCH NEXT FROM cur INTO @path
                           , @filename;
END;
CLOSE cur;
DEALLOCATE cur;

Resources