I am trying to run an SSIS package that loads multiple flat files from a NAS drive location.
I created these variables:
File_Path (String): G:\CnS_Pro_Migration_to_QSI-XL\ETL_\Claims\QA\CA_HH_SMART_PRO_MEDCLM_74_CSPFacets_202203C1_202202261309_56662.txt
File_Name (String): CA_HH_SMART_PRO_MEDCLM_74_CSPFacets_202203C1_202202261309_56662.txt
File_Folder (String): G:\CnS_Pro_Migration_to_QSI-XL\ETL_\Claims\QA\
I then created a Foreach Loop container. Under Collection, the folder is set to G:\CnS_Pro_Migration_to_QSI-XL\ETL_\Claims\QA\ and Files is set to *.txt.
Under Variable Mappings, I mapped the variable User::File_Path.
In the flat file connection manager's properties, I added an expression with Property = ConnectionString and Expression = @[User::File_Path].
But on executing the package, I get these errors:
[Flat File Source [2]] Warning: The system cannot find the file specified.
[Flat File Source [2]] Error: Cannot open the datafile "CA_SMART_PRO_MEDCLM_74_CSPFacets_202203C1_202202261233_56662.txt".
[SSIS.Pipeline] Error: Flat File Source failed the pre-execute phase and returned error code 0xC020200E.
Warning: SSIS Warning Code DTS_W_MAXIMUMERRORCOUNTREACHED. The Execution method succeeded,
but the number of errors raised (2) reached the maximum allowed (1); resulting in failure.
This occurs when the number of errors reaches the number specified in MaximumErrorCount. Change the MaximumErrorCount or fix the errors.
Could you please help me out?
For an SSIS solution, see the link below.
https://www.mssqltips.com/sqlservertip/2874/loop-through-flat-files-in-sql-server-integration-services/
For a Bulk Insert option, see the link below.
https://www.mssqltips.com/sqlservertip/6458/sql-server-bulk-insert-for-multiple-csv-files-from-a-single-folder/
Here is an example of a looping Bulk Insert job, based on changing dates in the names of similar CSV files.
DECLARE @intFlag INT
SET @intFlag = 1
WHILE (@intFlag <= 48)
BEGIN
PRINT @intFlag
declare @fullpath1 varchar(1000)
select @fullpath1 = '''\\source\FTP1\' + convert(varchar, getdate() - @intFlag, 112) + '_SPGT.SPL'''
declare @cmd1 nvarchar(1000)
select @cmd1 = 'bulk insert [dbo].[table1] from ' + @fullpath1 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 5, ROWTERMINATOR=''0x0a'')'
exec (@cmd1)
-------------------------------------------
declare @fullpath2 varchar(1000)
select @fullpath2 = '''\\source\FTP2\' + convert(varchar, getdate() - @intFlag, 112) + '_SPBMI_GL_PROP_USD_C.SPL'''
declare @cmd2 nvarchar(1000)
select @cmd2 = 'bulk insert [dbo].[table2] from ' + @fullpath2 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 5, ROWTERMINATOR=''0x0a'')'
exec (@cmd2)
-------------------------------------------
declare @fullpath3 varchar(1000)
select @fullpath3 = '''\\source\FTP3\' + convert(varchar, getdate() - @intFlag, 112) + '_SPBMI_GL_PROP_USD_C_ADJ.SPC'''
declare @cmd3 nvarchar(1000)
select @cmd3 = 'bulk insert [dbo].[table3] from ' + @fullpath3 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 7, ROWTERMINATOR=''0x0a'')'
exec (@cmd3)
-------------------------------------------
declare @fullpath4 varchar(1000)
select @fullpath4 = '''\\source\FTP4\' + convert(varchar, getdate() - @intFlag, 112) + '_SPGTINFRA_ADJ.SPC'''
declare @cmd4 nvarchar(1000)
select @cmd4 = 'bulk insert [dbo].[table4] from ' + @fullpath4 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 7, ROWTERMINATOR=''0x0a'')'
exec (@cmd4)
SET @intFlag = @intFlag + 1
END
GO
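To make the date arithmetic concrete: style 112 formats a date as yyyymmdd, so with getdate() = 2022-03-02 the first iteration reads \\source\FTP1\20220301_SPGT.SPL. A quick way to preview the paths the loop will generate (a sketch using the same expression as above):

DECLARE @intFlag INT = 1
WHILE (@intFlag <= 48)
BEGIN
    -- prints e.g. \\source\FTP1\20220301_SPGT.SPL, then ...20220228_SPGT.SPL, and so on
    PRINT '\\source\FTP1\' + CONVERT(varchar, GETDATE() - @intFlag, 112) + '_SPGT.SPL'
    SET @intFlag = @intFlag + 1
END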
Cannot open the datafile "CA_SMART_PRO_MEDCLM_74_CSPFacets_202203C1_202202261233_56662.txt"
This error means the Foreach Loop container is handing the connection manager only the file name, not the fully qualified path. Make sure the "Fully qualified" option is selected under Retrieve file name in the container's editor.
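Alternatively, if you keep the "Name and extension" retrieval, you can build the full path in the connection manager's ConnectionString expression from the folder and file name variables already defined in the question, for example:

@[User::File_Folder] + @[User::File_Name]

Either way, the expression has to resolve to a complete path at runtime, not a bare file name.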
I am trying to insert data from multiple CSV files in a single folder into a single table.
I can do a bulk insert for one file using the following code:
USE [dbname]
GO
BULK INSERT tablename
FROM 'path to csv files'
WITH
(
FIRSTROW = 2, -- as 1st one is header
FIELDTERMINATOR = ',', --CSV field delimiter
ROWTERMINATOR = '\n', --Use to shift the control to next row
TABLOCK
)
GO
The first thing I would say is that I personally wouldn't do this in T-SQL; I'd use something better suited to the task, such as the SSIS Multiple Flat Files connection manager. This is possible in T-SQL, but it is not the most robust approach and has limitations (such as which directories can even be accessed).
-- Set the directory to check for files
DECLARE @Directory NVARCHAR(200) = N'C:\Your File Location\';
-- Create a table variable to store the files output from xp_dirtree
DECLARE @Files TABLE (FileName NVARCHAR(255), Depth INT, IsFile BIT);
-- Get the files from the folder
INSERT @Files (FileName, Depth, IsFile)
EXECUTE master..xp_dirtree @Directory, 1, 1;
-- Select the file names and use STRING_AGG to combine them into a single string to execute
DECLARE @SQL NVARCHAR(MAX) =
( SELECT STRING_AGG(CONCAT('BULK INSERT YourTableName FROM ''',
CONCAT(@Directory, f.FileName), '''
WITH (
FIELDTERMINATOR = '','',
ROWTERMINATOR = ''\n'',
FIRSTROW = 1
)'), ';' + CHAR(10))
FROM @Files AS f
WHERE f.IsFile = 1
AND f.FileName LIKE '%.csv' -- Optionally limit files
);
PRINT @SQL;
-- Uncomment the line below once you're happy with the printed SQL
--EXECUTE sp_executesql @SQL;
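For illustration, with two hypothetical files a.csv and b.csv in the folder, the PRINT would emit a batch like this (one BULK INSERT per file, joined by the ';' + CHAR(10) separator):

BULK INSERT YourTableName FROM 'C:\Your File Location\a.csv'
WITH (
FIELDTERMINATOR = ',',
ROWTERMINATOR = '\n',
FIRSTROW = 1
);
BULK INSERT YourTableName FROM 'C:\Your File Location\b.csv'
WITH (
FIELDTERMINATOR = ',',
ROWTERMINATOR = '\n',
FIRSTROW = 1
)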
Or if you want a bit more control over error handling, you could use a cursor to iterate the files:
-- Set the directory to check for files
DECLARE @Directory NVARCHAR(200) = N'C:\Import Location\';
-- Create a table variable to store the files output from xp_dirtree
DECLARE @Files TABLE (FileName NVARCHAR(255), Depth INT, IsFile BIT);
-- Get the files from the folder
INSERT @Files (FileName, Depth, IsFile)
EXECUTE master..xp_dirtree @Directory, 1, 1;
-- Declare a cursor to loop through the files, filtered for .csv
DECLARE FileCursor CURSOR LOCAL STATIC FORWARD_ONLY READ_ONLY
FOR
SELECT FilePath = CONCAT(@Directory, f.FileName)
FROM @Files AS f
WHERE f.IsFile = 1
AND f.FileName LIKE '%.csv'; -- Optionally limit files
OPEN FileCursor;
DECLARE @FilePath NVARCHAR(255);
FETCH NEXT FROM FileCursor INTO @FilePath;
WHILE @@FETCH_STATUS = 0
BEGIN
PRINT @FilePath;
DECLARE @sql NVARCHAR(MAX) =
CONCAT('BULK INSERT YourTableName FROM ''', @FilePath, '''
WITH (
FIELDTERMINATOR = '','',
ROWTERMINATOR = ''\n'',
FIRSTROW = 1
)');
BEGIN TRY
EXECUTE sp_executesql @sql;
END TRY
BEGIN CATCH
-- Do something to handle errors
END CATCH
FETCH NEXT FROM FileCursor INTO @FilePath;
END
CLOSE FileCursor;
DEALLOCATE FileCursor;
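If you want the CATCH block to do more than swallow the failure, a minimal sketch is to log the offending file and carry on (the @Errors table variable is my addition, not part of the original):

-- Declare once, before OPEN FileCursor:
DECLARE @Errors TABLE (FilePath NVARCHAR(255), ErrorMessage NVARCHAR(4000));

-- Then the CATCH block inside the loop becomes:
BEGIN CATCH
    -- Record the failing file and the reason, then continue with the next file
    INSERT @Errors (FilePath, ErrorMessage)
    VALUES (@FilePath, ERROR_MESSAGE());
END CATCH

-- After the loop, review any failures:
SELECT FilePath, ErrorMessage FROM @Errors;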
As I say though, SSIS or another dedicated ETL tool is a better choice.
While trying to use Bulk Insert with SQL server, I am receiving the following error:
Msg 4861, Level 16, State 1, Line 1
Cannot bulk load because the file " C:\Users\Khandokar\Desktop\TEST_2016_beneficiaries.txt " could not be opened. Operating system error code 123(The filename, directory name, or volume label syntax is incorrect.).
I have tried moving the file and using different paths with no success. What might the issue be? Thanks!
CREATE PROCEDURE [dbo].[addBeneficiaries]
@filePath NVARCHAR(MAX)
AS
declare @sql varchar(max)
select @sql = 'BULK INSERT dbo.beneficiaries FROM '' ';
select @sql = @sql + @filePath;
select @sql = @sql + ' '' WITH(
FIRSTROW = 2,
FIELDTERMINATOR = '','',
ROWTERMINATOR = ''\n''
)';
exec(@sql)
EXEC dbo.addBeneficiaries 'C:\Users\Khandokar\Desktop\TEST_2016_beneficiaries.txt'
I'm trying to create a stored procedure to import from CSV. Everything works if I have a hard-coded file path, but I want to take a file path as a parameter. When I try, SQL Server Management Studio generates an error:
Incorrect syntax near '@filePath'.
(In fact, if I put anything but a pure string, e.g. 'C:' + '/dir', it gives an error.)
This is a simplified version of my code:
Create procedure [importFile] (@filePath varchar(max))
AS
BEGIN
create table #Temp
(
row1 int,
row2 varchar(5),
row3 bit
)
BULK insert
#Temp
from @filePath
With(
FIELDTERMINATOR = ',',
ROWTERMINATOR = '\n'
)
...
END
Any explanation?
BULK INSERT only accepts a string literal for the file path in its FROM clause, not a variable or an expression, which is why anything but a pure string fails to parse. The workaround is dynamic SQL: inject the file path variable into a string containing the BULK INSERT statement, then use sp_executesql to execute it. You might want to add some error checking to verify that the path is valid, and so on.
CREATE PROCEDURE [importFile] (@filePath VARCHAR(MAX))
AS
BEGIN
CREATE TABLE #Temp
(
row1 int,
row2 varchar(5),
row3 bit
)
DECLARE @SQL NVARCHAR(MAX) = ''
SET @SQL = N'
BULK INSERT #Temp
FROM ''' + @filePath + '''
WITH (
FIELDTERMINATOR = '','',
ROWTERMINATOR = ''\n''
)'
-- ...
EXEC sp_executesql @SQL
END
-- to run it:
EXEC importFile 'd:\test.csv'
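As for the error checking mentioned above, here is a minimal sketch that verifies the file exists before building the statement and escapes any embedded quotes. Note that xp_fileexist is an undocumented (though widely used) system procedure, and the procedure name here is hypothetical, so treat this as an illustration rather than a guaranteed API:

CREATE PROCEDURE [importFileChecked] (@filePath VARCHAR(260))
AS
BEGIN
    CREATE TABLE #Temp (row1 int, row2 varchar(5), row3 bit)
    DECLARE @exists INT
    -- xp_fileexist returns 1 in the OUTPUT parameter when the file is
    -- visible to the SQL Server service account
    EXEC master.dbo.xp_fileexist @filePath, @exists OUTPUT
    IF @exists <> 1
    BEGIN
        RAISERROR('File %s not found or not accessible.', 16, 1, @filePath)
        RETURN
    END
    -- REPLACE doubles any embedded single quotes in the path
    DECLARE @SQL NVARCHAR(MAX) =
        N'BULK INSERT #Temp FROM ''' + REPLACE(@filePath, '''', '''''') +
        N''' WITH (FIELDTERMINATOR = '','', ROWTERMINATOR = ''\n'')'
    EXEC sp_executesql @SQL
END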
I have this code in an SP that sets up a bulk insert:
begin try
declare @sentFile nvarchar(255)
declare @bulk_cmd nvarchar(1000) = ''
declare @loadDate nvarchar(8) = Convert(nvarchar(8), @p_loadDate) -- @p_loadDate is char(8)
set @StrImportFolder = N'D:\EMVImports\'
set @sentFile = @StrImportFolder + N'etl_rnli_sent_' + @loadDate + N'.txt'
SET @bulk_cmd = N'BULK INSERT loadSent
FROM ''' + @sentFile + N'''
WITH (
FIRSTROW = 2
,formatfile=''D:\EMVScripts\Sent_Format.xml''
)'
Print @bulk_cmd
EXECUTE sp_executesql @bulk_cmd
-- more stuff happens here
end try
Inside my stored procedure, this fails with this error:
Cannot fetch a row from OLE DB provider "BULK" for linked server "(null)".
But the code printed out:
BULK INSERT loadSent
FROM 'D:\EMVImports\etl_sent_20130529.txt'
WITH (
FIRSTROW = 2
,formatfile='D:\EMVScripts\Sent_Format.xml'
)
works like a charm. I've no idea why it fails under sp_executesql.
I am using a very similar query, and it is working.
DECLARE @filepath nvarchar(500)
SET @filepath = N'e:\5-digit Commercial.csv'
DECLARE @bulkinsert NVARCHAR(2000)
SET @bulkinsert =
N'BULK INSERT ZIPCodes FROM ''' +
@filepath +
N''' WITH (FIRSTROW = 2, FIELDTERMINATOR = '','', ROWTERMINATOR = ''\n'')'
EXEC sp_executesql @bulkinsert
How do you set the value of @sentFile?
We cannot set the FROM file path dynamically for BULK INSERT.
You are generating the path dynamically:
set @sentFile = @StrImportFolder + N'etl_rnli_sent_' + @loadDate + N'.txt'
Here @loadDate is a variable component of the file name.
The working example given above uses a fixed path, even though it sits in a variable:
SET @filepath = N'e:\5-digit Commercial.csv'
Here the variable holds the same fixed path in every case.
So try to use a pre-defined file path.
I am looking for a quick-and-dirty way to import CSV files into SQL Server without having to create the table beforehand and define its columns.
Each imported CSV would be imported into its own table.
We are not concerned about data-type inferencing. The CSVs vary in structure and layout, and all of them have many, many columns, yet we are only concerned with a few: street addresses and ZIP codes. We just want to get the CSV data into the SQL database quickly and extract the relevant columns.
I'd like to supply the FieldTerminator and RowTerminator, point it at the CSV, and have the utility do the rest. Is there any way to create the table and populate it, all in one step, using BULK INSERT and/or OpenRowset(BULK ... ) ?
Referencing SQLServerPedia, I think this will work:
sp_configure 'show advanced options', 1;
RECONFIGURE;
GO
sp_configure 'Ad Hoc Distributed Queries', 1;
RECONFIGURE;
GO
select TerritoryID
,TotalSales
,TotalCost
INTO CSVImportTable
from openrowset('MSDASQL'
,'Driver={Microsoft Access Text Driver (*.txt, *.csv)}'
,'select * from C:\csvtest.CSV')
Annoyingly, I don't have the rep points yet to just comment, so I'll add an answer based on TyT's (that handle looks terrible in possessive, btw ...).
The worker code needed a double "\\" instead of a single one for me to avoid a "file not found" error. And you don't have to specify the fields; they are inferred from the first row of the file:
select *
into CsvImportTable
from openrowset(
'MSDASQL',
'Driver={Microsoft Access Text Driver (*.txt, *.csv)}',
'select * from C:\\csvtestfile.csv')
I had no problems with the Access driver.
UPDATE: If you have trouble with the types being inferred incorrectly, insert a few rows at the top of the file with data of the type you want in the table, so that you get, say, text -> VARCHAR instead of text -> INT, and then delete those rows after the import.
As the final icing, add a PK to the table so you can manipulate the data - delete the dummy rows, etc:
alter table CsvImportTable add Id int identity(1, 1)
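For example, if you prepended three dummy rows to force the types, you can now remove them by Id (assuming the identity values were assigned in file order, which is likely but not strictly guaranteed):

delete from CsvImportTable where Id <= 3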
Updated answer if you're using SQL Server Management Studio 17.
Right click on Database -> Tasks -> Import Flat File...
It will automatically infer the first row of the data as the column names. It should automatically pick up the terminators. You will get the option to set primary keys, allowing nulls, and specify data types for the columns as well.
Putting all the .CSV files in a folder and running this works well for me.
IF OBJECT_ID('dbo.ConfigFile', 'u') IS NOT NULL DROP TABLE [dbo].[ConfigFile];
IF OBJECT_ID('tempdb..#columns', 'u') IS NOT NULL DROP TABLE #columns;
CREATE TABLE ConfigFile (Path VARCHAR(255), FileName VARCHAR(255));
DECLARE @filename VARCHAR(255)
, @path VARCHAR(255)
, @cmd VARCHAR(8000);
SET @path = 'C:\FTP_DATA\Netscout\test\'; -- PATH TO YOUR CSV FILES (CHANGE TO YOUR PATH)
SET @cmd = 'dir ' + @path + '*.csv /b';
INSERT INTO ConfigFile (FileName)
EXEC Master..xp_cmdShell @cmd;
DELETE FROM ConfigFile WHERE FileName IS NULL;
UPDATE ConfigFile SET Path = @path WHERE Path IS NULL;
DECLARE cur CURSOR
FOR SELECT Path
, FileName
FROM ConfigFile
WHERE FileName LIKE '%.csv%'
OPEN cur
FETCH NEXT FROM cur INTO @path
, @filename
WHILE @@fetch_status <> -1
BEGIN
CREATE TABLE #columns (HeadString NVARCHAR(MAX))
DECLARE @Columns NVARCHAR(MAX) = ''
DECLARE @Query NVARCHAR(MAX) = ''
DECLARE @QUERY2 NVARCHAR(MAX) = ''
DECLARE @HeaderQuery NVARCHAR(MAX) = ''
SELECT @HeaderQuery = 'BULK INSERT #columns FROM ''' + @path + @filename + ''' WITH(firstrow=1,lastrow=1)';
EXEC (@HeaderQuery);
SELECT @Columns = (
SELECT QUOTENAME(value) + ' nvarchar(max)' + ','
FROM #columns
CROSS APPLY STRING_SPLIT(HeadString, ',') FOR XML PATH('')
)
IF ISNULL(@Columns, '') <> ''
BEGIN
SET @Columns = LEFT(@Columns, LEN(@Columns) - 1)
SELECT @Query = 'IF OBJECT_ID(''dbo.[' + REPLACE(@filename, '.csv', '') + ']'', ''u'') IS NOT NULL DROP TABLE [' + REPLACE(@filename, '.csv', '') + ']'
EXEC (@Query)
SELECT @Query = 'CREATE TABLE [' + REPLACE(@filename, '.csv', '') + '] (' + REPLACE(@Columns, '"', '') + ')'
EXEC (@Query)
END
SELECT @QUERY2 = 'BULK INSERT [' + REPLACE(@filename, '.csv', '') + '] FROM ''' + @path + @filename + '''
WITH(firstrow=2,CODEPAGE = ''65001'',FORMAT=''csv'',FIELDTERMINATOR='','',ROWTERMINATOR=''\n'')';
EXEC (@QUERY2);
DROP TABLE #columns
FETCH NEXT FROM cur INTO @path
, @filename;
END;
CLOSE cur;
DEALLOCATE cur;
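Note that this script depends on xp_cmdshell to list the directory, and xp_cmdshell is disabled by default. You would first need to enable it (requires appropriate server-level permissions):

EXEC sp_configure 'show advanced options', 1;
RECONFIGURE;
EXEC sp_configure 'xp_cmdshell', 1;
RECONFIGURE;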