Export Outlook calendar event using AppleScript

I'm trying to export calendar events to a file in .ics format, but I get an error:
property byCategory : "By category"
property byPattern : "Names matching pattern"
property icsFormat : "VCalendar"
property outputFolderPath : the path to the desktop

-------------------------------------------
-- main
--
set calendarToExport to {}
-- Get the calendar selection
set calendarList to my ListCalendars()
display alert (count of calendarList)
if the (count of calendarList) is greater than 1 then
    -- build calendar listing
    set displayList to {}
    repeat with c from 1 to the count of calendarList
        set nextCal to calendarList's item c
        set the end of displayList to {(c as text) & " - \"" & nextCal's cName & "\" (" & nextCal's cKind & ")"}
        --display alert (c as text) & " - \"" & nextCal's cName & "\" (" & nextCal's cKind & ")"
    end repeat
    -- ask user to choose
    set dResult to choose from list displayList with title "Choose Calendar(s)" with prompt "List Of Calendar:" OK button name "Export" with multiple selections allowed
    -- generate chosen list
    set chosenList to {}
    repeat with nextItem in dResult
        set the end of chosenList to (the first word of (nextItem as text) as number)
    end repeat
    if chosenList is not {} then
        -- process each calendar chosen by the user
        repeat with nextChoice in chosenList
            set nextCal to calendarList's item nextChoice
            set calendarToExport to ProcessCalendar(nextCal's cID, nextCal's cName, nextCal's cKind)
            display alert (count of calendarToExport)
            export_to_vcard(calendarToExport)
        end repeat
    end if
end if
--set calendarToExport to get_calendar_to_export()
--display alert (count of calendarToExport)
--if (count of calendarToExport) is 0 then
--    display alert "Please select calendar to export and rerun script" as warning
--    return
--end if
-- Shall we export to ics?
set theFormat to "VCalendar"
if theFormat is icsFormat then
    -- export_to_ics(calendarToExport, theFormat)
else
    display alert "Invalid format" as warning
end if
return
-------------------------------------------
-- ProcessCalendar()
--
--
on ProcessCalendar(calID, calName, calKind)
    tell application "Microsoft Outlook"
        set thisCalendar to calendar id calID
        set subjectList to every calendar event of thisCalendar
    end tell
    return subjectList
end ProcessCalendar
on ListCalendars()
    set cals to {}
    tell application "Microsoft Outlook"
        repeat with nextCal in the every calendar
            set the end of cals to {cID:nextCal's id, cName:nextCal's name, cKind:nextCal's class}
        end repeat
    end tell
    return cals
end ListCalendars
on get_calendar_to_export()
    set selectedCalendar to {}
    tell application "Microsoft Outlook"
        set theSelection to my ListCalendars() -- "my" keeps the call from being sent to Outlook
        set testval to theSelection as string
        display alert testval
        if class of theSelection is list then
            if class of the first item of theSelection is calendar then
                copy theSelection to selectedCalendar
            end if
        else
            if class of theSelection is calendar then
                copy theSelection to selectedCalendar
            end if
        end if
        --display alert selectedCalendar as text
        return selectedCalendar
    end tell
end get_calendar_to_export
-------------------------------------------
-------------------------------------------
-- export_to_vcard()
--
--
on export_to_vcard(theCalendar)
    set vcalendars to {}
    tell application "Microsoft Outlook"
        -- set myCounter to 1
        repeat with aCalendar in theCalendar
            -- set fileNew to ("a:mycountfinal.ics")
            copy vevent of aCalendar to the end of vcalendars
        end repeat
    end tell
    set myCounter to 1
    repeat with aCard in vcalendars
        set fileNew to ("a:MyCont" & myCounter as string) & ".ics"
        set this_story to aCard
        set this_file to (((path to desktop folder) as text) & fileNew)
        my write_to_file(this_story, this_file, true)
        set myCounter to myCounter + 1
    end repeat
    display alert "Complete"
end export_to_vcard
on write_to_file(this_data, target_file, append_data) -- (string, file path as string, boolean)
    try
        set the target_file to the target_file as text
        set the open_target_file to ¬
            open for access file target_file with write permission
        if append_data is false then ¬
            set eof of the open_target_file to 0
        write this_data to the open_target_file starting at eof
        close access the open_target_file
        return true
    on error
        try
            close access file target_file
        end try
        return false
    end try
end write_to_file
-------------------------------------------
-- replace_text()
on replace_text(sourceStr, searchString, replaceString)
    set searchStr to (searchString as text)
    set replaceStr to (replaceString as text)
    set sourceStr to (sourceStr as text)
    set saveDelims to AppleScript's text item delimiters
    set AppleScript's text item delimiters to (searchString)
    set theList to (every text item of sourceStr)
    set AppleScript's text item delimiters to (replaceString)
    set theString to theList as string
    set AppleScript's text item delimiters to saveDelims
    return theString
end replace_text
Error:
Microsoft Outlook got an error: can't make vevent of calendar event id 5680 into type specifier

OK, so vevent is actually a list of calendar events, so it looks like you just need to loop through those and copy them to the new list:
-------------------------------------------
-------------------------------------------
-- export_to_vcard()
--
--
on export_to_vcard(theCalendar)
    set vcalendars to {}
    tell application "Microsoft Outlook"
        -- set myCounter to 1
        repeat with anEvent in theCalendar
            -- set fileNew to ("a:mycountfinal.ics")
            copy anEvent to the end of vcalendars
        end repeat
    end tell
    set myCounter to 1
    repeat with aCard in vcalendars
        set fileNew to ("a:MyCont" & myCounter as string) & ".ics"
        set this_story to aCard
        set this_file to (((path to desktop folder) as text) & fileNew)
        my write_to_file(this_story, this_file, true)
        set myCounter to myCounter + 1
    end repeat
    display alert "Complete"
end export_to_vcard

Related

Why does my Applescript loop fail almost all the time? Oddly, it works sometimes

The code below is used to run against a sample text in TextEdit. There are about 14 iterations, and the loop can fail on any one of them. Below is an updated, simplified version of the script. It's still giving me the same problem.
Compare the code in the new image with the dialog result
set AppleScript's text item delimiters to ":"
tell application "TextEdit"
    activate
    tell the front document
        set nameList to the first paragraph
        set remindersList to {}
        set lineCount to count the paragraphs
        repeat with i from 2 to lineCount
            set reminderName to paragraph i
            set end of remindersList to reminderName
        end repeat
    end tell
end tell
tell application "Reminders"
    set newList to make new list
    set name of newList to nameList
    set reminderDate to date
    set listAccount to account
    lineCount = lineCount - 2
    repeat with i from 1 to lineCount
        tell list nameList
            set newremin to make new reminder
            set reminderName to item i of remindersList
            set {remName, remBody1, remBody2} to {text item 1, text item 2, text item 3} of reminderName
            set name of newremin to remName
            set body of newremin to remBody1 & return & remBody2
            set due date of newremin to (current date) + (1 * days)
        end tell
    end repeat
end tell
set AppleScript's text item delimiters to ""

Comboboxes visual basic 6

I have multiple comboboxes where users have the option of selecting an item. If no item is selected, I insert NULL into SQL Server:
If cboSchool.Text = "" Then
    g_strSQL = g_strSQL & "NULL,"
Else
    g_strSQL = g_strSQL & "'" & cboSchool.ItemData(cboSchool.ListIndex) & "',"
End If
My problem is as follows: later on, I allow the user to edit information they previously didn't select. So when they come back to edit it, the application needs to work out whether there was a value in the table in SQL Server, compare it with the value now selected, and update the table if the two differ. This is what my code looks like for the edit:
If g_RS!SchoolID <> cboSchool.ItemData(cboSchool.ListIndex) Then
    g_strSQL2 = g_strSQL2 & " SchoolID = '" & cboSchool.ItemData(cboSchool.ListIndex) & "',"
End If
The problem I'm seeing is that g_RS!SchoolID comes back as NULL; the code doesn't notice that the value is different from cboSchool.ItemData and just skips to the end of the If statement. I don't understand how it doesn't see the difference.
If Val("" & g_RS!SchoolID) <> cboSchool.ItemData(cboSchool.ListIndex) Then
It's because of the null value that it doesn't see a difference.
You could have:
If g_RS!SchoolID <> "Hello world!" Then
    g_strSQL2 = "DROP TABLE X"
Else
    MsgBox ("g_RS!SchoolID = Hello world!")
End If
and as long as SchoolID is NULL you'd get a message box.
You could fix this with something like:
If IIf(IsNull(g_RS!SchoolID), "", g_RS!SchoolID) <> cboSchool.ItemData(cboSchool.ListIndex) Then
    'do stuff
End If

SqlHelper.ExecuteReader() throws an exception when no results found

I have an SqlDataReader that is declared like this:
Dim myReader As SqlDataReader
myReader = SqlHelper.ExecuteReader(ConnectionString, "storedProcedure1", CInt(myTextBox.Text))
Later I use the results like this:
If myReader.HasRows Then
    While myReader.Read()
        Row = Table1.NewRow()
        Row.Item("REF") = myReader.GetString(0)
        Row.Item("CD") = myReader.GetString(1)
        Row.Item("NAME") = myReader.GetString(2)
        Row.Item("KEY") = myReader.GetDecimal(3)
        Row.Item("STRING") = myReader.GetString(0) & " - " & myReader.GetString(1) & " - " & myReader.GetString(2).ToString().Replace("'", "") & " - " & myReader.GetString(4).ToString().Replace("'", "")
        Table1.Rows.Add(Row)
        'Fill Drop Down
        drpMenu.Items.Add(New ListItem(myReader.GetString(0) & " - " & myReader.GetString(1) & " - " & myReader.GetString(2).ToString().Replace("'", "") & " - " & myReader.GetString(4).ToString().Replace("'", "")))
    End While
End If
myTextBox is a textbox into which the user enters a possible location number, which then gets searched for using the stored procedure. If the user enters a valid location number, this works great and I have no problems. If they enter a non-existent location number, I get an exception:
System.IndexOutOfRangeException: Index was outside the bounds of the array.
I would think that the If myReader.HasRows line would keep me from trying to read and manipulate results that don't exist, but there must be something I'm missing. myTextBox is already being validated elsewhere in the code to make sure the user typed in an integer without any wacky characters, so bad input doesn't seem to be the problem either.
How do I find out whether the location number exists before calling SqlHelper.ExecuteReader()? Or maybe the better question is how do I gracefully handle this exception and tell the user the location wasn't found?
EDIT: Here's the stored procedure.
ALTER PROCEDURE [dbo].[storedProcedure]
    -- Add the parameters for the stored procedure here
    @MBR as integer
AS
BEGIN
    EXEC ('{CALL RM#IMLIB.spGETLOC( ?)}', @MBR) at AS400
END
EDIT #2: When I run the stored procedure in SSMS and pass a valid location, it returns what I expect. If I pass an invalid location number, it returns nothing.
First, create a local variable and cast the textbox value to it, to make sure that isn't the error:
Dim myValue As Int32
myValue = CInt(myTextBox.Text) ' convert textbox value to an int
Second, my DataReader code typically looks like this:
If (Not (reader Is Nothing)) Then
    If reader.HasRows Then
        Do While reader.Read()
            Console.WriteLine(reader.GetInt32(0) _
                & vbTab & reader.GetString(1))
        Loop
    End If
End If
(You'll have to adjust the GetInt32 or GetString calls and the ordinal numbers to your specific case, of course.)
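Adapted to the question's code, a minimal sketch might look like this (lblStatus is a hypothetical label for the "not found" message; everything else uses names from the question):
' Minimal sketch: treat "no result or no rows" as "location not found".
Dim myReader As SqlDataReader = SqlHelper.ExecuteReader(ConnectionString, "storedProcedure1", CInt(myTextBox.Text))
If (myReader Is Nothing) OrElse (Not myReader.HasRows) Then
    lblStatus.Text = "Location not found." ' lblStatus is hypothetical
Else
    While myReader.Read()
        ' ... populate Table1 and drpMenu exactly as in the original code ...
    End While
End If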
My guess is that your child-procedure (RM#IMLIB.spGETLOC) has logic in it that does NOT return a row if there isn't a match.
You can still return a result that has no rows in it. (Read that again.)
For example
Select ColA, ColB from dbo.MyTable where 0=1
This will return a result, with no rows. That is different from not returning any select statement at all. (Read that again.)
My guess is that this little nuance is where you get an issue.
APPEND:
If you cannot change the child-stored procedure....
Create a #TempTable...
Populate the #TempTable with the child-stored procedure.
Do a select from the #TempTable.
Here is a generic example:
IF OBJECT_ID('tempdb..#TempOrders') IS NOT NULL
BEGIN
    DROP TABLE #TempOrders
END

CREATE TABLE #TempOrders
(
    ColumnA int
    , [ColumnB] nchar(5)
)

/* Note, your #temp table must have the exact same columns as returned by the child procedure */

INSERT INTO #TempOrders ( ColumnA, ColumnB )
EXEC dbo.uspChildProcedure ParameterOne

SELECT ColumnA, ColumnB FROM #TempOrders

IF OBJECT_ID('tempdb..#TempOrderDetails') IS NOT NULL
BEGIN
    DROP TABLE #TempOrderDetails
END
I finally figured out my issue.
Later in my code, I had a DataRow array that was empty when the location couldn't be found. I was getting the exception because the code tried to grab an Item from array(0), which makes complete sense.
I missed it initially because I thought it was a DataRow, not a DataRow array. Man, I hate fixing up code I didn't write...
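For anyone else who hits this, the guard is simple. A minimal sketch (the filter string and the label are hypothetical, just to show the shape):
' Minimal sketch: check the DataRow array's length before indexing it.
Dim foundRows() As DataRow = Table1.Select("REF = '12345'") ' hypothetical filter
If foundRows.Length = 0 Then
    ' Empty array - report "not found" instead of touching foundRows(0)
    lblStatus.Text = "Location not found." ' lblStatus is hypothetical
Else
    Dim firstMatch As DataRow = foundRows(0)
    ' ... use firstMatch ...
End If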

Select all in current view in FileMaker Pro 12

I have a FileMaker Pro 12 database in which I can sort and export selections using checkboxes. I have a script to "Select none" that removes all the ticked items, but I would also like to be able to search and then check all the results.
I have a button on the checkbox image that performs a Set field and the following:
Case (
ValueCount ( FilterValues ( Table::Checkbox ; Table::ID ) ) > 0;
Substitute ( Table::Checkbox ; Table::ID & ¶ ; "" ) ;
Table::Checkbox & Table::ID & ¶
)
Conditional formatting of the checkbox is:
not ValueCount ( FilterValues ( Table::Checkbox ; Table::ID ) ) > 0
The script for "Select none" is:
Set Field [Table::Checkbox; ""]
So what would the "Select all" script need to be?
There are quite a few methods to collect values across a found set into a return-delimited list. Looping is fine if your found set is fairly small; otherwise it may prove too slow.
Since your target is a global field anyway, you could use a simple:
Replace Field Contents [ No dialog; Table::gCheckbox; List ( Table::gCheckbox ; Table::ID ) ]
This will append the current found-set's values to the existing list. To start anew, begin your script by:
Set Field [ Table::gCheckbox; "" ]
Note:
In version 13, you can use the new summary field option of "List".
Caveat:
Make sure you have a backup while you experiment with Replace Field Contents[]; there is no undo.
You could write a script to walk through the current found set and get all of the IDs:
Set Variable [$currentRecord ; Get(RecordNumber)]
#
Go to Record [First]
Loop
    Set Variable [$ids ; Table::ID & ¶ & $ids ]
    Go to Record [Next ; Exit after Last]
End Loop
#
Go to Record [By Calculation ; $currentRecord]
#
Set Field [Table::Checkbox ; $ids ]
This method would save your current position, walk through the current found set, compile the ids into a variable, return you to your position, and set the checkbox field.

SQL Server Bulk insert of CSV file with inconsistent quotes

Is it possible to BULK INSERT (SQL Server) a CSV file in which the fields are only OCCASIONALLY surrounded by quotes? Specifically, quotes only surround those fields that contain a ",".
In other words, I have data that looks like this (the first row contains headers):
id, company, rep, employees
729216,INGRAM MICRO INC.,"Stuart, Becky",523
729235,"GREAT PLAINS ENERGY, INC.","Nelson, Beena",114
721177,GEORGE WESTON BAKERIES INC,"Hogan, Meg",253
Because the quotes aren't consistent, I can't use '","' as a delimiter, and I don't know how to create a format file that accounts for this.
I tried using ',' as a delimiter and loading it into a temporary table where every column is a varchar, then using some kludgy processing to strip out the quotes, but that doesn't work either, because the fields that contain ',' are split into multiple columns.
Unfortunately, I don't have the ability to manipulate the CSV file beforehand.
Is this hopeless?
Many thanks in advance for any advice.
By the way, I saw this post, SQL bulk import from csv, but in that case, EVERY field was consistently wrapped in quotes. So, in that case, he could use ',' as a delimiter, then strip out the quotes afterwards.
It isn't possible to do a bulk insert for this file. From MSDN:
To be usable as a data file for bulk import, a CSV file must comply with the following restrictions:
Data fields never contain the field terminator.
Either none or all of the values in a data field are enclosed in quotation marks ("").
(http://msdn.microsoft.com/en-us/library/ms188609.aspx)
Some simple text processing should be all that's required to get the file ready for import. Alternatively, your users could be required either to format the file according to these guidelines or to use something other than a comma as a delimiter (e.g. |).
You are going to need to preprocess the file, period.
If you really really need to do this, here is the code. I wrote this because I absolutely had no choice. It is utility code and I'm not proud of it, but it works. The approach is not to get SQL to understand quoted fields, but instead manipulate the file to use an entirely different delimiter.
EDIT: Here is the code in a github repo. It's been improved and now comes with unit tests! https://github.com/chrisclark/Redelim-it
This function takes an input file and will replace all field-delimiting commas (NOT commas inside quoted-text fields, just the actual delimiting ones) with a new delimiter. You can then tell SQL Server to use the new field delimiter instead of a comma. In the version of the function here, the placeholder is <*TMP*> (I feel confident this will not appear in the original csv - if it does, brace for explosions).
Therefore, after running this function, you import into SQL by doing something like:
BULK INSERT MyTable
FROM 'C:\FileCreatedFromThisFunction.csv'
WITH
(
    FIELDTERMINATOR = '<*TMP*>',
    ROWTERMINATOR = '\n'
)
And without further ado, the terrible, awful function that I apologize in advance for inflicting on you (edit - I've posted a working program that does this instead of just the function on my blog here):
Private Function CsvToOtherDelimiter(ByVal InputFile As String, ByVal OutputFile As String) As Integer

    Dim PH1 As String = "<*TMP*>"

    Dim objReader As StreamReader = Nothing
    Dim count As Integer = 0 'This will also serve as a primary key
    Dim sb As New System.Text.StringBuilder

    Try
        objReader = New StreamReader(File.OpenRead(InputFile), System.Text.Encoding.Default)
    Catch ex As Exception
        UpdateStatus(ex.Message) 'UpdateStatus and hasHeaders come from the original utility's UI
    End Try

    If objReader Is Nothing Then
        UpdateStatus("Invalid file: " & InputFile)
        count = -1
        Exit Function
    End If

    'grab the first line
    Dim line = objReader.ReadLine()
    'and advance to the next line b/c the first line is column headings
    If hasHeaders Then
        line = Trim(objReader.ReadLine)
    End If

    While Not String.IsNullOrEmpty(line) 'loop through each line
        count += 1

        'Replace commas with our custom-made delimiter
        line = line.Replace(",", PH1)

        'Find a quoted part of the line, which could legitimately contain commas.
        'In that case we will need to identify the quoted section and swap commas back in for our custom placeholder.
        Dim starti = line.IndexOf(PH1 & """", 0)
        If line.IndexOf("""", 0) = 0 Then starti = 0

        While starti > -1 'loop through quoted fields
            Dim FieldTerminatorFound As Boolean = False

            'Find end quote token (originally a ",)
            Dim endi As Integer = line.IndexOf("""" & PH1, starti)

            If endi < 0 Then
                FieldTerminatorFound = True
                If endi < 0 Then endi = line.Length - 1
            End If

            While Not FieldTerminatorFound
                'Find any more quotes that are part of that sequence, if any
                Dim backChar As String = """" 'thats one quote
                Dim quoteCount = 0
                While backChar = """"
                    quoteCount += 1
                    backChar = line.Chars(endi - quoteCount)
                End While

                If quoteCount Mod 2 = 1 Then 'odd number of quotes. real field terminator
                    FieldTerminatorFound = True
                Else 'keep looking
                    endi = line.IndexOf("""" & PH1, endi + 1)
                End If
            End While

            'Grab the quoted field from the line, now that we have the start and ending indices
            Dim source = line.Substring(starti + PH1.Length, endi - starti - PH1.Length + 1)

            'And swap the commas back in
            line = line.Replace(source, source.Replace(PH1, ","))

            'Find the next quoted field
            ' If endi >= line.Length - 1 Then endi = line.Length 'During the swap, the length of line shrinks so an endi value at the end of the line will fail
            starti = line.IndexOf(PH1 & """", starti + PH1.Length)
        End While

        sb.AppendLine(line) 'this line was missing in the original post; see the console version further down
        line = objReader.ReadLine
    End While

    objReader.Close()
    SaveTextToFile(sb.ToString, OutputFile)
    Return count
End Function
I found the answer by Chris very helpful, but I wanted to run it from within SQL Server using T-SQL (and not using CLR), so I converted his code to T-SQL. But then I took it one step further by wrapping everything up in a stored procedure that does the following:
use bulk insert to initially import the CSV file
clean up the lines using Chris's code
return the results in a table format
For my needs, I further cleaned up the lines by removing quotes around values and converting two double quotes to one double quote (I think that's the correct method).
CREATE PROCEDURE SSP_CSVToTable
    -- Add the parameters for the stored procedure here
    @InputFile nvarchar(4000)
    , @FirstLine int
AS
BEGIN
    -- SET NOCOUNT ON added to prevent extra result sets from
    -- interfering with SELECT statements.
    SET NOCOUNT ON;

    --convert the CSV file to a table
    --clean up the lines so that commas are handled correctly

    DECLARE @sql nvarchar(4000)
    DECLARE @PH1 nvarchar(50)
    DECLARE @LINECOUNT int -- This will also serve as a primary key
    DECLARE @CURLINE int
    DECLARE @Line nvarchar(4000)
    DECLARE @starti int
    DECLARE @endi int
    DECLARE @FieldTerminatorFound bit
    DECLARE @backChar nvarchar(4000)
    DECLARE @quoteCount int
    DECLARE @source nvarchar(4000)
    DECLARE @COLCOUNT int
    DECLARE @CURCOL int
    DECLARE @ColVal nvarchar(4000)

    -- new delimiter
    SET @PH1 = '†'

    -- create single column table to hold each line of file
    CREATE TABLE [#CSVLine]([line] nvarchar(4000))

    -- bulk insert into temp table
    -- cannot use variable path with bulk insert
    -- so we must run using dynamic sql
    SET @sql = 'BULK INSERT #CSVLine
    FROM ''' + @InputFile + '''
    WITH
    (
    FIRSTROW=' + CAST(@FirstLine as varchar) + ',
    FIELDTERMINATOR = ''\n'',
    ROWTERMINATOR = ''\n''
    )'

    -- run dynamic statement to populate temp table
    EXEC(@sql)

    -- get number of lines in table
    SET @LINECOUNT = @@ROWCOUNT

    -- add identity column to table so that we can loop through it
    ALTER TABLE [#CSVLine] ADD [RowId] [int] IDENTITY(1,1) NOT NULL

    IF @LINECOUNT > 0
    BEGIN
        -- cycle through each line, cleaning each line
        SET @CURLINE = 1
        WHILE @CURLINE <= @LINECOUNT
        BEGIN
            -- get current line
            SELECT @Line = line
            FROM #CSVLine
            WHERE [RowId] = @CURLINE

            -- Replace commas with our custom-made delimiter
            SET @Line = REPLACE(@Line, ',', @PH1)

            -- Find a quoted part of the line, which could legitimately contain commas.
            -- In that case we will need to identify the quoted section and swap commas back in for our custom placeholder.
            SET @starti = CHARINDEX(@PH1 + '"', @Line, 0)
            IF CHARINDEX('"', @Line, 0) = 0 SET @starti = 0

            -- loop through quoted fields
            WHILE @starti > 0
            BEGIN
                SET @FieldTerminatorFound = 0

                -- Find end quote token (originally a ",)
                SET @endi = CHARINDEX('"' + @PH1, @Line, @starti) -- sLine.IndexOf("""" & PH1, starti)
                IF @endi < 1
                BEGIN
                    SET @FieldTerminatorFound = 1
                    IF @endi < 1 SET @endi = LEN(@Line) - 1
                END

                WHILE @FieldTerminatorFound = 0
                BEGIN
                    -- Find any more quotes that are part of that sequence, if any
                    SET @backChar = '"' -- thats one quote
                    SET @quoteCount = 0
                    WHILE @backChar = '"'
                    BEGIN
                        SET @quoteCount = @quoteCount + 1
                        SET @backChar = SUBSTRING(@Line, @endi - @quoteCount, 1) -- sLine.Chars(endi - quoteCount)
                    END

                    IF (@quoteCount % 2) = 1
                    BEGIN
                        -- odd number of quotes. real field terminator
                        SET @FieldTerminatorFound = 1
                    END
                    ELSE
                    BEGIN
                        -- keep looking
                        SET @endi = CHARINDEX('"' + @PH1, @Line, @endi + 1) -- sLine.IndexOf("""" & PH1, endi + 1)
                    END
                END

                -- Grab the quoted field from the line, now that we have the start and ending indices
                SET @source = SUBSTRING(@Line, @starti + LEN(@PH1), @endi - @starti - LEN(@PH1) + 1)
                -- sLine.Substring(starti + PH1.Length, endi - starti - PH1.Length + 1)

                -- And swap the commas back in
                SET @Line = REPLACE(@Line, @source, REPLACE(@source, @PH1, ','))
                -- sLine.Replace(source, source.Replace(PH1, ","))

                -- Find the next quoted field
                -- If endi >= line.Length - 1 Then endi = line.Length 'During the swap, the length of line shrinks so an endi value at the end of the line will fail
                SET @starti = CHARINDEX(@PH1 + '"', @Line, @starti + LEN(@PH1))
                -- sLine.IndexOf(PH1 & """", starti + PH1.Length)
            END

            -- get table based on current line
            IF OBJECT_ID('tempdb..#Line') IS NOT NULL
                DROP TABLE #Line

            -- converts a delimited list into a table
            SELECT *
            INTO #Line
            FROM dbo.iter_charlist_to_table(@Line, @PH1)

            -- get number of columns in line
            SET @COLCOUNT = @@ROWCOUNT

            -- dynamically create CSV temp table to hold CSV columns and lines
            -- only need to create once
            IF OBJECT_ID('tempdb..#CSV') IS NULL
            BEGIN
                -- create initial structure of CSV table
                CREATE TABLE [#CSV]([Col1] nvarchar(100))

                -- dynamically add a column for each column found in the first line
                SET @CURCOL = 1
                WHILE @CURCOL <= @COLCOUNT
                BEGIN
                    -- first column already exists, don't need to add
                    IF @CURCOL > 1
                    BEGIN
                        -- add field
                        SET @sql = 'ALTER TABLE [#CSV] ADD [Col' + CAST(@CURCOL as varchar) + '] nvarchar(100)'
                        --print @sql
                        -- this adds the fields to the temp table
                        EXEC(@sql)
                    END
                    -- go to next column
                    SET @CURCOL = @CURCOL + 1
                END
            END

            -- build dynamic sql to insert current line into CSV table
            SET @sql = 'INSERT INTO [#CSV] VALUES('

            -- loop through line table, dynamically adding each column value
            SET @CURCOL = 1
            WHILE @CURCOL <= @COLCOUNT
            BEGIN
                -- get current column
                SELECT @ColVal = str
                FROM #Line
                WHERE listpos = @CURCOL

                IF LEN(@ColVal) > 0
                BEGIN
                    -- remove quotes from beginning if exist
                    IF LEFT(@ColVal, 1) = '"'
                        SET @ColVal = RIGHT(@ColVal, LEN(@ColVal) - 1)

                    -- remove quotes from end if exist
                    IF RIGHT(@ColVal, 1) = '"'
                        SET @ColVal = LEFT(@ColVal, LEN(@ColVal) - 1)
                END

                -- write column value
                -- make value sql safe by replacing single quotes with two single quotes
                -- also, replace two double quotes with a single double quote
                SET @sql = @sql + '''' + REPLACE(REPLACE(@ColVal, '''', ''''''), '""', '"') + ''''

                -- add comma separator except for the last record
                IF @CURCOL <> @COLCOUNT
                    SET @sql = @sql + ','

                -- go to next column
                SET @CURCOL = @CURCOL + 1
            END

            -- close sql statement
            SET @sql = @sql + ')'
            --print @sql

            -- run sql to add line to table
            EXEC(@sql)

            -- move to next line
            SET @CURLINE = @CURLINE + 1
        END
    END

    -- return CSV table
    SELECT * FROM [#CSV]
END
GO
The stored procedure makes use of this helper function that parses a string into a table (thanks Erland Sommarskog!):
CREATE FUNCTION [dbo].[iter_charlist_to_table]
    (@list ntext,
     @delimiter nchar(1) = N',')
RETURNS @tbl TABLE (listpos int IDENTITY(1, 1) NOT NULL,
                    str varchar(4000),
                    nstr nvarchar(2000)) AS
BEGIN
    DECLARE @pos int,
            @textpos int,
            @chunklen smallint,
            @tmpstr nvarchar(4000),
            @leftover nvarchar(4000),
            @tmpval nvarchar(4000)

    SET @textpos = 1
    SET @leftover = ''
    WHILE @textpos <= datalength(@list) / 2
    BEGIN
        SET @chunklen = 4000 - datalength(@leftover) / 2
        SET @tmpstr = @leftover + substring(@list, @textpos, @chunklen)
        SET @textpos = @textpos + @chunklen

        SET @pos = charindex(@delimiter, @tmpstr)
        WHILE @pos > 0
        BEGIN
            SET @tmpval = ltrim(rtrim(left(@tmpstr, @pos - 1)))
            INSERT @tbl (str, nstr) VALUES(@tmpval, @tmpval)
            SET @tmpstr = substring(@tmpstr, @pos + 1, len(@tmpstr))
            SET @pos = charindex(@delimiter, @tmpstr)
        END

        SET @leftover = @tmpstr
    END

    INSERT @tbl(str, nstr) VALUES (ltrim(rtrim(@leftover)), ltrim(rtrim(@leftover)))
    RETURN
END
Here's how I call it from T-SQL. In this case, I'm inserting the results into a temp table, so I create the temp table first:
-- create temp table for file import
CREATE TABLE #temp
(
    CustomerCode nvarchar(100) NULL,
    Name nvarchar(100) NULL,
    [Address] nvarchar(100) NULL,
    City nvarchar(100) NULL,
    [State] nvarchar(100) NULL,
    Zip nvarchar(100) NULL,
    OrderNumber nvarchar(100) NULL,
    TimeWindow nvarchar(100) NULL,
    OrderType nvarchar(100) NULL,
    Duration nvarchar(100) NULL,
    [Weight] nvarchar(100) NULL,
    Volume nvarchar(100) NULL
)

-- convert the CSV file into a table
INSERT #temp
EXEC [dbo].[SSP_CSVToTable]
    @InputFile = @FileLocation
    ,@FirstLine = @FirstImportRow
I haven't tested the performance much, but it works well for what I need - importing CSV files with less than 1000 rows. However, it might choke on really large files.
Hopefully someone else also finds it useful.
Cheers!
I have also created a function to convert a CSV to a usable format for Bulk Insert. I used Chris Clark's answer as a starting point to create the following C# function.
I ended up using a regular expression to find the fields. I then recreated the file line by line, writing it to a new file as I went, thus avoiding loading the entire file into memory.
private void CsvToOtherDelimiter(string CSVFile, System.Data.Linq.Mapping.MetaTable tbl)
{
    char PH1 = '|';
    StringBuilder ln;

    //Confirm file exists. Else, throw exception
    if (File.Exists(CSVFile))
    {
        using (TextReader tr = new StreamReader(CSVFile))
        {
            //Use a temp file to store our conversion
            using (TextWriter tw = new StreamWriter(CSVFile + ".tmp"))
            {
                string line = tr.ReadLine();
                //If we have already converted, no need to reconvert.
                //NOTE: We make the assumption here that the input header file
                //      doesn't have a PH1 value unless it's already been converted.
                if (line.IndexOf(PH1) >= 0)
                {
                    tw.Close();
                    tr.Close();
                    File.Delete(CSVFile + ".tmp");
                    return;
                }
                //Loop through input file
                while (!string.IsNullOrEmpty(line))
                {
                    ln = new StringBuilder();

                    //1. Use Regex expression to find comma separated values
                    //   using quotes as optional text qualifiers
                    //   (what MS EXCEL does when you import a csv file)
                    //2. Remove text qualifier quotes from data
                    //3. Replace any values of PH1 found in column data
                    //   with an equivalent character
                    //Regex: \A[^,]*(?=,)|(?:[^",]*"[^"]*"[^",]*)+|[^",]*"[^"]*\Z|(?<=,)[^,]*(?=,)|(?<=,)[^,]*\Z|\A[^,]*\Z
                    List<string> fieldList = Regex.Matches(line, @"\A[^,]*(?=,)|(?:[^"",]*""[^""]*""[^"",]*)+|[^"",]*""[^""]*\Z|(?<=,)[^,]*(?=,)|(?<=,)[^,]*\Z|\A[^,]*\Z")
                            .Cast<Match>()
                            .Select(m => RemoveCSVQuotes(m.Value).Replace(PH1, '¦'))
                            .ToList<string>();

                    //Add the list of fields to ln, separated by PH1
                    fieldList.ToList().ForEach(m => ln.Append(m + PH1));

                    //Write to file. Don't include trailing PH1 value.
                    tw.WriteLine(ln.ToString().Substring(0, ln.ToString().LastIndexOf(PH1)));

                    line = tr.ReadLine();
                }
                tw.Close();
            }
            tr.Close();

            //Optional: replace input file with output file
            File.Delete(CSVFile);
            File.Move(CSVFile + ".tmp", CSVFile);
        }
    }
    else
    {
        throw new ArgumentException(string.Format("Source file {0} not found", CSVFile));
    }
}

//The output file no longer needs quotes as a text qualifier, so remove them
private string RemoveCSVQuotes(string value)
{
    //if the value is just a pair of quotes (an empty quoted string), make it empty
    if (value == @"""""") value = "";
    //unescape doubled quotes, then strip any quotes on the ends
    value = value.Replace(@"""""", @"""");
    if (value.Length >= 2)
        if (value.Substring(0, 1) == @"""")
            value = value.Substring(1, value.Length - 2);
    return value;
}
More often than not, this issue is caused by users exporting an Excel file to CSV.
There are two ways around this problem:
Export from Excel using a macro, as per Microsoft's suggestion
Or the really easy way:
Open the CSV in Excel.
Save as Excel file. (.xls or .xlsx).
Import that file into SQL Server as an Excel file.
Chuckle to yourself because you didn't have to code anything like the solutions above.... muhahahaha
Here's some SQL if you really want to script it (after saving the CSV as Excel):
select *
into SQLServerTable FROM OPENROWSET('Microsoft.Jet.OLEDB.4.0',
'Excel 8.0;Database=D:\testing.xls;HDR=YES',
'SELECT * FROM [Sheet1$]')
This might be more complicated or involved than what you're willing to use, but...
If you can implement the logic for parsing the lines into fields in VB or C#, you can do this using a CLR table-valued function (TVF).
A CLR TVF can be a well-performing way to read data in from an external source when you want some C# or VB code to separate the data into columns and/or adjust the values.
You have to be willing to add a CLR assembly to your database (and one that allows external or unsafe operations so it can open files). This can get a bit complicated or involved, but might be worth it for the flexibility you get.
I had some large files that needed to be regularly loaded to tables as fast as possible, but certain code translations needed to be performed on some columns, and special handling was needed to load values that would otherwise have caused datatype errors with a plain bulk insert.
In short, a CLR TVF lets you run C# or VB code against each line of the file with bulk-insert-like performance (although you may need to worry about logging). The example in the SQL Server documentation lets you create a TVF to read from the event log, which you could use as a starting point.
Note that the code in the CLR TVF can only access the database in an init stage, before the first row is processed (e.g. no lookups for each row - you'd use a normal TVF on top of this to do such things). You don't appear to need this based on your question.
Also note that each CLR TVF must have its output columns explicitly specified, so you can't write a generic one that is reusable for each different csv file you might have.
You could write one CLR TVF to read whole lines from the file, returning a one-column result set, then use normal TVFs to read from that for each type of file. This requires the code that parses each line to be written in T-SQL, but avoids having to write many CLR TVFs.
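To make that last idea concrete, here is a minimal sketch of such a one-column CLR TVF in VB.NET. The class, method, and binding names are my own invention (not the docs example), and the assembly would need EXTERNAL_ACCESS permission to read files:
' Minimal sketch: stream each raw line of the file back as a one-column result set.
Imports System.Collections
Imports System.IO
Imports Microsoft.SqlServer.Server

Public Class CsvLineReader
    ' Hypothetical T-SQL binding after registering the assembly:
    '   CREATE FUNCTION dbo.ReadCsvLines(@path nvarchar(400))
    '   RETURNS TABLE (line nvarchar(4000))
    '   AS EXTERNAL NAME MyAssembly.CsvLineReader.ReadLines
    <SqlFunction(FillRowMethodName:="FillRow", TableDefinition:="line nvarchar(4000)")>
    Public Shared Function ReadLines(ByVal path As String) As IEnumerable
        Return File.ReadLines(path)
    End Function

    Public Shared Sub FillRow(ByVal row As Object, ByRef line As String)
        line = CStr(row)
    End Sub
End Class
A normal T-SQL TVF (or the string-splitting function from the answer above) can then take the output of dbo.ReadCsvLines and split each line into columns.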
An alternate method, assuming you don't have a load of fields or expect a quote to appear in the data itself, would be to use the REPLACE function.
UPDATE dbo.tablename
SET dbo.tablename.target_field = REPLACE(t.importedValue, '"', '')
FROM #tempTable t
WHERE dbo.tablename.target_id = t.importedID;
I have used it. I can't make any claims regarding performance. It is just a quick and dirty way to get around the problem.
Preprocessing is needed.
The PowerShell function Import-CSV supports this type of file. Export-CSV will then encapsulate each value in quotes.
Single file:
Import-Csv import.csv | Export-Csv -NoTypeInformation export.csv
To merge many files with paths C:\year\input_date.csv:
$inputPath = 'C:\????\input_????????.csv'
$outputPath = 'C:\merged.csv'
Get-ChildItem $inputPath |
Select -ExpandProperty FullName |
Import-CSV |
Export-CSV -NoTypeInformation -Path $outputPath
PowerShell can typically be run with SQL Server Agent using a PowerShell proxy account.
In case delimiters are not handled properly, explicitly specify another delimiter.
Export-CSV -NoTypeInformation -Delimiter ';' -Path $outputPath
You should be able to specify not only the field separator, which should be [,], but also the text qualifier, which in this case would be ["]. Using [] to enclose those so there's no confusion with ".
I found a few issues when we had ',' inside our fields, like Mike,"456 2nd St, Apt 5".
A solution to this issue is described at http://crazzycoding.blogspot.com/2010/11/import-csv-file-into-sql-server-using.html
Thanks,
- Ashish
Chris,
Thanks a bunch for this!! You saved my biscuits!! I could not believe that the bulk loader wouldn't handle this case when Excel does such a nice job... don't these guys see each other in the halls???
Anyway... I needed a console-application version, so here is what I hacked together. It's down and dirty, but it works like a champ! I hardcoded the delimiter and commented out the header handling, as they were not needed for my app.
I wish I could also paste a nice big beer in here for ya too.
Module Module1
    Sub Main()
        Dim arrArgs() As String = Command.Split(",")
        Dim i As Integer
        Dim obj As New ReDelimIt()
        Console.Write(vbNewLine & vbNewLine)
        If arrArgs(0) <> Nothing Then
            For i = LBound(arrArgs) To UBound(arrArgs)
                Console.Write("Parameter " & i & " is " & arrArgs(i) & vbNewLine)
            Next
            obj.ProcessFile(arrArgs(0), arrArgs(1))
        Else
            Console.Write("Usage Test1 <inputfile>,<outputfile>")
        End If
        Console.Write(vbNewLine & vbNewLine)
    End Sub
End Module

Public Class ReDelimIt
    Public Function ProcessFile(ByVal InputFile As String, ByVal OutputFile As String) As Integer
        Dim ph1 As String = "|"
        Dim objReader As System.IO.StreamReader = Nothing
        Dim count As Integer = 0 'This will also serve as a primary key
        Dim sb As New System.Text.StringBuilder
        Try
            objReader = New System.IO.StreamReader(System.IO.File.OpenRead(InputFile), System.Text.Encoding.Default)
        Catch ex As Exception
            MsgBox(ex.Message)
        End Try
        If objReader Is Nothing Then
            MsgBox("Invalid file: " & InputFile)
            count = -1
            Exit Function
        End If
        'grab the first line
        Dim line = objReader.ReadLine()
        'and advance to the next line b/c the first line is column headings
        'Removed Check Headers, can put back in if needed.
        'If chkHeaders.Checked Then
        '    line = objReader.ReadLine
        'End If
        While Not String.IsNullOrEmpty(line) 'loop through each line
            count += 1
            'Replace commas with our custom-made delimiter
            line = line.Replace(",", ph1)
            'Find a quoted part of the line, which could legitimately contain commas.
            'In that case we will need to identify the quoted section and swap commas back in for our custom placeholder.
            Dim starti = line.IndexOf(ph1 & """", 0)
            While starti > -1 'loop through quoted fields
                'Find end quote token (originally a ",)
                Dim endi = line.IndexOf("""" & ph1, starti)
                'The end quote token could be a false positive because there could occur a ", sequence.
                'It would be double-quoted ("",) so check for that here
                Dim check1 = line.IndexOf("""""" & ph1, starti)
                'A """, sequence can occur if a quoted field ends in a quote.
                'In this case, the above check matches, but we actually SHOULD process this as an end quote token
                Dim check2 = line.IndexOf("""""""" & ph1, starti)
                'If we are in the check1 ("",) situation, keep searching for an end quote token
                'The +1 and +2 accounts for the extra length of the checked sequences
                While (endi = check1 + 1 AndAlso endi <> check2 + 2) 'loop through "false" tokens in the quoted fields
                    endi = line.IndexOf("""" & ph1, endi + 1)
                    check1 = line.IndexOf("""""" & ph1, check1 + 1)
                    check2 = line.IndexOf("""""""" & ph1, check2 + 1)
                End While
                'We have searched for an end token (",) but can't find one, so that means the line ends in a "
                If endi < 0 Then endi = line.Length - 1
                'Grab the quoted field from the line, now that we have the start and ending indices
                Dim source = line.Substring(starti + ph1.Length, endi - starti - ph1.Length + 1)
                'And swap the commas back in
                line = line.Replace(source, source.Replace(ph1, ","))
                'Find the next quoted field
                If endi >= line.Length - 1 Then endi = line.Length 'During the swap, the length of line shrinks so an endi value at the end of the line will fail
                starti = line.IndexOf(ph1 & """", starti + ph1.Length)
            End While
            'Add our primary key to the line
            'Removed for now
            'If chkAddKey.Checked Then
            '    line = String.Concat(count.ToString, ph1, line)
            'End If
            sb.AppendLine(line)
            line = objReader.ReadLine
        End While
        objReader.Close()
        SaveTextToFile(sb.ToString, OutputFile)
        Return count
    End Function

    Public Function SaveTextToFile(ByVal strData As String, ByVal FullPath As String) As Boolean
        Dim bAns As Boolean = False
        Dim objReader As System.IO.StreamWriter
        Try
            objReader = New System.IO.StreamWriter(FullPath, False, System.Text.Encoding.Default)
            objReader.Write(strData)
            objReader.Close()
            bAns = True
        Catch Ex As Exception
            Throw Ex
        End Try
        Return bAns
    End Function
End Class
This code works for me:
public bool CSVFileRead(string fullPathWithFileName, string fileNameModified, string tableName)
{
    SqlConnection con = new SqlConnection(ConfigurationSettings.AppSettings["dbConnectionString"]);
    string filepath = fullPathWithFileName;
    StreamReader sr = new StreamReader(filepath);
    string line = sr.ReadLine();
    string[] value = line.Split(',');
    DataTable dt = new DataTable();
    DataRow row;
    foreach (string dc in value)
    {
        dt.Columns.Add(new DataColumn(dc));
    }
    while (!sr.EndOfStream)
    {
        //string[] stud = sr.ReadLine().Split(',');
        //for (int i = 0; i < stud.Length; i++)
        //{
        //    stud[i] = stud[i].Replace("\"", "");
        //}
        //value = stud;
        value = sr.ReadLine().Split(',');
        if (value.Length == dt.Columns.Count)
        {
            row = dt.NewRow();
            row.ItemArray = value;
            dt.Rows.Add(row);
        }
    }
    SqlBulkCopy bc = new SqlBulkCopy(con.ConnectionString, SqlBulkCopyOptions.TableLock);
    bc.DestinationTableName = tableName;
    bc.BatchSize = dt.Rows.Count;
    con.Open();
    bc.WriteToServer(dt);
    bc.Close();
    con.Close();
    return true;
}
I put together the code below to solve my case. I needed to preprocess very large files and sort out the inconsistent quoting. Just paste it into a blank C# application, set the consts to your requirements, and away you go. This worked on very large CSVs of over 10 GB.
namespace CsvFixer
{
    using System.IO;
    using System.Text;

    public class Program
    {
        private const string delimiter = ",";
        private const string quote = "\"";
        private const string inputFile = "C:\\temp\\input.csv";
        private const string fixedFile = "C:\\temp\\fixed.csv";

        /// <summary>
        /// This application fixes inconsistently quoted csv (or delimited) files with support for very large file sizes.
        /// For example : 1223,5235234,8674,"Houston","London, UK",3425,Other text,stuff
        /// Must become : "1223","5235234","8674","Houston","London, UK","3425","Other text","stuff"
        /// </summary>
        /// <param name="args"></param>
        static void Main(string[] args)
        {
            // Use streaming to allow for large files.
            using (StreamWriter outfile = new StreamWriter(fixedFile))
            {
                using (FileStream fs = File.Open(inputFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
                using (BufferedStream bs = new BufferedStream(fs))
                using (StreamReader sr = new StreamReader(bs))
                {
                    string currentLine;

                    // Read each input line in and write each fixed line out
                    while ((currentLine = sr.ReadLine()) != null)
                    {
                        outfile.WriteLine(FixLine(currentLine, delimiter, quote));
                    }
                }
            }
        }

        /// <summary>
        /// Fully quote a partially quoted line
        /// </summary>
        /// <param name="line">The partially quoted line</param>
        /// <returns>The fully quoted line</returns>
        private static string FixLine(string line, string delimiter, string quote)
        {
            StringBuilder fixedLine = new StringBuilder();

            // Split all on the delimiter, accepting that some quoted fields
            // that contain the delimiter will be split into many pieces.
            string[] fieldParts = line.Split(delimiter.ToCharArray());

            // Loop through the fields (or parts of fields)
            for (int i = 0; i < fieldParts.Length; i++)
            {
                string currentFieldPart = fieldParts[i];

                // If the current field part starts and ends with a quote it is a field, so write it to the result
                if (currentFieldPart.StartsWith(quote) && currentFieldPart.EndsWith(quote))
                {
                    fixedLine.Append(string.Format("{0}{1}", currentFieldPart, delimiter));
                }
                // else if it starts with a quote but doesn't end with one, it is part of a longer field.
                else if (currentFieldPart.StartsWith(quote))
                {
                    // Add the start of the field
                    fixedLine.Append(string.Format("{0}{1}", currentFieldPart, delimiter));

                    // Append any additional field parts (we will only hit the end of the field when
                    // the last field part finishes with a quote)
                    while (!fieldParts[++i].EndsWith(quote))
                    {
                        fixedLine.Append(string.Format("{0}{1}", fieldParts[i], delimiter));
                    }

                    // Append the last field part - i.e. the part containing the closing quote
                    fixedLine.Append(string.Format("{0}{1}", fieldParts[i], delimiter));
                }
                else
                {
                    // The field has no quotes, add the field part with quotes as bookends
                    fixedLine.Append(string.Format("{0}{1}{0}{2}", quote, currentFieldPart, delimiter));
                }
            }

            // Return the fixed string
            return fixedLine.ToString();
        }
    }
}
Speaking from practice: in SQL Server 2017 you can provide a 'Text qualifier' of a double quote, and it doesn't "supersede" your delimiter (presumably this is the FIELDQUOTE option that arrived alongside FORMAT='CSV'). I bulk insert several files that look just like the example from the OP. My files are ".csv" and they have inconsistent text qualifiers that are only found when the value contains a comma. I have no idea in which version of SQL Server this feature started working, but I know it works in SQL Server 2017 Standard. Pretty easy.
You do not need to preprocess the file outside of SQL.
What worked for me was changing
ROWTERMINATOR = '\n'
to
ROWTERMINATOR = '0x0a'.
(BULK INSERT treats a '\n' row terminator as an implicit carriage-return/line-feed pair, so files that end lines with a bare line feed need the explicit 0x0a byte.)
A new option was added in SQL 2017 to specify WITH ( FORMAT='CSV') for BULK INSERT commands.
An example from a Microsoft GitHub page:
BULK INSERT Product
FROM 'product.csv'
WITH ( DATA_SOURCE = 'MyAzureBlobStorage',
FORMAT='CSV', CODEPAGE = 65001, --UTF-8 encoding
FIRSTROW=2,
ROWTERMINATOR = '0x0a',
TABLOCK);
Detailed documentation for that option is available here:
https://learn.microsoft.com/en-us/sql/t-sql/statements/bulk-insert-transact-sql?view=sql-server-2017#input-file-format-options
I have successfully used this option with CSV data containing optional quotes just as the OP gave an example of.
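If you drive the import from .NET, the same statement can be issued through a plain SqlCommand. A minimal sketch, with the connection string, file path, and table name as placeholders:
' Minimal sketch: run the FORMAT='CSV' bulk insert from VB.NET (placeholders throughout).
Imports System.Data.SqlClient

Module BulkInsertCsv
    Sub Main()
        Using con As New SqlConnection("Server=.;Database=MyDb;Integrated Security=true")
            con.Open()
            Dim sql As String =
                "BULK INSERT Product FROM 'C:\temp\product.csv' " &
                "WITH (FORMAT = 'CSV', FIRSTROW = 2, ROWTERMINATOR = '0x0a', TABLOCK);"
            Using cmd As New SqlCommand(sql, con)
                cmd.ExecuteNonQuery()
            End Using
        End Using
    End Sub
End Module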
Create a VB.NET program to convert the file to a new delimiter, using the .NET 4.5 Framework's TextFieldParser.
This will automatically handle text-qualified fields.
The code above has been modified to use the built-in TextFieldParser:
Module Module1
    Sub Main()
        Dim arrArgs() As String = Command.Split(",")
        Dim i As Integer
        Dim obj As New ReDelimIt()
        Dim InputFile As String = ""
        Dim OutPutFile As String = ""
        Dim NewDelimiter As String = ""
        Console.Write(vbNewLine & vbNewLine)
        If Not IsNothing(arrArgs(0)) Then
            For i = LBound(arrArgs) To UBound(arrArgs)
                Console.Write("Parameter " & i & " is " & arrArgs(i) & vbNewLine)
            Next
            InputFile = arrArgs(0)
            If Not IsNothing(arrArgs(1)) Then
                If Not String.IsNullOrEmpty(arrArgs(1)) Then
                    OutPutFile = arrArgs(1)
                Else
                    OutPutFile = InputFile.Replace("csv", "pipe")
                End If
            Else
                OutPutFile = InputFile.Replace("csv", "pipe")
            End If
            If Not IsNothing(arrArgs(2)) Then
                If Not String.IsNullOrEmpty(arrArgs(2)) Then
                    NewDelimiter = arrArgs(2)
                Else
                    NewDelimiter = "|"
                End If
            Else
                NewDelimiter = "|"
            End If
            obj.ConvertCSVFile(InputFile, OutPutFile, NewDelimiter)
        Else
            Console.Write("Usage ChangeFileDelimiter <inputfile>,<outputfile>,<NewDelimiter>")
        End If
        obj = Nothing
        Console.Write(vbNewLine & vbNewLine)
        'Console.ReadLine()
    End Sub
End Module

Public Class ReDelimIt
    Public Function ConvertCSVFile(ByVal InputFile As String, ByVal OutputFile As String, Optional ByVal NewDelimiter As String = "|") As Integer
        Using MyReader As New Microsoft.VisualBasic.FileIO.TextFieldParser(InputFile)
            MyReader.TextFieldType = FileIO.FieldType.Delimited
            MyReader.SetDelimiters(",")
            Dim sb As New System.Text.StringBuilder
            Dim strLine As String = ""
            Dim currentRow As String()
            While Not MyReader.EndOfData
                Try
                    currentRow = MyReader.ReadFields()
                    Dim currentField As String
                    strLine = ""
                    For Each currentField In currentRow
                        'MsgBox(currentField)
                        If strLine = "" Then
                            strLine = strLine & currentField
                        Else
                            strLine = strLine & NewDelimiter & currentField
                        End If
                    Next
                    sb.AppendLine(strLine)
                Catch ex As Microsoft.VisualBasic.FileIO.MalformedLineException
                    'MsgBox("Line " & ex.Message & " is not valid and will be skipped.")
                    Console.WriteLine("Line " & ex.Message & " is not valid and will be skipped.")
                End Try
            End While
            SaveTextToFile(sb.ToString, OutputFile)
        End Using
        Return Err.Number
    End Function

    Public Function SaveTextToFile(ByVal strData As String, ByVal FullPath As String) As Boolean
        Dim bAns As Boolean = False
        Dim objReader As System.IO.StreamWriter
        Try
            If FileIO.FileSystem.FileExists(FullPath) Then
                Kill(FullPath)
            End If
            objReader = New System.IO.StreamWriter(FullPath, False, System.Text.Encoding.Default)
            objReader.Write(strData)
            objReader.Close()
            bAns = True
        Catch Ex As Exception
            Throw Ex
        End Try
        Return bAns
    End Function
End Class
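For what it's worth, the quoted-field handling the converter relies on is TextFieldParser's default behavior. A minimal stand-alone sketch (the file path is a placeholder; HasFieldsEnclosedInQuotes defaults to True and is shown only for clarity):
' Minimal sketch: TextFieldParser splits quoted CSV correctly on its own.
Imports Microsoft.VisualBasic.FileIO

Module ParserDemo
    Sub Main()
        Using parser As New TextFieldParser("C:\temp\input.csv") ' placeholder path
            parser.TextFieldType = FieldType.Delimited
            parser.SetDelimiters(",")
            parser.HasFieldsEnclosedInQuotes = True ' the default; shown for clarity
            While Not parser.EndOfData
                Dim fields() As String = parser.ReadFields()
                Console.WriteLine(String.Join("|", fields)) ' e.g. INGRAM MICRO INC.|Stuart, Becky|523
            End While
        End Using
    End Sub
End Module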
