I have an application that is trying to extract all the data from different tables within one database. First, I stored all the queries in a .txt file so I could retrieve the table names, and stored them in a List.
[Here's my .txt file]
string script = File.ReadAllText(@"D:\Schooldb\School.txt");

List<string> strings = new List<string>();
strings.Add(script);

using (SqlConnection connection = new SqlConnection(constring))
{
    foreach (string x in strings)
    {
        using (SqlCommand cmd = new SqlCommand(x, connection))
        {
            using (SqlDataAdapter adapter = new SqlDataAdapter())
            {
                cmd.Connection = connection;
                adapter.SelectCommand = cmd;
                using (DataTable dt = new DataTable())
                {
                    adapter.Fill(dt);
                    string txt = string.Empty;

                    foreach (DataColumn column in dt.Columns)
                    {
                        //Add the header row for the text file.
                        txt += column.ColumnName + "\t\t";
                    }

                    //Add a new line after the column names.
                    txt += "\r\n";

                    foreach (DataRow row in dt.Rows)
                    {
                        foreach (DataColumn column in dt.Columns)
                        {
                            //Add the data rows.
                            txt += row[column.ColumnName].ToString() + "***";
                        }

                        //Add a new line.
                        txt += "\r\n";
                    }

                    int y = 0;
                    StreamWriter file = new StreamWriter($@"D:\SchoolOutput\{x}_{DateTime.Now.ToString("yyyyMMdd")}.txt");
                    file.WriteLine(txt);
                    file.Close();
                    y++;
                }
            }
        }
    }
}
Expected:
teachers_datetoday
students_datetoday
subjects_datetoday
But in reality, my output is just one file:
datetoday.txt
Can someone tell me which part I got wrong?
Thanks in advance!
There are other approaches for extracting data directly using SSMS.
In this case, your code reads the entire file as a single string, so the foreach loop runs only once.
Instead of reading the entire file as one string, put each command on its own line and read the lines like this:
foreach (string line in System.IO.File.ReadLines(@"D:\Schooldb\School.txt"))
{
//Each line contains one command
//Write your logic here
}
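Filling in that skeleton, the whole extraction could look roughly like the sketch below. It is not your original code verbatim: it assumes each line of School.txt holds one complete SELECT, and GetTableName is a hypothetical helper you would write to pull the table name out of each query (or out of a second field in the file).

using (SqlConnection connection = new SqlConnection(constring))
{
    foreach (string line in System.IO.File.ReadLines(@"D:\Schooldb\School.txt"))
    {
        if (string.IsNullOrWhiteSpace(line)) continue; //skip blank lines

        using (SqlCommand cmd = new SqlCommand(line, connection))
        using (SqlDataAdapter adapter = new SqlDataAdapter(cmd))
        using (DataTable dt = new DataTable())
        {
            //Fill opens and closes the connection itself if it is not already open
            adapter.Fill(dt);

            StringBuilder txt = new StringBuilder();
            foreach (DataColumn column in dt.Columns)
                txt.Append(column.ColumnName).Append("\t\t");
            txt.AppendLine();

            foreach (DataRow row in dt.Rows)
            {
                foreach (DataColumn column in dt.Columns)
                    txt.Append(row[column]).Append("***");
                txt.AppendLine();
            }

            //GetTableName is a hypothetical placeholder: derive the table name from the query text
            string tableName = GetTableName(line);
            File.WriteAllText($@"D:\SchoolOutput\{tableName}_{DateTime.Now:yyyyMMdd}.txt", txt.ToString());
        }
    }
}

A StringBuilder (from System.Text) replaces the string concatenation, which avoids re-copying the whole buffer on every row of a large table.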
I want to load csv files into a SQL table using a Script Task in SSIS. The files have similar column names, but not exactly the same ones, and the number of columns also varies.
I am currently using the script below, from this useful blog, which checks whether the exact column name exists and, if it does, loads it into the table; however, it fails if the column name doesn't exist. Is there a way to use the LIKE operator to search for the csv file's column name in the SQL table? If it finds it, load the data into the table; if it doesn't, ignore the column.
Script:
public void Main()
{
    string delimiter = Dts.Variables["$Package::Delimiter"].Value.ToString();
    string TableName = Dts.Variables["$Package::TableName"].Value.ToString();

    SqlConnection myADONETConnection =
        (SqlConnection)Dts.Connections["ADOConn"].AcquireConnection(Dts.Transaction);

    //Reading file names one by one
    string SourceDirectory = Dts.Variables["$Package::SourceFolder"].Value.ToString();
    string[] fileEntries = Directory.GetFiles(SourceDirectory);

    foreach (string fileName in fileEntries)
    {
        string columname = "";

        //Writing the data of each file into the table
        int counter = 0;
        string line;
        System.IO.StreamReader SourceFile = new System.IO.StreamReader(fileName);
        while ((line = SourceFile.ReadLine()) != null)
        {
            if (counter == 0)
            {
                //The first line of each file holds the column names
                columname = line.Replace(delimiter, ",");
            }
            else
            {
                string query = "Insert into " + TableName +
                    "(" + columname + ") VALUES('" + line.Replace(delimiter, "','") + "')";
                SqlCommand myCommand1 = new SqlCommand(query, myADONETConnection);
                myCommand1.ExecuteNonQuery();
            }
            counter++;
        }
        SourceFile.Close();
    }

    Dts.TaskResult = (int)ScriptResults.Success;
}
    #region ScriptResults declaration
    /// <summary>
    /// This enum provides a convenient shorthand within the scope of this class for setting the
    /// result of the script.
    ///
    /// This code was generated automatically.
    /// </summary>
    enum ScriptResults
    {
        Success = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success,
        Failure = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure
    };
    #endregion
}
}
Thanks,
J
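One way to make the load tolerant of missing columns is to query the table's actual column list once per file, keep only the csv headers that exist in the table, and build the INSERT from that intersection. Below is a sketch along those lines, not a tested solution: it reuses TableName and myADONETConnection from the script above, headerLine stands for the first line of the current file, and it needs using System.Linq.

//Fetch the table's real column list once
List<string> tableColumns = new List<string>();
using (SqlCommand colCmd = new SqlCommand(
    "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = @table",
    myADONETConnection))
{
    colCmd.Parameters.AddWithValue("@table", TableName);
    using (SqlDataReader rdr = colCmd.ExecuteReader())
    {
        while (rdr.Read())
            tableColumns.Add(rdr.GetString(0));
    }
}

//Keep only the csv headers that exist in the table (case-insensitive),
//remembering their positions so each data row can be filtered the same way
string[] csvHeaders = headerLine.Split(delimiter.ToCharArray());
List<int> keptIndexes = new List<int>();
for (int i = 0; i < csvHeaders.Length; i++)
{
    if (tableColumns.Contains(csvHeaders[i].Trim(), StringComparer.OrdinalIgnoreCase))
        keptIndexes.Add(i);
}
//When building each INSERT, emit only the column names and values at keptIndexes

For the LIKE-style matching the question asks about, the equality test could be swapped for something like tableColumns.Any(c => c.StartsWith(csvHeaders[i].Trim(), StringComparison.OrdinalIgnoreCase)), though fuzzy matching risks mapping a csv column to the wrong table column.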
I changed the name of one of my tables, and afterwards entered some data and pulled it up using a view; to my surprise, the data was not showing. I tried renaming the table back to its original name, with no luck; the same thing kept happening.
Then I tried retyping the data in one of the columns and executed the view, and the data finally showed. Now the problem is that I need to re-enter the data in that column every time a row is inserted, which is obviously not a good thing to do.
Here is the code showing how I add the data:
DataTable tblcsv = new DataTable();
tblcsv.Columns.AddRange(new DataColumn[7]
{
    new DataColumn("unit_name", typeof(string)),
    new DataColumn("unit", typeof(string)),
    new DataColumn("adrress", typeof(string)),
    new DataColumn("latitude", typeof(string)),
    new DataColumn("longitude", typeof(string)),
    new DataColumn("region", typeof(string)),
    new DataColumn("linkid", typeof(string))
});

string ReadCSV = File.ReadAllText(forex);
foreach (string csvRow in ReadCSV.Split('\n'))
{
    if (!string.IsNullOrEmpty(csvRow))
    {
        //Adding each row into the DataTable
        tblcsv.Rows.Add();
        int count = 0;
        foreach (string FileRec in csvRow.Split(','))
        {
            tblcsv.Rows[tblcsv.Rows.Count - 1][count] = FileRec;
            if (count == 5)
            {
                tblcsv.Rows[tblcsv.Rows.Count - 1][6] = link;
            }
            count++;
        }
    }
}
string consString = ConfigurationManager.ConnectionStrings["diposlConnectionString"].ConnectionString;
using (SqlConnection con = new SqlConnection(consString))
{
    using (SqlBulkCopy sqlBulkCopy = new SqlBulkCopy(con))
    {
        //Set the database table name
        sqlBulkCopy.DestinationTableName = "dbo.FRIENDLY_FORCES";

        //[OPTIONAL]: Map the DataTable columns to those of the database table
        sqlBulkCopy.ColumnMappings.Add("unit_name", "unit_name");
        sqlBulkCopy.ColumnMappings.Add("unit", "unit");
        sqlBulkCopy.ColumnMappings.Add("adrress", "adrress");
        sqlBulkCopy.ColumnMappings.Add("latitude", "latitude");
        sqlBulkCopy.ColumnMappings.Add("longitude", "longitude");
        sqlBulkCopy.ColumnMappings.Add("region", "region");
        sqlBulkCopy.ColumnMappings.Add("linkid", "linkid");

        con.Open();
        sqlBulkCopy.WriteToServer(tblcsv);
        con.Close();
    }
}
The region column is the one where I manually re-entered the data.
Did renaming the table do something to my data?
Or am I just missing something?
Thank you
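One thing worth checking, though it is only an assumption since nothing above confirms it: the loader splits the file on '\n' alone, so with Windows ("\r\n") line endings the last field of each row keeps an invisible trailing '\r'. If region is the last field in the csv, that stray character would make equality comparisons in the view fail until the value is retyped. A minimal guard:

foreach (string rawRow in ReadCSV.Split('\n'))
{
    //Trim the carriage return that Split('\n') leaves on the last field
    string csvRow = rawRow.TrimEnd('\r');
    if (string.IsNullOrEmpty(csvRow))
        continue;
    //...populate tblcsv exactly as before...
}

Renaming a table, by itself, does not alter the rows stored in it.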
I'm having an issue with SSIS; it's really throwing things off. Here is an example of what I am facing:
H~Column1~Column2~Column3~Column4
D~1~2~3~4<LF>
D~6-7-8-9<LF>
T~ More Stuff<LF>
The first line doesn't have an LF character, so when I set up a File Task in SSIS, the program reads the file as one column containing one long string:
H~Column1~Column2~Column3~Column4D~1~2~3~4D~6-7-8-9T~ More Stuff
Any idea how to break this up so that SSIS can delimit it properly?
Create a Script Task to read the whole file line by line and output it to a new file called {YourFilename}_Cleaned (or something like that). Below is a skeleton of the Main() method. Just replace the comment "// insert LF into LineData after column name list" with the code to insert the LF at the correct point in your first line.
/* include these
using System;
using System.IO;
using System.Text.RegularExpressions;
using System.Data;
using Microsoft.SqlServer.Dts.Runtime;
using System.Windows.Forms;
*/

// put the below in your Main() method
string sourceFile = (string)Dts.Variables["FilePickupRootPath"].Value + "\\Process\\" + (string)Dts.Variables["FileName"].Value;
string cleanFile = (string)Dts.Variables["FilePickupRootPath"].Value + "\\Process\\" + (string)Dts.Variables["FileName"].Value + "_Cleaned";
string lineData;
bool isFirstLine = true;

try
{
    StreamReader reader = new StreamReader(sourceFile);
    StreamWriter writer = new StreamWriter(cleanFile, false);

    lineData = reader.ReadLine();
    while (lineData != null)
    {
        if (isFirstLine)
        {
            // insert LF into LineData after column name list
            isFirstLine = false;
        }
        writer.WriteLine(lineData);
        lineData = reader.ReadLine();
    }

    reader.Close();
    writer.Close();
}
catch (Exception e)
{
    MessageBox.Show(e.Message, "Error!");
    Console.WriteLine("Exception: " + e.ToString());
    Dts.TaskResult = (int)ScriptResults.Failure;
    return; // don't overwrite the failure result below
}

Dts.TaskResult = (int)ScriptResults.Success;
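For the "insert LF" step itself, something like the following should work, assuming data rows always begin with "D~" as they do in the sample (the original post doesn't confirm that):

// Assumes data rows always begin with "D~", as in the sample file.
int dataStart = lineData.IndexOf("D~");
if (dataStart > 0)
{
    // Insert the missing LF between the column list and the first data row.
    lineData = lineData.Substring(0, dataStart) + "\n" + lineData.Substring(dataStart);
}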
Consider the following code:
[Test]
public void StackOverflowQuestionTest()
{
    const string connectionString = "enter your connection string if you wanna test this code";
    byte[] result = null;

    using (var connection = new SqlConnection(connectionString))
    {
        connection.Open();
        using (var sqlCommand = new SqlCommand("declare @xml as xml = '<xml/>' SELECT convert(varbinary(max), @xml) as value"))
        //using (var sqlCommand = new SqlCommand("SELECT convert(varbinary(max), N'<xml/>') as value"))
        {
            sqlCommand.Connection = connection;
            using (SqlDataReader reader = sqlCommand.ExecuteReader())
            {
                while (reader.Read())
                {
                    result = (byte[])reader["value"];
                }
                reader.Close();
            }
        }
    }

    string decodedString = new UnicodeEncoding(false, true).GetString(result);
    var document = XElement.Parse(decodedString);
}
If I run this test, I get an XmlException with the message: "Data at the root level is invalid. Line 1, position 1." As it turns out, the problem is the "0xFFFE" preamble, which is treated as an invalid character.
Note that if I use the commented-out query instead, everything works just fine, which seems strange to me. It looks like SQL Server stores XML strings in UCS-2 with a BOM, while it stores nvarchar values without one.
The main question is: how can I decode this byte array into a string that does not contain this preamble (BOM)?
In case anyone needs this in the future, the following code works:
using (var ms = new MemoryStream(result))
{
    using (var sr = new StreamReader(ms, Encoding.Unicode, true))
    {
        decodedString = sr.ReadToEnd();
    }
}
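This works because StreamReader, constructed with detectEncodingFromByteOrderMarks set to true, consumes the BOM instead of returning it. If you would rather skip the streams, an alternative sketch (untested against the setup above) is to decode the bytes and strip the BOM character yourself:

//U+FEFF is what the 0xFF 0xFE preamble becomes once decoded as little-endian UTF-16
string decodedString = Encoding.Unicode.GetString(result).TrimStart('\uFEFF');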
I am trying to insert a .csv file into SQL Server 2008 R2.
The .csv is 300+ MB, from http://ipinfodb.com/ip_database.php (the Complete (City) database, 4.0M records).
Here are the top 5 lines, with the first line being the column headers:
"ip_start";"country_code";"country_name";"region_code";"region_name";"city";"zipcode";"latitude";"longitude";"metrocode"
"0";"RD";"Reserved";;;;;"0";"0";
"16777216";"AU";"Australia";;;;;"-27";"133";
"17367040";"MY";"Malaysia";;;;;"2.5";"112.5";
"17435136";"AU";"Australia";;;;;"-27";"133";
I tried Import and Export Data and BULK INSERT, but haven't been able to import the data correctly yet.
Shall I resort to using bcp? Can it handle stripping the quotes ("")? How?
Thank you very much.
Got it: I forgot to set the Text Qualifier to ".
Your data looks pretty inconsistent, since NULL values don't carry the quotation enclosure.
I believe you can create a format file in SQL Server to match your particular csv file and its particular terminators.
See more here:
http://lanestechblog.blogspot.com/2008/08/sql-server-bulk-insert-using-format.html
Is this a single import, or do you want to schedule a recurring import? If this is a one-time task, you should be able to use the Import and Export Wizard. The text qualifier will be the quotation mark ("); be sure to select column names in the first data row, and set the field delimiter to the semicolon (;).
I'm not certain the file is properly formatted: the trailing semicolon after each data row might be a problem. If you hit any errors, simply add a new column header to the file.
EDIT: I just did a quick test; the semicolons at the end are treated as part of the final value in that row. I would suggest adding ;"tempheader" at the end of your header (first) row; that will cause SQL to treat the final semicolon as a delimiter, and you can delete the extra column once the import is complete.
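For example, the header row from above would become:
"ip_start";"country_code";"country_name";"region_code";"region_name";"city";"zipcode";"latitude";"longitude";"metrocode";"tempheader"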
In C# you can use the following code; it works for me:
public bool CSVFileRead(string fullPathWithFileName, string fileNameModified, string tableName)
{
    SqlConnection con = new SqlConnection(ConfigurationManager.AppSettings["dbConnectionString"]);
    string filepath = fullPathWithFileName;

    DataTable dt = new DataTable();
    using (StreamReader sr = new StreamReader(filepath))
    {
        //The first line holds the column names
        string line = sr.ReadLine();
        string[] value = line.Split(',');
        foreach (string dc in value)
        {
            dt.Columns.Add(new DataColumn(dc));
        }

        while (!sr.EndOfStream)
        {
            //To strip quotation marks from the values, uncomment this block:
            //string[] stud = sr.ReadLine().Split(',');
            //for (int i = 0; i < stud.Length; i++)
            //{
            //    stud[i] = stud[i].Replace("\"", "");
            //}
            //value = stud;
            value = sr.ReadLine().Split(',');
            if (value.Length == dt.Columns.Count)
            {
                DataRow row = dt.NewRow();
                row.ItemArray = value;
                dt.Rows.Add(row);
            }
        }
    }

    SqlBulkCopy bc = new SqlBulkCopy(con.ConnectionString, SqlBulkCopyOptions.TableLock);
    bc.DestinationTableName = tableName;
    bc.BatchSize = dt.Rows.Count;
    con.Open();
    bc.WriteToServer(dt);
    bc.Close();
    con.Close();
    return true;
}
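Note that the file in this question is semicolon-delimited with quoted values, so to use this routine on it you would split on ';' and trim the quotes instead, for example (a sketch, needing using System.Linq):

//Split on the semicolon delimiter, then strip the surrounding quotation marks
string[] fields = sr.ReadLine().Split(';').Select(f => f.Trim('"')).ToArray();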