I am trying to insert Excel data into an MS SQL database.
Currently I loop over the Excel records and insert them one at a time, which takes too long.
Is there a way to insert all the Excel records into the database at once?
Thanks and Regards,
Here is my code:
// Validate each e-mail address and insert the result row-by-row via
// InsertuserDetails.  Changes from the original:
//  * the license key is loaded and the MXValidate instance configured ONCE,
//    before the loop, instead of once per spreadsheet row (a large per-row cost);
//  * a fresh User is created per row so flag state cannot leak between rows.
// NOTE(review): user.StrEmailId is never populated from the reader — confirm
// where the address for the current row is supposed to come from.
string key = "2CH3W-7ENLC-FWLZ4-WEUVY-JRQ11-AU69U-W63V5-ULF1C-DA5RC-RU7XS-XK6JY-6JT5U-MYLX";
MXValidate.LoadLicenseKey(key);
MXValidate mx = new MXValidate();
mx.LogInMemory = true;
mx.CheckLiteralDomain = true;
mx.CheckGreylisting = true;

cmd_obj = new OleDbCommand("SELECT * FROM [Sheet1$]", con_obj);
OleDbDataReader dr = cmd_obj.ExecuteReader();
while (dr.Read())
{
    User user = new User();
    int blnBadSyntax = 0;
    int blnBadDomain = 0;
    int blnBadSMTP = 0;
    int blnGreylisted = 0;
    int blnBadMailbox = 0;
    bool blnIsValid = false;
    try
    {
        // The deepest validation level reached tells us which check failed first.
        // NOTE(review): SMTP -> "greylisted" and Greylisted -> "bad mailbox"
        // looks shifted by one level; confirm against the MXValidate docs.
        MXValidateLevel level = mx.Validate(user.StrEmailId, MXValidateLevel.Mailbox);
        switch (level)
        {
            case MXValidateLevel.NotValid:
                blnBadSyntax = 1;
                break;
            case MXValidateLevel.Syntax:
                blnBadDomain = 1;
                break;
            case MXValidateLevel.MXRecords:
                blnBadSMTP = 1;
                break;
            case MXValidateLevel.SMTP:
                blnGreylisted = 1;
                blnIsValid = true;
                break;
            case MXValidateLevel.Greylisted:
                blnBadMailbox = 1;
                blnIsValid = true;
                break;
            case MXValidateLevel.Mailbox:
                blnIsValid = true;
                break;
        }
        user.BlnBadSyntax = blnBadSyntax;
        user.BlnBadDomain = blnBadDomain;
        user.BlnBadSMTP = blnBadSMTP;
        user.BlnGraylisted = blnGreylisted;
        user.BlnBadMailBox = blnBadMailbox;
        if (blnIsValid)
        {
            user.StrStatus = "Valid";
        }
        else
        {
            user.StrStatus = "InValid";
            // Keep the validator trace for failed addresses.
            logFile.writeLog(mx.GetLog());
        }
    }
    catch (DnsException)
    {
        // DNS lookup failed for this row; log the trace and still insert the
        // row with its default flags (same behaviour as the original).
        logFile.writeLog(mx.GetLog());
    }
    InsertuserDetails(user);
}
You can do this with the help of SqlBulkCopy if the data is large.
Kindly check the following post for more details:
http://technico.qnownow.com/bulk-copy-data-from-excel-to-destination-db-using-sql-bulk-copy/
// Stream the worksheet straight into SQL Server with SqlBulkCopy — one bulk
// operation instead of one INSERT per row.
// Connection string to the Excel workbook.  Fixes over the posted snippet:
// the verbatim-string prefix is '@' (the post showed '#'), the provider key
// is "Extended Properties" (with a space), and the closing braces that the
// post dropped are restored.
string excelConnectionString = @"Provider=Microsoft.Jet.OLEDB.4.0;Data Source=Book1.xls;Extended Properties=""Excel 8.0;HDR=YES;""";
// Create connection to the Excel workbook.
using (OleDbConnection connection = new OleDbConnection(excelConnectionString))
{
    OleDbCommand command = new OleDbCommand("Select ID,Data FROM [Data$]", connection);
    connection.Open();
    // DbDataReader over the worksheet rows.
    using (DbDataReader dr = command.ExecuteReader())
    {
        // SQL Server connection string.
        string sqlConnectionString = "Data Source=.;Initial Catalog=Test;Integrated Security=True";
        // Bulk copy the reader's rows into the destination table.
        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlConnectionString))
        {
            bulkCopy.DestinationTableName = "ExcelData";
            bulkCopy.WriteToServer(dr);
        }
    }
}
Related
Im trying to update rows in my table using loop, I get no error but nothing changed in my data...Thats my code.. What am I missing ?
/// <summary>
/// Updates Account.address1_postalcode for each entry in
/// <paramref name="zipCodeToUpdate"/>, issuing one parameterized UPDATE per
/// entry.  The command and its parameters are created once; only the values
/// change per iteration.
/// </summary>
/// <param name="zipCodeToUpdate">Pairs of (AccountId, NewVal) to apply.</param>
private void updateZipInDB(List<ZipObj> zipCodeToUpdate)
{
    string dbConnString = ConfigurationManager.AppSettings["dbConnectionString"].ToString();
    using (SqlConnection conn = new SqlConnection(dbConnString))
    {
        // SQL Server parameters use the '@' prefix; the original '#newVal' /
        // '#accountID' placeholders were never bound, which is why no rows
        // changed.  (A stray half-finished string-concatenation line inside
        // the loop was also removed — it did not compile.)
        string comm = "UPDATE Account SET address1_postalcode = @newVal WHERE AccountId = @accountID";
        using (SqlCommand command = new SqlCommand(comm, conn))
        {
            conn.Open();
            command.Parameters.AddWithValue("@newVal", "f");
            command.Parameters.AddWithValue("@accountID", "f");
            for (int i = 0; i < zipCodeToUpdate.Count; i++)
            {
                command.Parameters["@newVal"].Value = zipCodeToUpdate[i].NewVal;
                command.Parameters["@accountID"].Value = zipCodeToUpdate[i].AccountId;
                command.ExecuteNonQuery();
            }
            // No explicit conn.Close(): the using block disposes (and closes)
            // the connection.
        }
    }
}
I am using CloudFormation (CFN) to create an HA environment, and RDS seems to be a better fit for a SQL Server DB than self-managed instances. I have tried a manual deployment: creating the RDS instance and restoring a .bak file using an option group, connected to S3 via IAM and EC2. But I am hitting a wall when trying to do the same with CFN automation. Is there a way?
I don't have this in a state where I can "package it up for you", but this should give you a good head start....
/// <summary>
/// Base class for Lambda-backed database functions that drive the AWS RDS
/// SQL Server native backup/restore stored procedures (msdb.dbo.rds_*).
/// Connection details arrive in a BackupRestoreDatabaseInfo; progress is
/// polled via msdb.dbo.rds_task_status.
/// </summary>
public abstract class DatabaseFunctionBase
{
/// <summary>
/// Polls msdb.dbo.rds_task_status once for the given task id and reports
/// whether the task has completed successfully.
/// NOTE(review): ordinal 5 is assumed to be the status column of the
/// rds_task_status result set — confirm against the documented layout.
/// </summary>
protected static bool IsTaskComplete(SqlConnection sqlConnection, int task)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("task_id", SqlDbType.Int).Value = task;
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
// Dump every column for diagnostics (logging currently disabled).
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
// Only the first row is inspected; any status other than SUCCESS
// counts as "not complete yet".
return status == "SUCCESS";
}
}
// No rows at all: the task is unknown or not started.
return false;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
// Bare rethrow preserves the original stack trace.
throw;
}
}
/// <summary>
/// Finds the id of the active (CREATED or IN_PROGRESS) rds task for
/// <paramref name="dbName"/>, polling every 5 seconds until one appears.
/// NOTE(review): ordinals 0/2/5 are assumed to be task_id / database_name /
/// lifecycle — confirm against rds_task_status.  The loop never exits on
/// its own; the trailing throw below it is unreachable.
/// </summary>
protected int GetTaskId(SqlConnection sqlConnection, string dbName)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("db_name", SqlDbType.VarChar).Value = dbName;
do
{
// Re-execute the status procedure on every pass of the outer loop.
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
var id = reader.GetInt32(0);
var db = reader.GetString(2);
if ((status == "CREATED" || status == "IN_PROGRESS") && db == dbName)
{
return id;
}
}
// NOTE(review): this sleep runs once per ROW while the reader is
// still open, not once per polling round — confirm this is intended.
Thread.Sleep(TimeSpan.FromSeconds(5));
}
} while (true);
// Unreachable: the do/while above can only be left via `return`.
throw new InvalidOperationException();
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
throw;
}
}
/// <summary>
/// Kicks off rds_backup_database for the catalog described by
/// <paramref name="info"/> and blocks (polling every 15 s) until the task
/// reports SUCCESS.
/// NOTE(review): the S3 key embeds DateTime.Now in round-trip ("O") format,
/// which contains ':' characters — confirm S3 accepts that key shape.
/// NOTE(review): sqlConnection.Open() is the synchronous overload inside an
/// async method; OpenAsync would be more conventional.
/// </summary>
protected async Task BackupDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
InitialCatalog = info.DbCatalog,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_backup_database";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("source_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
command.Parameters.Add("s3_arn_to_backup_to", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.DbCatalog}{DateTime.Now:O}.bak";
command.Parameters.Add("overwrite_S3_backup_file", SqlDbType.TinyInt).Value = 1;
command.ExecuteNonQuery();
// The procedure returns immediately; poll the task it queued.
var taskId = this.GetTaskId(sqlConnection, info.DbCatalog);
//LambdaLogger.Log($"{nameof(taskId)}={taskId}");
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
/// <summary>
/// Restores info.FromCatalog's .bak from S3 into info.DbCatalog, unless the
/// catalog already exists.  After the restore task succeeds, retries
/// connecting to the new catalog until it accepts connections or the Lambda
/// is nearly out of time.
/// </summary>
protected async Task CreateDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true,
InitialCatalog = info.DbCatalog
};
// Probe: if we can open a connection to the catalog, it already exists.
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
try
{
sqlConnection.Open();
// already present - exit
return;
}
catch (Exception e)
{
// Expected when the catalog does not exist yet; fall through to restore.
//LambdaLogger.Log(e.ToString());
}
}
// remove the catalog so we can connect to the server directly
sqlConnectionStringBuilder.InitialCatalog = string.Empty;
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
sqlConnection.Open();
await using var restoreCommand = sqlConnection.CreateCommand();
restoreCommand.CommandText = "msdb.dbo.rds_restore_database";
restoreCommand.CommandType = CommandType.StoredProcedure;
restoreCommand.Parameters.Add("restore_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
restoreCommand.Parameters.Add("s3_arn_to_restore_from", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.FromCatalog}.bak";
restoreCommand.ExecuteNonQuery();
// Poll the queued restore task until it reports SUCCESS.
var taskId = GetTaskId(sqlConnection, info.DbCatalog);
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
// this might be redundant in a merge
// Wait for the restored catalog to accept connections (5 s retry), bailing
// out when less than a minute of Lambda time remains.
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
await using var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
try
{
sqlConnection.Open();
break;
}
catch (Exception exception)
{
//LambdaLogger.Log(exception.ToString());
await Task.Delay(TimeSpan.FromSeconds(5));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
// NOTE(review): this second wait loop duplicates the one above (15 s retry
// instead of 5 s) — likely a merge leftover; confirm before removing.
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
try
{
await using var sqlConnection2 = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
sqlConnection2.Open();
break;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
await Task.Delay(TimeSpan.FromSeconds(15));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
}
/// <summary>
/// Drops the catalog named by info.DbCatalog via rds_drop_database.
/// The hard-coded guard refuses to drop the catalog named "app"
/// (case-insensitive) — presumably the shared/production database.
/// Fire-and-forget: unlike backup/restore, the queued task is not polled.
/// </summary>
protected async Task DeleteDatabase(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
if (string.Equals(info.DbCatalog.ToString(), "app", StringComparison.InvariantCultureIgnoreCase))
{
return;
}
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var dropCommand = sqlConnection.CreateCommand();
dropCommand.CommandText = "msdb.dbo.rds_drop_database";
dropCommand.CommandType = CommandType.StoredProcedure;
dropCommand.Parameters.Add("db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
dropCommand.ExecuteNonQuery();
}
}
I looked at this post.
I know this post is old, but I'm having the same issue with an Excel file. My scenario: I have a desktop app written in JavaFX; the user exports a TableView's content into an Excel file. Everything goes fine, except that when the file is opened afterwards, a message says the file is locked by the user for editing.
How do I force, or work around, unlocking the file from the app that generated it?
This was my initial code
// Original calling code.
// NOTE(review): pc.exportToXls_File(...) is invoked once per rs.next()
// iteration with identical arguments, so the whole workbook is rebuilt and
// the file rewritten for every row of this outer result set — presumably a
// single call after (or instead of) the loop is intended; confirm what `rs`
// iterates over here.
String directory = CreateDir();
if (rs != null) {
// Build the target path; createNewFile() is a no-op if it already exists.
File file_excel = new File(directory + "\\" + reportName + ".xls");
file_excel.createNewFile();
while (rs.next()) {
pc.exportToXls_File(sql, uID, uPW, file_excel);
}
// Opening the file fails with a "locked for editing" message while any
// stream opened during the export is still open.
Desktop.getDesktop().open(file_excel);
}
exportToXls_File Function
/**
 * Runs {@code sql} and writes the full result set to {@code fileName} as an
 * HSSF (.xls) workbook: row 0 holds the column names, subsequent rows hold
 * the data.
 *
 * Fix: the {@link FileOutputStream} is now opened in try-with-resources.
 * In the original it was never closed, which left the .xls locked by this
 * process and produced the "locked by user for editing" message when the
 * file was opened afterwards.  The finally block also guards against a null
 * statement (possible when prepareStatement throws).
 */
public void exportToXls_File(String sql, String userName, String passWord, File fileName)
        throws SQLException, FileNotFoundException, IOException, Exception {
    ActionEvent event = new ActionEvent();
    DBConnection dbc = new DBConnection();
    conn = dbc.getConnection(event, userName, passWord);
    try {
        // Workbook is auto-closed by try-with-resources.
        try (HSSFWorkbook xlsWorkbook = new HSSFWorkbook()) {
            HSSFSheet xlsSheet = xlsWorkbook.createSheet();
            short rowIndex = 0;
            // Execute the SQL query.
            stmt = conn.prepareStatement(sql);
            rs = stmt.executeQuery();
            // Column names become the first (title) row of the sheet.
            ResultSetMetaData colInfo = rs.getMetaData();
            List<String> colNames = new ArrayList<>();
            HSSFRow titleRow = xlsSheet.createRow(rowIndex++);
            for (int i = 1; i <= colInfo.getColumnCount(); i++) {
                colNames.add(colInfo.getColumnName(i));
                titleRow.createCell(i - 1).setCellValue(
                        new HSSFRichTextString(colInfo.getColumnName(i)));
                xlsSheet.setColumnWidth(i - 1, (short) 4000);
            }
            // One spreadsheet row per result-set row.
            while (rs.next()) {
                HSSFRow dataRow = xlsSheet.createRow(rowIndex++);
                int colIndex = 0;
                for (String colName : colNames) {
                    dataRow.createCell(colIndex++).setCellValue(
                            new HSSFRichTextString(rs.getString(colName)));
                }
            }
            // Write to disk and, crucially, CLOSE the stream so the file is
            // not left locked by this process.
            try (FileOutputStream fos = new FileOutputStream(fileName)) {
                xlsWorkbook.write(fos);
            }
        }
    } catch (IOException | SQLException ex) {
        out.println(ex);
    } finally {
        if (conn != null) {
            if (stmt != null) {
                stmt.close();
            }
            closeConnection((OracleConnection) conn);
            conn.close();
        }
    }
}
Thank you
I am following a tutorial "Export data from sql server database to excel in wpf". Now I can achieve the function successfully. But in the exported Excel file, there is no column names (database column headers like CustomerId, CustomerName, city, postcode, telephoneNo...)
.
How can I get this feature? Also, how can I open a SaveAs dialogue? Thanks. The following is my code:
/// <summary>
/// Exports the Customers table to a new Excel workbook.  Writes the column
/// headers into row 1 (missing in the original), shifts the data down to
/// row 2, lets the user pick the destination with a Save-As dialogue, and
/// releases every COM object so Excel can exit.
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e)
{
    Microsoft.Office.Interop.Excel.Application xlApp = new Microsoft.Office.Interop.Excel.Application();
    Microsoft.Office.Interop.Excel.Workbook xlWorkBook;
    Microsoft.Office.Interop.Excel.Worksheet xlWorkSheet;
    object misValue = System.Reflection.Missing.Value;
    xlWorkBook = xlApp.Workbooks.Add(misValue);
    xlWorkSheet = (Microsoft.Office.Interop.Excel.Worksheet)xlWorkBook.Worksheets.get_Item(1);
    DataSet ds = new DataSet();
    // The connection is now disposed via using (the original leaked it).
    using (SqlConnection cnn = new SqlConnection())
    {
        // '@' (not '#') introduces a verbatim string literal.
        cnn.ConnectionString = @"Data Source=.\sqlexpress;Initial Catalog=Client;Integrated Security=SSPI;";
        cnn.Open();
        SqlDataAdapter dscmd = new SqlDataAdapter("select * from Customers", cnn);
        dscmd.Fill(ds);
    }
    // Row 1: column headers (CustomerId, CustomerName, ...).
    for (int j = 0; j < ds.Tables[0].Columns.Count; j++)
    {
        xlWorkSheet.Cells[1, j + 1] = ds.Tables[0].Columns[j].ColumnName;
    }
    // Data starts on row 2, below the header row.
    for (int i = 0; i < ds.Tables[0].Rows.Count; i++)
    {
        for (int j = 0; j < ds.Tables[0].Columns.Count; j++)
        {
            xlWorkSheet.Cells[i + 2, j + 1] = ds.Tables[0].Rows[i].ItemArray[j].ToString();
        }
    }
    // Save-As dialogue: let the user choose the destination file.
    var dlg = new Microsoft.Win32.SaveFileDialog
    {
        Filter = "Excel Workbook (*.xls)|*.xls",
        FileName = "Customers.xls"
    };
    if (dlg.ShowDialog() == true)
    {
        xlWorkBook.SaveAs(dlg.FileName);
    }
    // Already saved explicitly above, so close without the save prompt.
    xlWorkBook.Close(false, misValue, misValue);
    xlApp.Quit();
    releaseObject(xlWorkSheet);
    releaseObject(xlWorkBook);
    releaseObject(xlApp);
}
/// <summary>
/// Releases a COM proxy obtained from the Excel interop layer so the Excel
/// process can exit once all proxies are released.
/// </summary>
/// <param name="obj">The runtime-callable wrapper to release.</param>
private void releaseObject(object obj)
{
    try
    {
        // Decrement the COM reference count held by the RCW.  (The original
        // also set the local 'obj' to null; since 'obj' is passed by value
        // that had no effect outside this method and was removed.)
        System.Runtime.InteropServices.Marshal.ReleaseComObject(obj);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Exception Occured while releasing object " + ex.ToString());
    }
    finally
    {
        // Encourage prompt collection of the now-unreferenced wrappers.
        GC.Collect();
    }
}
I'm not an expert in this, but I believe you can write a column header by assigning to the Value2 property of a single-cell Range object, for example:
xlWorkSheet.Range["A1"].Value2 = "Heading for first column";
I need to improve the performance of part of my code. I'm importing Excel data into my SQL Server database; here is my code for doing that, but it really takes too much time. Could you give me some advice?
[WebMethod]
// Imports Excel rows (held in session as a DataSet) into tbl_AltMusteriler
// via SqlBulkCopy, then assigns the imported contacts to the selected groups
// (tbl_KisiGrup), also via SqlBulkCopy.
//   alanlar: comma-separated "xx_field=excelColumn" mappings
//   gruplar: comma-separated group ids (may contain "chkall" / empties)
//   shit:    name of the worksheet/table inside the session DataSet
// Returns "ok" unconditionally.
// NOTE(review): ensonatilacakalan / ensonalicaktarih are built but never
// used; both bulk-copy catch blocks roll back and swallow the exception
// without logging, so failures are silent.
public static string VerileriAktar(string alanlar, string gruplar, string shit)
{
ArtiDBEntities entity = new ArtiDBEntities();
string[] eslesmeler = alanlar.Split(',');
string[] grplar = gruplar.Split(',');
// Excel data was staged in session by an earlier upload step.
DataSet ds = (DataSet)HttpContext.Current.Session["ExcelVerileri"];
DataTable dt = ds.Tables["" + shit + ""];
// Current user's id — every imported row is tagged with it.
MembershipUser gelen = (MembershipUser)HttpContext.Current.Session["kimo"];
Guid aa = (Guid)gelen.ProviderUserKey;
List<tbl_AltMusteriler> bulkliste = new List<tbl_AltMusteriler>();
// Existing contacts of this user, used below to skip duplicates by Gsm1.
List<tbl_AltMusteriler> ilkkontrol = entity.tbl_AltMusteriler.Where(o => o.UserId == aa).ToList();
List<tbl_AltMusteriler> grupicin = new List<tbl_AltMusteriler>();
List<tbl_OzelAlanlar> ensonatilacakalan = new List<tbl_OzelAlanlar>();
List<tbl_OzelTarihler> ensonalicaktarih = new List<tbl_OzelTarihler>();
// Rename the DataTable's columns to the mapped field names.
foreach (string item_col_name in eslesmeler)
{
string alan = item_col_name.Split('=')[0].Split('_')[1];
string degisecek = item_col_name.Split('=')[1];
if (degisecek == "")
continue;
dt.Columns[degisecek].ColumnName = alan;
}
#region verilerde
// Convert each Excel row into a tbl_AltMusteriler entity.
foreach (DataRow dr in dt.Rows)
{
tbl_AltMusteriler yeni = new tbl_AltMusteriler();
// NOTE(review): this inner loop visits every column and the `continue`
// statements below only skip to the NEXT COLUMN, not the next row — an
// invalid gsm number does not actually reject the row; confirm intent.
foreach (DataColumn dtclm in dt.Columns)
{
string gsm1 = "";
if (dtclm.ColumnName == "gsm1")
gsm1 = dr["gsm1"].ToString();
string gsm2 = "";
if (dtclm.ColumnName == "gsm2")
gsm2 = dr["gsm2"].ToString();
string ad = "";
if (dtclm.ColumnName == "ad")
ad = dr["ad"].ToString();
string soyad = "";
if (dtclm.ColumnName == "soyad")
soyad = dr["soyad"].ToString();
if (gsm1 != "")
{
if (Tools.isNumber(gsm1) == false)
continue;
else
{
// Normalise to 10 digits (drop leading digit, e.g. "0").
if (gsm1.Length > 10)
gsm1 = gsm1.Substring(1, 10);
yeni.Gsm1 = gsm1;
}
}
if (gsm2 != "")
{
if (Tools.isNumber(gsm2) == false)
continue;
else
{
if (gsm2.Length > 10)
gsm2 = gsm2.Substring(1, 10);
yeni.Gsm2 = gsm2;
}
}
if (ad != "")
yeni.Ad = ad;
if (soyad != "")
yeni.Soyad = soyad;
}
yeni.UserId = new Guid(aa.ToString());
// NOTE(review): Gsm1 is null (not "") when no gsm1 column matched, so
// this filter may not exclude rows without a number — verify.
if (yeni.Gsm1 != "")
grupicin.Add(yeni);
}
#endregion
// De-duplicate by Gsm1, keeping the first occurrence per number.
bulkliste = grupicin.GroupBy(cust => cust.Gsm1).Select(grp => grp.First()).ToList();
// Keep only numbers not already present for this user.
// NOTE(review): this O(n*m) Any() scan is the reported hot spot for 70k+
// rows; a HashSet of existing Gsm1 values would make it O(n+m).
List<tbl_AltMusteriler> yokartikin = bulkliste.Where(o => !ilkkontrol.Any(p => o.Gsm1 == p.Gsm1)).ToList();
int saybakim = yokartikin.Count();
// Materialise the new entities into a DataTable for SqlBulkCopy,
// via reflection over the entity's properties.
DataTable bulkdt = new DataTable();
if (yokartikin.Count > 0)
{
Type listType = yokartikin.ElementAt(0).GetType();
PropertyInfo[] properties = listType.GetProperties();
foreach (PropertyInfo property in properties)
if (property.Name == "UserId")
bulkdt.Columns.Add(new DataColumn() { ColumnName = property.Name, DataType = typeof(Guid) });
else
bulkdt.Columns.Add(new DataColumn() { ColumnName = property.Name });
foreach (object itembulk in yokartikin)
{
DataRow drbk = bulkdt.NewRow();
foreach (DataColumn col in bulkdt.Columns)
drbk[col] = listType.GetProperty(col.ColumnName).GetValue(itembulk, null);
bulkdt.Rows.Add(drbk);
}
}
//var rowsOnlyInDt1 = bulkdt.AsEnumerable().Where(r => !bulkdt44.AsEnumerable()
//    .Any(r2 => r["gsm1"].ToString() == r2["gsm1"].ToString()));
//DataTable result = rowsOnlyInDt1.CopyToDataTable();//The third table
// Bulk-insert the new contacts in a single transaction.
if (bulkdt.Rows.Count > 0)
{
using (var connection = new SqlConnection(ConfigurationManager.ConnectionStrings["ArtiDBMemberShip"].ConnectionString))
{
SqlTransaction transaction = null;
connection.Open();
try
{
transaction = connection.BeginTransaction();
using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
sqlBulkCopy.BulkCopyTimeout = 240;
sqlBulkCopy.DestinationTableName = "tbl_AltMusteriler";
sqlBulkCopy.ColumnMappings.Add("UserId", "UserId");
sqlBulkCopy.ColumnMappings.Add("Ad", "Ad");
sqlBulkCopy.ColumnMappings.Add("Soyad", "Soyad");
sqlBulkCopy.ColumnMappings.Add("Adres", "Adres");
sqlBulkCopy.ColumnMappings.Add("Gsm1", "Gsm1");
sqlBulkCopy.ColumnMappings.Add("Gsm2", "Gsm2");
sqlBulkCopy.ColumnMappings.Add("Faks", "Faks");
sqlBulkCopy.ColumnMappings.Add("Telefonis", "Telefonis");
sqlBulkCopy.ColumnMappings.Add("Telefonev", "Telefonev");
sqlBulkCopy.ColumnMappings.Add("Eposta", "Eposta");
sqlBulkCopy.ColumnMappings.Add("DogumTarihi", "DogumTarihi");
sqlBulkCopy.ColumnMappings.Add("EvlilikTar", "EvlilikTar");
sqlBulkCopy.ColumnMappings.Add("TcNo", "TcNo");
//sqlBulkCopy.ColumnMappings.Add("Deleted", "Deleted");
sqlBulkCopy.WriteToServer(bulkdt);
}
transaction.Commit();
}
catch (Exception)
{
// NOTE(review): the failure is rolled back but swallowed — the caller
// still receives "ok"; consider logging/rethrowing.
transaction.Rollback();
}
}
// NOTE(review): nothing was changed through the EF context here, so this
// SaveChanges is presumably a no-op — verify.
entity.SaveChanges();
}
if (grplar.Length > 0)
{
// Re-read the contacts so the identity ids assigned during bulk copy are
// visible; SqlBulkCopy does not return generated ids.
List<tbl_AltMusteriler> guncelliste = entity.tbl_AltMusteriler.Where(o => o.UserId == aa).ToList();
List<tbl_KisiGrup> kisigruplari = new List<tbl_KisiGrup>();
foreach (tbl_AltMusteriler itemblkliste in bulkliste)
{
// Map each imported contact back to its database id via Gsm1.
long AltMusteriIDsi = guncelliste.Where(o => o.Gsm1 == itemblkliste.Gsm1).FirstOrDefault().AltMusteriID;
// Add the contacts to the selected groups.
#region Gruplara ekleme
if (grplar.Length > 0)
{
foreach (string item_gruplar in grplar)
{
if (item_gruplar == "chkall")
continue;
if (item_gruplar == "")
continue;
if (item_gruplar == null)
continue;
tbl_KisiGrup yeni_kisi_grup = new tbl_KisiGrup()
{
AltMusteriID = AltMusteriIDsi,
GrupID = int.Parse(item_gruplar)
};
kisigruplari.Add(yeni_kisi_grup);
}
}
#endregion
}
// Skip (contact, group) pairs that already exist.
List<tbl_KisiGrup> guncel_grup = entity.tbl_KisiGrup.Where(o => o.tbl_AltMusteriler.UserId == aa).ToList();
List<tbl_KisiGrup> kisi_grup_kaydet = kisigruplari.Where(o => !guncel_grup.Any(p => o.AltMusteriID == p.AltMusteriID && o.GrupID == p.GrupID)).ToList();
// Convert the group assignments into a DataTable.
#region Grupları Datatable le çevirme
DataTable bulkdt2 = new DataTable();
if (kisi_grup_kaydet.Count > 0)
{
Type listType = kisi_grup_kaydet.ElementAt(0).GetType();
//Get element properties and add datatable columns
PropertyInfo[] properties = listType.GetProperties();
foreach (PropertyInfo property in properties)
bulkdt2.Columns.Add(new DataColumn() { ColumnName = property.Name });
foreach (object itembulk in kisi_grup_kaydet)
{
DataRow drbk = bulkdt2.NewRow();
foreach (DataColumn col in bulkdt2.Columns)
drbk[col] = listType.GetProperty(col.ColumnName).GetValue(itembulk, null);
bulkdt2.Rows.Add(drbk);
}
}
#endregion
// Here we perform the bulk insert of the group assignments in one go.
#region Grup Verileri BulkCopy ile birkerede yazdık
using (var connection = new SqlConnection(ConfigurationManager.ConnectionStrings["ArtiDBMemberShip"].ConnectionString))
{
SqlTransaction transaction = null;
connection.Open();
try
{
transaction = connection.BeginTransaction();
using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
sqlBulkCopy.BulkCopyTimeout = 240;
sqlBulkCopy.DestinationTableName = "tbl_KisiGrup";
sqlBulkCopy.ColumnMappings.Add("AltMusteriID", "AltMusteriID");
sqlBulkCopy.ColumnMappings.Add("GrupID", "GrupID");
sqlBulkCopy.WriteToServer(bulkdt2);
}
transaction.Commit();
}
catch (Exception)
{
// NOTE(review): failure swallowed here too — see the first bulk copy.
transaction.Rollback();
}
}
entity.SaveChanges();
#endregion
}
return "ok";
}
EDIT
actually that codeblock takes time when if there is 70.000 or more rows data
List<tbl_AltMusteriler> yokartikin = bulkliste.Where(o => !ilkkontrol.Any(p => o.Gsm1 == p.Gsm1)).ToList();
I think my main problem is that, when inserting data with SqlBulkCopy, I can't get the identity IDs back. For that reason I load the data into a generic list, try to find the new IDs, build a new list of group assignments, and run SqlBulkCopy again. All of this takes about 10 minutes to import 65,000 rows. Is there another way to do these things?
Your question is very broad. I would recommend reading the performance considerations for EF. Also keep in mind that EF is not really meant for bulk operations, since it brings all the data from the database to the client. This adds a lot of overhead if you want to do this for many entities that you don't actually need to process on the client. (Note: I have not really looked into your code — it's too much.)