Entity Framework performance after SqlBulkCopy - sql-server

I need to improve the performance of one part of my application. I'm importing Excel data into my SQL Server database; here is the code that does that work, but it takes far too long. Could you give me some advice?
[WebMethod]
public static string VerileriAktar(string alanlar, string gruplar, string shit)
{
ArtiDBEntities entity = new ArtiDBEntities();
string[] eslesmeler = alanlar.Split(',');
string[] grplar = gruplar.Split(',');
DataSet ds = (DataSet)HttpContext.Current.Session["ExcelVerileri"];
DataTable dt = ds.Tables["" + shit + ""];
MembershipUser gelen = (MembershipUser)HttpContext.Current.Session["kimo"];
Guid aa = (Guid)gelen.ProviderUserKey;
List<tbl_AltMusteriler> bulkliste = new List<tbl_AltMusteriler>();
List<tbl_AltMusteriler> ilkkontrol = entity.tbl_AltMusteriler.Where(o => o.UserId == aa).ToList();
List<tbl_AltMusteriler> grupicin = new List<tbl_AltMusteriler>();
List<tbl_OzelAlanlar> ensonatilacakalan = new List<tbl_OzelAlanlar>();
List<tbl_OzelTarihler> ensonalicaktarih = new List<tbl_OzelTarihler>();
// Rename the DataTable's columns to the mapped field names.
foreach (string item_col_name in eslesmeler)
{
string alan = item_col_name.Split('=')[0].Split('_')[1];
string degisecek = item_col_name.Split('=')[1];
if (degisecek == "")
continue;
dt.Columns[degisecek].ColumnName = alan;
}
#region Processing the data
foreach (DataRow dr in dt.Rows)
{
tbl_AltMusteriler yeni = new tbl_AltMusteriler();
foreach (DataColumn dtclm in dt.Columns)
{
string gsm1 = "";
if (dtclm.ColumnName == "gsm1")
gsm1 = dr["gsm1"].ToString();
string gsm2 = "";
if (dtclm.ColumnName == "gsm2")
gsm2 = dr["gsm2"].ToString();
string ad = "";
if (dtclm.ColumnName == "ad")
ad = dr["ad"].ToString();
string soyad = "";
if (dtclm.ColumnName == "soyad")
soyad = dr["soyad"].ToString();
if (gsm1 != "")
{
if (Tools.isNumber(gsm1) == false)
continue;
else
{
if (gsm1.Length > 10)
gsm1 = gsm1.Substring(1, 10);
yeni.Gsm1 = gsm1;
}
}
if (gsm2 != "")
{
if (Tools.isNumber(gsm2) == false)
continue;
else
{
if (gsm2.Length > 10)
gsm2 = gsm2.Substring(1, 10);
yeni.Gsm2 = gsm2;
}
}
if (ad != "")
yeni.Ad = ad;
if (soyad != "")
yeni.Soyad = soyad;
}
yeni.UserId = aa;
if (!string.IsNullOrEmpty(yeni.Gsm1)) // Gsm1 stays null when the row had no usable number
grupicin.Add(yeni);
}
#endregion
bulkliste = grupicin.GroupBy(cust => cust.Gsm1).Select(grp => grp.First()).ToList();
List<tbl_AltMusteriler> yokartikin = bulkliste.Where(o => !ilkkontrol.Any(p => o.Gsm1 == p.Gsm1)).ToList();
int saybakim = yokartikin.Count();
DataTable bulkdt = new DataTable();
if (yokartikin.Count > 0)
{
Type listType = yokartikin.ElementAt(0).GetType();
PropertyInfo[] properties = listType.GetProperties();
foreach (PropertyInfo property in properties)
if (property.Name == "UserId")
bulkdt.Columns.Add(new DataColumn() { ColumnName = property.Name, DataType = typeof(Guid) });
else
bulkdt.Columns.Add(new DataColumn() { ColumnName = property.Name });
foreach (object itembulk in yokartikin)
{
DataRow drbk = bulkdt.NewRow();
foreach (DataColumn col in bulkdt.Columns)
drbk[col] = listType.GetProperty(col.ColumnName).GetValue(itembulk, null);
bulkdt.Rows.Add(drbk);
}
}
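// Note: this reflection loop resolves each PropertyInfo again for every cell;
// hoisting the GetProperty lookups out of the row loop (or feeding WriteToServer
// an IDataReader adapter such as FastMember's ObjectReader, if adding a package
// is an option) avoids the per-cell reflection cost and this intermediate copy.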
//var rowsOnlyInDt1 = bulkdt.AsEnumerable().Where(r => !bulkdt44.AsEnumerable()
// .Any(r2 => r["gsm1"].ToString() == r2["gsm1"].ToString()));
//DataTable result = rowsOnlyInDt1.CopyToDataTable();//The third table
if (bulkdt.Rows.Count > 0)
{
using (var connection = new SqlConnection(ConfigurationManager.ConnectionStrings["ArtiDBMemberShip"].ConnectionString))
{
SqlTransaction transaction = null;
connection.Open();
try
{
transaction = connection.BeginTransaction();
using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
sqlBulkCopy.BulkCopyTimeout = 240;
sqlBulkCopy.DestinationTableName = "tbl_AltMusteriler";
sqlBulkCopy.ColumnMappings.Add("UserId", "UserId");
sqlBulkCopy.ColumnMappings.Add("Ad", "Ad");
sqlBulkCopy.ColumnMappings.Add("Soyad", "Soyad");
sqlBulkCopy.ColumnMappings.Add("Adres", "Adres");
sqlBulkCopy.ColumnMappings.Add("Gsm1", "Gsm1");
sqlBulkCopy.ColumnMappings.Add("Gsm2", "Gsm2");
sqlBulkCopy.ColumnMappings.Add("Faks", "Faks");
sqlBulkCopy.ColumnMappings.Add("Telefonis", "Telefonis");
sqlBulkCopy.ColumnMappings.Add("Telefonev", "Telefonev");
sqlBulkCopy.ColumnMappings.Add("Eposta", "Eposta");
sqlBulkCopy.ColumnMappings.Add("DogumTarihi", "DogumTarihi");
sqlBulkCopy.ColumnMappings.Add("EvlilikTar", "EvlilikTar");
sqlBulkCopy.ColumnMappings.Add("TcNo", "TcNo");
//sqlBulkCopy.ColumnMappings.Add("Deleted", "Deleted");
sqlBulkCopy.WriteToServer(bulkdt);
}
transaction.Commit();
}
catch (Exception)
{
transaction.Rollback();
}
}
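// Note: nothing was added to the EF context in this branch, so the SaveChanges
// call below persists nothing extra; the bulk copy above already committed
// its own transaction.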
entity.SaveChanges();
}
if (grplar.Length > 0)
{
List<tbl_AltMusteriler> guncelliste = entity.tbl_AltMusteriler.Where(o => o.UserId == aa).ToList();
List<tbl_KisiGrup> kisigruplari = new List<tbl_KisiGrup>();
foreach (tbl_AltMusteriler itemblkliste in bulkliste)
{
long AltMusteriIDsi = guncelliste.Where(o => o.Gsm1 == itemblkliste.Gsm1).FirstOrDefault().AltMusteriID;
// Add the people to the selected groups
#region Adding to the groups
if (grplar.Length > 0)
{
foreach (string item_gruplar in grplar)
{
if (item_gruplar == "chkall")
continue;
if (item_gruplar == "")
continue;
if (item_gruplar == null)
continue;
tbl_KisiGrup yeni_kisi_grup = new tbl_KisiGrup()
{
AltMusteriID = AltMusteriIDsi,
GrupID = int.Parse(item_gruplar)
};
kisigruplari.Add(yeni_kisi_grup);
}
}
#endregion
}
List<tbl_KisiGrup> guncel_grup = entity.tbl_KisiGrup.Where(o => o.tbl_AltMusteriler.UserId == aa).ToList();
List<tbl_KisiGrup> kisi_grup_kaydet = kisigruplari.Where(o => !guncel_grup.Any(p => o.AltMusteriID == p.AltMusteriID && o.GrupID == p.GrupID)).ToList();
// Convert the groups to a DataTable
#region Converting the groups to a DataTable
DataTable bulkdt2 = new DataTable();
if (kisi_grup_kaydet.Count > 0)
{
Type listType = kisi_grup_kaydet.ElementAt(0).GetType();
//Get element properties and add datatable columns
PropertyInfo[] properties = listType.GetProperties();
foreach (PropertyInfo property in properties)
bulkdt2.Columns.Add(new DataColumn() { ColumnName = property.Name });
foreach (object itembulk in kisi_grup_kaydet)
{
DataRow drbk = bulkdt2.NewRow();
foreach (DataColumn col in bulkdt2.Columns)
drbk[col] = listType.GetProperty(col.ColumnName).GetValue(itembulk, null);
bulkdt2.Rows.Add(drbk);
}
}
#endregion
// Here we perform the bulk insert...
#region Writing the group data in one pass with BulkCopy
using (var connection = new SqlConnection(ConfigurationManager.ConnectionStrings["ArtiDBMemberShip"].ConnectionString))
{
SqlTransaction transaction = null;
connection.Open();
try
{
transaction = connection.BeginTransaction();
using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
sqlBulkCopy.BulkCopyTimeout = 240;
sqlBulkCopy.DestinationTableName = "tbl_KisiGrup";
sqlBulkCopy.ColumnMappings.Add("AltMusteriID", "AltMusteriID");
sqlBulkCopy.ColumnMappings.Add("GrupID", "GrupID");
sqlBulkCopy.WriteToServer(bulkdt2);
}
transaction.Commit();
}
catch (Exception)
{
transaction.Rollback();
}
}
entity.SaveChanges();
#endregion
}
return "ok";
}
EDIT
Actually, this is the code block that takes the time when there are 70,000 or more rows of data:
List<tbl_AltMusteriler> yokartikin = bulkliste.Where(o => !ilkkontrol.Any(p => o.Gsm1 == p.Gsm1)).ToList();
I think my main problem is that after I insert the data with SqlBulkCopy, I can't get the identity IDs back. For that reason I load the data into a generic list, try to find the new IDs, and create a new list of group rows, then run SqlBulkCopy again. All of this takes a lot of time, about 10 minutes to import 65,000 rows. Is there another way to do these things?
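As a side note on that line: both this duplicate check and the later AltMusteriID lookup are linear scans inside loops, so they degrade to O(n*m) at these row counts. A sketch of the same logic with hashed lookups, using the types and variables from the code above (mevcutGsm1 and idByGsm1 are new names introduced here):

// O(1) membership test instead of ilkkontrol.Any(...) per row:
var mevcutGsm1 = new HashSet<string>(ilkkontrol.Select(o => o.Gsm1));
List<tbl_AltMusteriler> yokartikin = bulkliste
    .Where(o => !mevcutGsm1.Contains(o.Gsm1))
    .ToList();

// After the bulk copy, index guncelliste once by Gsm1 (assumes Gsm1 is
// unique per user, which the GroupBy de-dup step already relies on):
var idByGsm1 = guncelliste.ToDictionary(o => o.Gsm1, o => o.AltMusteriID);
long AltMusteriIDsi = idByGsm1[itemblkliste.Gsm1];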

Your question is very broad. I would recommend reading the performance considerations for EF. Also keep in mind that EF is not really meant for bulk operations, since it brings all the data from the database to the client. That adds a lot of overhead when you touch a lot of entities you don't actually need to process on the client. (Note: I have not really looked into your code; it's too much.)
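To make that point concrete with the question's own names (a sketch under the assumption the model is as posted, not tested against it): instead of materializing every tbl_AltMusteriler entity just to check for duplicates, project only the column the check needs, so only the Gsm1 values cross the wire and nothing is change-tracked:

List<string> mevcutGsm1 = entity.tbl_AltMusteriler
    .Where(o => o.UserId == aa)
    .Select(o => o.Gsm1)   // EF translates this into SELECT Gsm1 ... WHERE UserId = @aa
    .ToList();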

Related

ASP.NET Core - The best way to add lots of records to a SQL Server database?

I have an Excel file that contains about 4,400 records, and I used the following loop to save these records to the database.
public async Task<IActionResult> AddExcellHorse(IFormFile ExcelFile)
{
using (var stream = new MemoryStream())
{
await ExcelFile.CopyToAsync(stream);
using (var package = new ExcelPackage(stream))
{
ExcelWorksheet worksheet = package.Workbook.Worksheets[0];
var rowCount = worksheet.Dimension.Rows;
for (int i = 2; i <= rowCount; i++)
{
if (worksheet.Cells[i, 1].Value != null)
{
AddExcelHorse viewModel = new AddExcelHorse()
{
MicrochipCode = worksheet.Cells[i, 1].Value?.ToString().Trim() ?? String.Empty,
EnHorseName = worksheet.Cells[i, 2].Value?.ToString().Trim() ?? String.Empty,
EnFatherHorseName = worksheet.Cells[i, 3].Value?.ToString().Trim() ?? String.Empty,
EnMotherHorseName = worksheet.Cells[i, 4].Value?.ToString().Trim() ?? String.Empty,
};
if (viewModel.MicrochipCode != null)
{
if (!_admin.ChechMicrochip(viewModel.MicrochipCode))
{
_admin.AddExcelHorse(viewModel);
}
}
}
}
return RedirectToAction(nameof(Index));
}
}
}
When I select the Excel file containing these records, I get the following error.
[Win32Exception: The wait operation timed out]
What is the best way to do this?
AddRangeAsync:
public async Task<IActionResult> AddExcellHorse(IFormFile ExcelFile)
{
var dataList = new List<AddExcelHorse>(); // must be a list for Add() and AddRange() below
using (var stream = new MemoryStream())
{
await ExcelFile.CopyToAsync(stream);
using (var package = new ExcelPackage(stream))
{
ExcelWorksheet worksheet = package.Workbook.Worksheets[0];
var rowCount = worksheet.Dimension.Rows;
for (int i = 2; i <= rowCount; i++)
{
if (worksheet.Cells[i, 1].Value != null)
{
AddExcelHorse viewModel = new AddExcelHorse()
{
MicrochipCode = worksheet.Cells[i, 1].Value?.ToString().Trim() ?? String.Empty,
EnHorseName = worksheet.Cells[i, 2].Value?.ToString().Trim() ?? String.Empty,
EnFatherHorseName = worksheet.Cells[i, 3].Value?.ToString().Trim() ?? String.Empty,
EnMotherHorseName = worksheet.Cells[i, 4].Value?.ToString().Trim() ?? String.Empty,
};
dataList.Add(viewModel);
}
}
if (dataList.Count > 0)
{
_context.TableName.AddRange(dataList);
await _context.SaveChangesAsync();
}
}
return RedirectToAction(nameof(Index));
}
}
}
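If AddRange is still too slow as the row count grows, the EF change tracker is usually the cost. Here is a sketch of the same import using SqlBulkCopy instead, reusing worksheet, rowCount, and the cell layout from the code above; connectionString and the Horses destination table are hypothetical names, and the column names are assumed to match the view model:

var table = new DataTable();
table.Columns.Add("MicrochipCode", typeof(string));
table.Columns.Add("EnHorseName", typeof(string));
table.Columns.Add("EnFatherHorseName", typeof(string));
table.Columns.Add("EnMotherHorseName", typeof(string));
for (int i = 2; i <= rowCount; i++)
{
    if (worksheet.Cells[i, 1].Value == null)
        continue;
    table.Rows.Add(
        worksheet.Cells[i, 1].Value?.ToString().Trim() ?? String.Empty,
        worksheet.Cells[i, 2].Value?.ToString().Trim() ?? String.Empty,
        worksheet.Cells[i, 3].Value?.ToString().Trim() ?? String.Empty,
        worksheet.Cells[i, 4].Value?.ToString().Trim() ?? String.Empty);
}
using (var connection = new SqlConnection(connectionString)) // hypothetical connection string
{
    await connection.OpenAsync();
    using (var bulkCopy = new SqlBulkCopy(connection))
    {
        bulkCopy.DestinationTableName = "Horses"; // hypothetical table name
        await bulkCopy.WriteToServerAsync(table);
    }
}

A single bulk copy avoids one INSERT round-trip per row and keeps the change tracker out of the picture entirely.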

Query does not return all data from database

I have a C# 4.0 WinForms App using SQL Server 2012 database.
On one of my forms, I select a range of dates from a MonthCalendar.
In a query using a SqlDataAdapter, the query should return the names of 4 people from a table.
After filling the DataTable, the "for" loop successfully pulls the first name.
On the next iteration, it also pulls the 2nd person's name from the table.
However, on the 3rd iteration, it again pulls the 2nd person's name and never retrieves the remaining 2 names.
Using SSMS, I can see all 4 names of the people I'm querying. If I run the query below in SSMS, I again get all 4 names.
Does anyone have an idea why the code below fails to return all 4 names, but instead repeats a previous name?
Here is the code I'm using to query the SQL Server database.
private string ReturnPerson(string dStart, string dEnd)
{
string myPerson = "";
try
{
using (SqlConnection conn = new SqlConnection(@"..."))
{
conn.Open();
using (SqlDataAdapter adap = new SqlDataAdapter("SELECT person, scheduledDate FROM Assignments WHERE scheduledDate BETWEEN @start AND @end ORDER BY scheduledDate ASC", conn))
{
adap.SelectCommand.Parameters.Add("@start", SqlDbType.NVarChar).Value = dStart;
adap.SelectCommand.Parameters.Add("@end", SqlDbType.NVarChar).Value = dEnd;
using (DataTable dt = new DataTable())
{
adap.Fill(dt);
for (int i = 0; i < dt.Rows.Count - 1; i++)
{
DataRow row = dt.Rows[i];
DataRow nextRow = dt.Rows[i + 1];
if (personRowCounter == 0) // personRowCounter declared globally
{
myPerson = row.Field<string>("person").ToString();
personRowCounter++;
return myPerson;
}
else if (personRowCounter > 0)
{
myPerson = nextRow.Field<string>("person").ToString();
personRowCounter++;
return myPerson;
}
}
}
}
}
}
catch (SqlException ex) { MessageBox.Show(ex.Message); }
catch (System.Exception ex) { MessageBox.Show(ex.Message); }
return myPerson;
}
If you start with personRowCounter = 0:
The first call will return row 0, since personRowCounter == 0 and i == 0, and it will set personRowCounter to 1.
The next call will return row 1, since personRowCounter > 0 and i == 0, and it will set personRowCounter to 2.
And every call after that will also return row 1, since personRowCounter > 0 and the loop always starts from i = 0.
I'm not sure what your intent is here. But I believe that part of the problem lies with
DataRow nextRow = dt.Rows[i + 1];
That will throw an exception when i points to the last row, because [i + 1] indexes beyond the end of dt.Rows.
Based on the comments above, I've figured out the solution. I needed to adjust the DataRow rows, as per below. This allowed me to pull in the remaining data.
private string ReturnPerson(string dStart, string dEnd)
{
string myPerson = "";
try
{
using (SqlConnection conn = new SqlConnection(@"..."))
{
conn.Open();
using (SqlDataAdapter adap = new SqlDataAdapter("SELECT person, scheduledDate FROM Assignments WHERE scheduledDate BETWEEN @start AND @end ORDER BY scheduledDate ASC", conn))
{
adap.SelectCommand.Parameters.Add("@start", SqlDbType.NVarChar).Value = dStart;
adap.SelectCommand.Parameters.Add("@end", SqlDbType.NVarChar).Value = dEnd;
using (DataTable dt = new DataTable())
{
adap.Fill(dt);
for (int i = 0; i < dt.Rows.Count - 1; i++)
{
DataRow row0 = dt.Rows[i];
DataRow row1 = dt.Rows[i + 1];
DataRow row2 = dt.Rows[i + 2];
DataRow row3 = dt.Rows[i + 3];
if (dt.Rows.Count > 4)
{
DataRow row4 = dt.Rows[4];
}
if (personRowCounter == 0) // personRowCounter declared globally
{
myPerson = row0.Field<string>("person").ToString();
personRowCounter++;
return myPerson;
}
else if (personRowCounter == 1)
{
myPerson = row1.Field<string>("person").ToString();
personRowCounter++;
return myPerson;
}
else if (personRowCounter == 2)
{
myPerson = row2.Field<string>("person").ToString();
personRowCounter++;
return myPerson;
"etc.";
}
}
}
}
}
}
catch (SqlException ex) { MessageBox.Show(ex.Message); }
catch (System.Exception ex) { MessageBox.Show(ex.Message); }
return myPerson;
}
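For what it's worth, the same intent can be met without the global counter or the fixed row0..row3 ladder, which still indexes past the end when fewer rows come back. A sketch that fetches all names once and lets the caller walk the list (same query and parameters as above):

private List<string> ReturnPersons(string dStart, string dEnd)
{
    var people = new List<string>();
    try
    {
        using (SqlConnection conn = new SqlConnection(@"..."))
        {
            conn.Open();
            using (SqlDataAdapter adap = new SqlDataAdapter("SELECT person, scheduledDate FROM Assignments WHERE scheduledDate BETWEEN @start AND @end ORDER BY scheduledDate ASC", conn))
            {
                adap.SelectCommand.Parameters.Add("@start", SqlDbType.NVarChar).Value = dStart;
                adap.SelectCommand.Parameters.Add("@end", SqlDbType.NVarChar).Value = dEnd;
                using (DataTable dt = new DataTable())
                {
                    adap.Fill(dt);
                    foreach (DataRow row in dt.Rows)
                        people.Add(row.Field<string>("person")); // every row, no counter state
                }
            }
        }
    }
    catch (SqlException ex) { MessageBox.Show(ex.Message); }
    return people;
}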

Is there a way to restore a SQL Server backup in RDS from S3 by using cloudformation?

I am using CloudFormation (CFN) to create an HA environment, and RDS seems to be a better fit for a SQL Server DB than EC2 instances. I have done a manual deployment: creating the RDS instance and restoring the .bak from S3 using an option group, connected via IAM and EC2. But I am hitting a wall when doing the same with CFN automation. Is there a way?
I don't have this in a state where I can "package it up for you", but this should give you a good head start.
public abstract class DatabaseFunctionBase
{
protected static bool IsTaskComplete(SqlConnection sqlConnection, int task)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("task_id", SqlDbType.Int).Value = task;
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
return status == "SUCCESS";
}
}
return false;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
throw;
}
}
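// Note (assumption about msdb.dbo.rds_task_status column order): the ordinal
// reads here and in GetTaskId below (GetInt32(0) = task_id, GetString(2) =
// database_name, GetString(5) = lifecycle) depend on that column order;
// reading columns by name would be more robust if Amazon ever changes it.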
protected int GetTaskId(SqlConnection sqlConnection, string dbName)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("db_name", SqlDbType.VarChar).Value = dbName;
do
{
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
var id = reader.GetInt32(0);
var db = reader.GetString(2);
if ((status == "CREATED" || status == "IN_PROGRESS") && db == dbName)
{
return id;
}
}
Thread.Sleep(TimeSpan.FromSeconds(5));
}
} while (true);
throw new InvalidOperationException();
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
throw;
}
}
protected async Task BackupDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
InitialCatalog = info.DbCatalog,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_backup_database";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("source_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
command.Parameters.Add("s3_arn_to_backup_to", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.DbCatalog}{DateTime.Now:O}.bak";
command.Parameters.Add("overwrite_S3_backup_file", SqlDbType.TinyInt).Value = 1;
command.ExecuteNonQuery();
var taskId = this.GetTaskId(sqlConnection, info.DbCatalog);
//LambdaLogger.Log($"{nameof(taskId)}={taskId}");
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
protected async Task CreateDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true,
InitialCatalog = info.DbCatalog
};
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
try
{
sqlConnection.Open();
// already present - exit
return;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
}
}
// remove the catalog so we can connect to the server directly
sqlConnectionStringBuilder.InitialCatalog = string.Empty;
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
sqlConnection.Open();
await using var restoreCommand = sqlConnection.CreateCommand();
restoreCommand.CommandText = "msdb.dbo.rds_restore_database";
restoreCommand.CommandType = CommandType.StoredProcedure;
restoreCommand.Parameters.Add("restore_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
restoreCommand.Parameters.Add("s3_arn_to_restore_from", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.FromCatalog}.bak";
restoreCommand.ExecuteNonQuery();
var taskId = GetTaskId(sqlConnection, info.DbCatalog);
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
// this might be redundant in a merge
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
await using var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
try
{
sqlConnection.Open();
break;
}
catch (Exception exception)
{
//LambdaLogger.Log(exception.ToString());
await Task.Delay(TimeSpan.FromSeconds(5));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
try
{
await using var sqlConnection2 = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
sqlConnection2.Open();
break;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
await Task.Delay(TimeSpan.FromSeconds(15));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
}
protected async Task DeleteDatabase(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
if (string.Equals(info.DbCatalog.ToString(), "app", StringComparison.InvariantCultureIgnoreCase))
{
return;
}
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var dropCommand = sqlConnection.CreateCommand();
dropCommand.CommandText = "msdb.dbo.rds_drop_database";
dropCommand.CommandType = CommandType.StoredProcedure;
dropCommand.Parameters.Add("db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
dropCommand.ExecuteNonQuery();
}
}
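A hypothetical harness for the class above, to show how the pieces fit together; the BackupRestoreDatabaseInfo property names come from the code, everything else here is illustrative:

public class RestoreDatabaseFunction : DatabaseFunctionBase
{
    // Entry point invoked from a Lambda-backed CloudFormation custom resource.
    public async Task HandleAsync(ILambdaContext context)
    {
        var info = new BackupRestoreDatabaseInfo
        {
            DbServer = "mydb.example.us-east-1.rds.amazonaws.com", // hypothetical endpoint
            DbCatalog = "AppDb",                                   // hypothetical
            DbUserId = "admin",                                    // hypothetical
            DbPassword = "...",                                    // supply securely, e.g. from Secrets Manager
            BackupBucket = "arn:aws:s3:::my-backup-bucket",        // hypothetical
            FromCatalog = "AppDb"                                  // hypothetical
        };
        await this.CreateDatabaseAsync(info, context);
    }
}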

Google Drive - Authorize once on a single machine

I am new to Google Drive and have the following scenarios for which I am not able to find anything (I'm not sure whether anything exists or not).
–> I am creating a Windows app which will be SaaS based. Different users will register, create their company logins, and create sub-users under them. I want an admin to enter the Google Drive credentials on one form, and that should then work for the rest of the company's users. Currently the problem is that during development the Google login was done once and never asked for again, but when testing on a different system with a different login, it keeps asking for the Google login. I simply want admin users to enter their Google Drive credentials once, and uploads and downloads should then work for all users of that company.
–> I want to keep versions of the same file on Google Drive (just as Google Drive does by default). Let's say user A uploaded file xyz, and then user B downloaded file xyz, changed it, and uploaded it to the drive again.
I want two things here: first, only the changed content should get uploaded, not the whole file (this will save time for the user);
secondly, I want a history of the same file, so I can show it in my Windows application.
#region Get Service Object
UserCredential credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
new ClientSecrets
{
ClientId = "GoogleDriveClientID",
ClientSecret = "GoogleDriveClientSecret"
},
new[] { DriveService.Scope.Drive }, "user", CancellationToken.None).Result;
// Create the service.
service = new DriveService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = "AppName",
});
#endregion
#region Uploading
public void uploadOnGoogleDrive(ObservableCollection<JobAttachments> AttachmentsColl, bool IsDocSaved)
{
try
{
service = getServiceObject();
List<Google.Apis.Drive.v2.Data.File> fileList = retrieveAllFiles(service);
List<Google.Apis.Drive.v2.Data.File> directoryList = GetDirectoryList(service);
if (IsDocSaved)
{
#region for checking if the file already exists
foreach (Google.Apis.Drive.v2.Data.File item in fileList)
{
foreach (JobAttachments attach in AttachmentsColl)
{
if (item.Title == attach.AttachmtGUID)
{
MessageBoxResult result = System.Windows.MessageBox.Show(LogMessages.GetResourceMessage(LogMessages.MessageEnumeration.GD_AlreadyExistsMsg), "Confirmation", MessageBoxButton.YesNoCancel);
if (result == MessageBoxResult.Yes)
{
//DeleteFile(service, item);
Google.Apis.Drive.v2.Data.File body = new Google.Apis.Drive.v2.Data.File();
body.Title = attach.AttachmtGUID;
body.MimeType = item.MimeType;
fileSize = body.FileSize;
byte[] byteArray = System.IO.File.ReadAllBytes(attach.AttachmentName);
System.IO.MemoryStream stream = new System.IO.MemoryStream(byteArray);
FilesResource.UpdateMediaUpload request = service.Files.Update(body, item.Id, stream, item.MimeType);
request.Upload();
}
else
{
return;
}
break;
}
}
}
#endregion
}
else
{
#region for direct uploading on google drive
if (AttachmentsCollection != null && AttachmentsCollection.Count > 0)
{
string folderID = string.Empty;
if (_IsProject)
{
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_ProjectName))
{
folderID = dir.Id;
break;
}
}
}
if (string.IsNullOrEmpty(folderID))
{
Google.Apis.Drive.v2.Data.File foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _ProjectName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = "root" } };
Google.Apis.Drive.v2.Data.File file = service.Files.Insert(foldbody).Execute();
folderID = file.Id;
}
}
else
{
//project folder
string prjFolder = string.Empty;
string tskFolder = string.Empty;
Google.Apis.Drive.v2.Data.File foldbody;
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_ProjectName))
{
prjFolder = dir.Id;
break;
}
}
}
if (string.IsNullOrEmpty(prjFolder))
{
foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _ProjectName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = "root" } };
Google.Apis.Drive.v2.Data.File file = service.Files.Insert(foldbody).Execute();
prjFolder = file.Id;
}
//task folder
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_TaskName) && dir.Parents[0].Id.Equals(prjFolder))
{
folderID = dir.Id;
break;
}
}
}
if (string.IsNullOrWhiteSpace(folderID))
{
foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _TaskName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = prjFolder } };
Google.Apis.Drive.v2.Data.File file1 = service.Files.Insert(foldbody).Execute();
folderID = file1.Id;
}
}
foreach (JobAttachments item in AttachmentsColl)
{
if (!string.IsNullOrEmpty(item.AttachmentName))
{
Google.Apis.Drive.v2.Data.File body = new Google.Apis.Drive.v2.Data.File();
body.Title = item.AttachmtGUID;
body.MimeType = item.MimeType;
body.Parents = new List<ParentReference>() { new ParentReference() { Id = folderID } };
//fileSize = body.FileSize;
byte[] byteArray = System.IO.File.ReadAllBytes(item.AttachmentName);
System.IO.MemoryStream stream = new System.IO.MemoryStream(byteArray);
FilesResource.InsertMediaUpload request = service.Files.Insert(body, stream, item.MimeType);
request.Upload();
}
}
}
#endregion
}
}
catch (Exception ex)
{
if (ex.InnerException != null)
throw ex.InnerException;
}
}
#endregion
#region Download File
private async Task DownloadFile(DriveService service, string url, string title, long? fSize)
{
service = getServiceObject();
var downloader = new MediaDownloader(service);
//downloader.ChunkSize = 256 * 1024;
downloader.ProgressChanged += Download_ProgressChanged;
var fileName = string.Empty;
//for downloading on system
var SaveFileDialog = new SaveFileDialog();
SaveFileDialog.Title = "Save As";
SaveFileDialog.FileName = title;
Nullable<bool> result = SaveFileDialog.ShowDialog();
if (result == true)
fileName = SaveFileDialog.FileName;
else if (result == false)
{
prgrsBar.StyleSettings = new ProgressBarStyleSettings();
prgrsBar.Value = 0;
return;
}
else
{
if (Directory.Exists(@"\Downloads"))
fileName = @"\Downloads\" + title;
}
if (!string.IsNullOrWhiteSpace(fileName))
using (var fileStream = new System.IO.FileStream(fileName, System.IO.FileMode.Create, System.IO.FileAccess.Write))
{
fileSize = fSize;
var progress = await downloader.DownloadAsync(url, fileStream);
if (progress.Status.ToString() == DownloadStatus.Completed.ToString())
{
fName = fileStream.Name;
prgrsBar.StyleSettings = new ProgressBarStyleSettings();
prgrsBar.Value = 0;
fileStream.Flush();
}
if (progress.Status.ToString() == DownloadStatus.Failed.ToString())
{
HandleDocuments.IsEditButtonClicked = false;
MessageBox.Show("Failed......." + progress.Exception.Message);
}
}
}
#endregion
#region Delete File
private async Task DeleteFile(DriveService service, Google.Apis.Drive.v2.Data.File file)
{
service = getServiceObject(); //comment this if calling from another function; create the service object in that function and pass it as a parameter to this function.
await service.Files.Delete(file.Id).ExecuteAsync();
service.Files.EmptyTrash().Execute(); // the EmptyTrash request must be executed, otherwise it does nothing
}
#endregion
#region Get all Directories and Files from Google Drive
public List<Google.Apis.Drive.v2.Data.File> GetDirectoryList(DriveService service)
{
//Creating the global list
List<Google.Apis.Drive.v2.Data.File> AllDirectories = new List<Google.Apis.Drive.v2.Data.File>();
//setting up the Request.
FilesResource.ListRequest request = service.Files.List();
// MaxResults: how many results we want back at a time; the max is 1000.
request.MaxResults = 1000;
// Q: search filter; all I want are folders that haven't been trashed (deleted).
request.Q = "mimeType='application/vnd.google-apps.folder' and trashed=false";
do
{
try
{
// getting the results
FileList files = request.Execute();
// adding the results to the list.
AllDirectories.AddRange(files.Items);
// If there are more results than your MaxResults, you will have a NextPageToken to get the rest of the results.
request.PageToken = files.NextPageToken;
}
catch (Exception ex)
{
request.PageToken = null;
if (ex.InnerException != null)
throw ex.InnerException;
}
} while (!String.IsNullOrEmpty(request.PageToken));
List<Google.Apis.Drive.v2.Data.File> DirsInRoot = AllDirectories.Where(a => (a.Parents.Count > 0 && a.Parents.FirstOrDefault().IsRoot.HasValue) ? a.Parents.FirstOrDefault().IsRoot.Value : false).ToList<Google.Apis.Drive.v2.Data.File>();
List<string> HirearcyList = new List<string>();
// The first dir is root; it doesn't get returned, but we need it if we
// want to be able to list the files that are in the root dir.
HirearcyList.Add("Root");
// recursive magic here.
foreach (Google.Apis.Drive.v2.Data.File myDir in DirsInRoot)
{
HirearcyList.Add(" " + myDir.Title);
HirearcyList.AddRange(RecsiveDir(AllDirectories, myDir.Id, " "));
}
return AllDirectories;
}
public List<String> RecsiveDir(List<Google.Apis.Drive.v2.Data.File> allDirs, string ParentId, string Prefix)
{
List<string> result = new List<string>();
List<Google.Apis.Drive.v2.Data.File> DirsInParentId = allDirs.Where(a => (a.Parents.Count > 0 && a.Parents.FirstOrDefault().IsRoot.HasValue) ? a.Parents.FirstOrDefault().Id == ParentId : false).ToList<Google.Apis.Drive.v2.Data.File>();
foreach (Google.Apis.Drive.v2.Data.File myDir in DirsInParentId)
{
result.Add(Prefix + myDir.Title);
result.AddRange(RecsiveDir(allDirs, myDir.Id, Prefix + " "));
}
return result;
}
public static List<Google.Apis.Drive.v2.Data.File> retrieveAllFiles(DriveService service)
{
List<Google.Apis.Drive.v2.Data.File> result = new List<Google.Apis.Drive.v2.Data.File>();
FilesResource.ListRequest request = service.Files.List();
request.MaxResults = 1000;
do
{
try
{
FileList files = request.Execute();
result.AddRange(files.Items);
request.PageToken = files.NextPageToken;
//service.Revisions.List(files.Items[0].Id) // for getting the file Revision history
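// (Sketch, Drive v2 API: executing that request, e.g.
// service.Revisions.List(fileId).Execute(), returns a RevisionList whose
// Items each carry an Id and ModifiedDate, which is enough to build the
// per-file history view asked about above.)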
}
catch (Exception ex)
{
request.PageToken = null;
if (ex.InnerException != null)
throw ex.InnerException;
}
} while (!String.IsNullOrEmpty(request.PageToken));
return result;
}
#endregion
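On the first scenario (the repeated login prompts): AuthorizeAsync persists the OAuth token through an IDataStore, and when none is passed it falls back to a default per-Windows-user location, which is why every new machine or login gets prompted again. A sketch of the same service setup with the token store pinned explicitly (the store name "MyApp.Auth.Store" is illustrative; FileDataStore lives in Google.Apis.Util.Store):

UserCredential credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
    new ClientSecrets
    {
        ClientId = "GoogleDriveClientID",
        ClientSecret = "GoogleDriveClientSecret"
    },
    new[] { DriveService.Scope.Drive },
    "user",
    CancellationToken.None,
    new FileDataStore("MyApp.Auth.Store")).Result;

Sharing one grant across all of a company's users would additionally mean using the same "user" key and a store every client can reach (or a service account), which is a design decision beyond this sketch.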
Thanks
Jatinder

How to retrieve parentaccountid from account using queryexpression and silverlight

My query returns account.name, account.accountid and account.parentaccountid.
I'm using Silverlight and CRM2011.
Now I'm having trouble finding out how to extract the value of the parentaccountid attribute.
I have the silverlightextensionmethods.cs file included in my VS project, and I'm using GetAttributeValue<Guid>("parentaccountid") to get the value of parentaccountid.
The value returned is empty.
Has anyone any ideas how to accomplish this?
I can get any other attribute value, but parentaccountid in account and parentcustomerid in contact are making my life very difficult.
Code:
First I create the QueryExpression:
string temp="name;accountid;parentaccountid";
string[] fields = temp.Split(';');
QueryExpression query = new QueryExpression()
{
EntityName = entity,
ColumnSet = new ColumnSet { Columns = new System.Collections.ObjectModel.ObservableCollection<string>(fields) },
Criteria = new FilterExpression
{
FilterOperator = LogicalOperator.And,
Conditions =
{
new ConditionExpression
{
AttributeName = parentidfield,
Operator = ConditionOperator.Equal,
Values = { id }
}
}
}
};
OrganizationRequest req = new OrganizationRequest();
req.RequestName = "RetrieveMultiple";
req["Query"] = query;
service.BeginExecute(req, new AsyncCallback(GetChildren_ExecuteCallBack), service);
Next I try to read the values from the response:
void GetChildren_ExecuteCallBack(IAsyncResult childresult)
{
List<TreeRecord> listc = new List<TreeRecord>();
try
{
OrganizationResponse childresponse = ((IOrganizationService)childresult.AsyncState).EndExecute(childresult);
EntityCollection childresults = (EntityCollection)childresponse["EntityCollection"];
if (childresults.Entities.Count > 0)
{
TreeConfig sitm = new TreeConfig();
string sdisplay = "";
string[] fields = "".Split(';');
string sid = "";
string pid = "";
foreach (Entity childentity in childresults.Entities)
{
foreach (TreeConfig sitem in Configs)
{
if (sitem.EntityName == childentity.LogicalName)
{
sitm = sitem;
}
}
TreeRecord childitem = new TreeRecord();
string sValue = "";
sdisplay = "name;accountid;parentaccountid";
fields = sdisplay.Split(';');
sid = "accountid";
pid = "parentaccountid";
int i = sdisplay.Split(';').Length;
for (int j = 0; j < i; j++)
{
try { sValue += childentity.GetAttributeValue<string>(fields[j]) + " "; }
catch (Exception ex)
{
//s = "sValue haku: " + ex.Message.ToString();
//this.ReportMessage(s.ToString());
}
}
childitem.Name = sValue;
childitem.EntityName = childentity.LogicalName;
childitem.Level = sitm.Level;
childitem.ParentEntityName = sitm.EntityName;
childitem.Color = sitm.Color;
childitem.RecordId = childentity.GetEntityId<Guid>(sid);
try { childitem.ParentId = childentity.GetAttributeValue<Guid>(pid); }
catch
{
//sb.AppendLine("guid: parentid tietoa ei löydy");
//this.ReportMessage(sb.ToString());
}
listc.Add(childitem);
}
}
}
Instead of
childentity.GetAttributeValue<Guid>(pid)
use
childentity.GetAttributeValue<EntityReference>(pid)
parentaccountid on account (and parentcustomerid on contact) are lookup attributes, so CRM returns them as an EntityReference rather than a raw Guid; take the Id property from the returned reference.
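If the account has no parent, that lookup comes back null, so a guard avoids a NullReferenceException (a sketch using the variables from the loop above):

EntityReference parentRef = childentity.GetAttributeValue<EntityReference>(pid);
childitem.ParentId = parentRef != null ? parentRef.Id : Guid.Empty;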
