Validating Azure AD JWT in C# - azure-active-directory

Validate the Azure AD JWT from your C# code by extracting the appid/client ID from the token. The code below takes the token as a string and compares your client ID against the appid/client ID extracted from the token.
// Requires Microsoft.IdentityModel.Protocols, Microsoft.IdentityModel.Protocols.OpenIdConnect,
// Microsoft.IdentityModel.Tokens, System.IdentityModel.Tokens.Jwt, System.Security.Claims and System.Linq.
public bool Validate(string token)
{
    string clientId = "Your appid/clientid";
    string stsDiscoveryEndpoint = "https://login.microsoftonline.com/common/v2.0/.well-known/openid-configuration";

    // Pull the signing keys from the OpenID Connect discovery document.
    var configManager = new ConfigurationManager<OpenIdConnectConfiguration>(stsDiscoveryEndpoint, new OpenIdConnectConfigurationRetriever());
    OpenIdConnectConfiguration config = configManager.GetConfigurationAsync().Result;

    // Note: audience, issuer and lifetime validation are switched off here; only the signature is verified.
    var validationParameters = new TokenValidationParameters
    {
        ValidateAudience = false,
        ValidateIssuer = false,
        IssuerSigningKeys = config.SigningKeys, // use all published keys so key rollover doesn't break validation
        ValidateLifetime = false
    };

    var tokenHandler = new JwtSecurityTokenHandler();
    ClaimsPrincipal principal = tokenHandler.ValidateToken(token, validationParameters, out SecurityToken jwt);

    // Compare the appid claim from the validated token with our client id.
    string appId = principal.Claims.FirstOrDefault(c => c.Type == "appid")?.Value ?? string.Empty;
    return appId == clientId;
}
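For context, here is a minimal usage sketch, assuming an ASP.NET Core controller where the JWT arrives in an Authorization: Bearer header (the controller action and header handling are illustrative, not part of the original snippet):

// Hypothetical caller: strip the "Bearer " prefix and hand the raw JWT to Validate().
[HttpGet("api/secure-data")]
public IActionResult GetSecureData()
{
    string authHeader = Request.Headers["Authorization"].ToString();
    if (!authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
        return Unauthorized();

    string token = authHeader.Substring("Bearer ".Length).Trim();
    return Validate(token) ? Ok("Token accepted") : Unauthorized();
}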

Related

Is there a way to restore a SQL Server backup in RDS from S3 by using cloudformation?

I am using CFN to create an HA environment, and RDS seems to be a better fit for the SQL Server database than EC2 instances. I have tried the manual deployment: creating the RDS instance, restoring the .bak through an option group, and connecting it to S3 with IAM and EC2. But I am hitting a wall when doing the same with CFN automation. Is there a way?
I don't have this in a state where I can "package it up for you", but this should give you a good head start....
public abstract class DatabaseFunctionBase
{
protected static bool IsTaskComplete(SqlConnection sqlConnection, int task)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("task_id", SqlDbType.Int).Value = task;
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
return status == "SUCCESS";
}
}
return false;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
throw;
}
}
protected int GetTaskId(SqlConnection sqlConnection, string dbName)
{
try
{
using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_task_status";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("db_name", SqlDbType.VarChar).Value = dbName;
do
{
using var reader = command.ExecuteReader();
while (reader.Read())
{
if (reader.HasRows)
{
var s = new StringBuilder();
for (int i = 0; i < reader.FieldCount; i++)
{
s.AppendLine($"{i}={reader[i]}");
}
//LambdaLogger.Log(s.ToString());
var status = reader.GetString(5);
var id = reader.GetInt32(0);
var db = reader.GetString(2);
if ((status == "CREATED" || status == "IN_PROGRESS") && db == dbName)
{
return id;
}
}
}
// Wait between polls of rds_task_status so we don't spin in a tight loop when no rows come back yet.
Thread.Sleep(TimeSpan.FromSeconds(5));
} while (true);
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
throw;
}
}
protected async Task BackupDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
InitialCatalog = info.DbCatalog,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var command = sqlConnection.CreateCommand();
command.CommandText = "msdb.dbo.rds_backup_database";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add("source_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
command.Parameters.Add("s3_arn_to_backup_to", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.DbCatalog}{DateTime.Now:O}.bak";
command.Parameters.Add("overwrite_S3_backup_file", SqlDbType.TinyInt).Value = 1;
command.ExecuteNonQuery();
var taskId = this.GetTaskId(sqlConnection, info.DbCatalog);
//LambdaLogger.Log($"{nameof(taskId)}={taskId}");
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
protected async Task CreateDatabaseAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true,
InitialCatalog = info.DbCatalog
};
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
try
{
sqlConnection.Open();
// already present - exit
return;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
}
}
// remove the catalog so we can connect to the server directly
sqlConnectionStringBuilder.InitialCatalog = string.Empty;
await using (var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString))
{
sqlConnection.Open();
await using var restoreCommand = sqlConnection.CreateCommand();
restoreCommand.CommandText = "msdb.dbo.rds_restore_database";
restoreCommand.CommandType = CommandType.StoredProcedure;
restoreCommand.Parameters.Add("restore_db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
restoreCommand.Parameters.Add("s3_arn_to_restore_from", SqlDbType.VarChar).Value =
$"{info.BackupBucket}/{info.FromCatalog}.bak";
restoreCommand.ExecuteNonQuery();
var taskId = GetTaskId(sqlConnection, info.DbCatalog);
do
{
if (IsTaskComplete(sqlConnection, taskId))
{
//LambdaLogger.Log("Complete");
break;
}
//LambdaLogger.Log("Sleeping...");
await Task.Delay(TimeSpan.FromSeconds(15));
} while (true);
}
// this might be redundant in a merge
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
await using var sqlConnection = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
try
{
sqlConnection.Open();
break;
}
catch (Exception exception)
{
//LambdaLogger.Log(exception.ToString());
await Task.Delay(TimeSpan.FromSeconds(5));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
sqlConnectionStringBuilder.InitialCatalog = info.DbCatalog;
do
{
try
{
await using var sqlConnection2 = new SqlConnection(sqlConnectionStringBuilder.ConnectionString);
sqlConnection2.Open();
break;
}
catch (Exception e)
{
//LambdaLogger.Log(e.ToString());
await Task.Delay(TimeSpan.FromSeconds(15));
}
} while (context.RemainingTime > TimeSpan.FromMinutes(1));
// this should already be in merged code
}
protected async Task DeleteDatabase(BackupRestoreDatabaseInfo info, ILambdaContext context)
{
if (string.Equals(info.DbCatalog.ToString(), "app", StringComparison.InvariantCultureIgnoreCase))
{
return;
}
var sqlConnectionStringBuilder = new Microsoft.Data.SqlClient.SqlConnectionStringBuilder
{
DataSource = info.DbServer,
UserID = info.DbUserId,
Password = info.DbPassword,
Authentication = SqlAuthenticationMethod.SqlPassword,
MultipleActiveResultSets = true
};
var connectionString = sqlConnectionStringBuilder.ConnectionString;
//LambdaLogger.Log($"{nameof(this.BackupDatabaseFunctionAsync)}:{nameof(connectionString)}:{connectionString}");
await using var sqlConnection = new SqlConnection(connectionString);
sqlConnection.Open();
await using var dropCommand = sqlConnection.CreateCommand();
dropCommand.CommandText = "msdb.dbo.rds_drop_database";
dropCommand.CommandType = CommandType.StoredProcedure;
dropCommand.Parameters.Add("db_name", SqlDbType.VarChar).Value = info.DbCatalog.ToString();
dropCommand.ExecuteNonQuery();
}
}
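To show how this base class might be wired up, here is a minimal usage sketch, assuming an AWS Lambda entry point; the function class and handler name are hypothetical, while BackupRestoreDatabaseInfo, ILambdaContext and BackupDatabaseAsync come from the code above:

// Hypothetical Lambda function that drives an RDS native backup via the base class above.
public class BackupDatabaseFunction : DatabaseFunctionBase
{
    // info would typically be built from the CloudFormation custom resource event payload.
    public async Task HandleAsync(BackupRestoreDatabaseInfo info, ILambdaContext context)
    {
        await this.BackupDatabaseAsync(info, context);
    }
}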

No mail is sent after user creation - is Logic Apps making a mistake?

This is the case, for example, when a user creates an account: the next day, i.e. after 1 day, I need Azure Logic Apps to send an email.
The problem is that right now it is not sending any email to me at all, even though I signed up yesterday. It does, however, send me a notification that the run has completed successfully.
I would like to know what goes wrong, since it never emails me even though I signed up yesterday.
My Logic App (from Azure) - images
Be aware that the code could be made shorter, but I just need to find out whether Logic Apps is making a mistake or whether my code works without problems.
[Route("/api/cronjob")]
[HttpGet]
public async Task<IActionResult> NewSletterUserEmail()
{
await Newsletter();
return Ok("Godkendt!");
}
public async Task<IActionResult> Newsletter()
{
var m = new Settings.ArdklarMail();
var dtt = DateTime.Now;
var days = _dbContext.OfferUser.Max(i => i.Days);
var MaxDays = DateTime.Now.AddDays(-days);
var userlist = _dbContext.Users.Where(i => i.Opretdate >= MaxDays && i.TilmeldtNyhedsbrev == true).ToList();
if (userlist != null)
{
foreach (var item in userlist)
{
string mail = item.Brugernavn;
string fullname = item.Navn;
var memberData = _dbContext.MemberShipValue.FirstOrDefault(r => r.UserId == item.UserId);
if (memberData == null)
{
//regular user
var result = _dbContext.OfferUser.Where(x => x.Value == 1).ToList();
if (result != null)
{
foreach (var itemValue in result)
{
int itemValueDays = itemValue.Days;//if no number of days is set, this is 0.
var daysValue = DateTime.Now.AddDays(-itemValueDays);
if (item.Opretdate.Date == daysValue)
{
var title = itemValue.Title;
var viewModel = new EmailModel
{
getUrl = m.RemoveLinkUrl(),
Title = title,
FullName = fullname,
Text = itemValue.Text.ToHtmlString()
};
var resultMail = await _viewRenderService.RenderToStringAsync("~/Views/Templates/OfferToUsers.cshtml", viewModel);//return Null here
MailMessageControl mailA = new MailMessageControl();
mailA.SetCredentials(m.azureName(), m.password());
mailA.SetSender(m.mailFrom());
mailA.AddAddressSee(item.Brugernavn);
mailA.SetSubject(title);
mailA.SetBody(resultMail);
mailA.SendEmail();
await Task.Delay(2200);
}
}
}
}
else
{
var result = _dbContext.OfferUser.Where(x => x.Value == 2).ToList();
if (result != null)
{
foreach (var itemValue in result)
{
int itemValueDays = itemValue.Days;//if no number of days is set, this is 0.
var daysValue = DateTime.Now.AddDays(-itemValueDays);
if (item.Opretdate.Date == daysValue)
{
var title = itemValue.Title;
var viewModel = new EmailModel
{
getUrl = m.RemoveLinkUrl(),
Title = title,
FullName = fullname,
Text = itemValue.Text.ToHtmlString()
};
var resultMail = await _viewRenderService.RenderToStringAsync("~/Views/Templates/OfferToUsers.cshtml", viewModel);//return Null here
MailMessageControl mailA = new MailMessageControl();
mailA.SetCredentials(m.azureName(), m.password());
mailA.SetSender(m.mailFrom());
mailA.AddAddressSee(item.Brugernavn);
mailA.SetSubject(title);
mailA.SetBody(resultMail);
mailA.SendEmail();
await Task.Delay(2200);
}
}
}
}
}
}
//This is for the newsletter subscribers who are sent newsletters about various things.
var newsletterlist = _dbContext.NewsletterList.Where(i => i.Tilmeldtdato >= MaxDays).ToList();
if (newsletterlist != null)
{
foreach (var item in newsletterlist)
{
string mail = item.Email;
string fullname = item.Email;
//for the newsletter section
var result = _dbContext.OfferUser.Where(x => x.Value == 3).ToList();
if (result != null)
{
foreach (var itemValue in result)
{
int itemValueDays = itemValue.Days;
var daysValue = DateTime.Now.AddDays(-itemValueDays);
if (item.Tilmeldtdato.Date == daysValue)
{
var title = itemValue.Title;
var viewModel = new EmailModel
{
getUrl = m.RemoveLinkUrl(),
Title = title,
FullName = fullname,
Text = itemValue.Text.ToHtmlString()
};
var resultMail = await _viewRenderService.RenderToStringAsync("~/Views/Templates/OfferToUsers.cshtml", viewModel);
MailMessageControl mailA = new MailMessageControl();
mailA.SetCredentials(m.azureName(), m.password());
mailA.SetSender(m.mailFrom());
mailA.AddAddressSee(mail);
mailA.SetSubject(title);
mailA.SetBody(resultMail);
mailA.SendEmail();
await Task.Delay(3500);
}
}
}
}
}
return Ok("Godkendt!");
}
No, I don't see a mistake in Azure Logic Apps. Since the Logic App uses a recurrence trigger, it runs at the defined interval.
As there is a send-email action on both sides of the parallel branch, it will send you an email either way.
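One thing worth double-checking in the posted C# (an observation on the code above, not part of the original answer): item.Opretdate.Date == daysValue compares a midnight-only date with DateTime.Now.AddDays(-itemValueDays), which still carries the current time of day, so the equality almost never holds. A minimal sketch of a date-only comparison:

// Hypothetical adjustment: compare calendar dates only, so the time of day is ignored.
var daysValue = DateTime.Now.AddDays(-itemValueDays).Date;
if (item.Opretdate.Date == daysValue)
{
    // the user signed up exactly itemValueDays days ago - send the offer email
}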

How does one connect to the RootDSE and/or retrieve NetBiosDomain Name with System.DirectoryServices.Protocols?

With DirectoryEntry, one can connect and find the NetBIOS domain name as follows:
private string GetNetbiosDomainName(string dnsDomainName)
{
string netbiosDomainName = string.Empty;
DirectoryEntry rootDSE = new DirectoryEntry("LDAP://RootDSE");
string configurationNamingContext = rootDSE.Properties["configurationNamingContext"][0].ToString();
DirectoryEntry searchRoot = new DirectoryEntry("LDAP://cn=Partitions," + configurationNamingContext);
DirectorySearcher searcher = new DirectorySearcher(searchRoot);
//searcher.SearchScope = SearchScope.OneLevel;
searcher.PropertiesToLoad.Add("netbiosname");
searcher.Filter = string.Format("(&(objectcategory=Crossref)(dnsRoot={0})(netBIOSName=*))", dnsDomainName);
SearchResult result = searcher.FindOne();
if (result != null)
{
netbiosDomainName = result.Properties["netbiosname"][0].ToString();
}
return netbiosDomainName;
}
where dnsDomainName is a fully qualified domain name.
However, with System.DirectoryServices.Protocols, how can one connect and find the NetBIOS domain name when only the fully qualified domain name is given?
Here is the solution I found in a research paper:
private string GetDomainNetBios(string sDomainFqdn,NetworkCredential netCred)
{
string sNetBios=string.Empty;
LdapDirectoryIdentifier oLdapDirectory = null;
LdapConnection oLdapConnection = null;
try
{
oLdapDirectory = new LdapDirectoryIdentifier(sDomainFqdn, 389);
oLdapConnection = (netCred == null)
? new LdapConnection(oLdapDirectory)
: new LdapConnection(oLdapDirectory, netCred);
oLdapConnection.Timeout = TimeSpan.FromSeconds(45);
oLdapConnection.SessionOptions.TcpKeepAlive = true;
oLdapConnection.SessionOptions.ProtocolVersion = 3;
//prevents ldap connection from connecting to other servers during session
oLdapConnection.SessionOptions.ReferralChasing = ReferralChasingOptions.None;
oLdapConnection.AutoBind = false;
oLdapConnection.Bind();
// Read the RootDSE (base-scope search with a null DN) to get the configuration naming context.
SearchResponse dirRes = (SearchResponse)oLdapConnection.SendRequest(new SearchRequest(
null,
"(objectClass=*)",
SearchScope.Base,
"configurationNamingContext"
));
if (dirRes != null)
{
string sConfPartDn =
dirRes.Entries[0].Attributes["configurationNamingContext"][0].ToString();
dirRes = (SearchResponse)oLdapConnection.SendRequest(new SearchRequest(
sConfPartDn,
String.Format(CultureInfo.InvariantCulture,"(&(nETBIOSName=*)(dnsRoot={0}))", sDomainFqdn),
SearchScope.Subtree,
"nETBIOSName"
));
}
if (dirRes != null && dirRes.Entries.Count > 0)
{
sNetBios = dirRes.Entries[0].Attributes["nETBIOSName"][0].ToString();
}
return sNetBios;
}
catch (Exception ex)
{
throw new Exception(string.Format(CultureInfo.InvariantCulture, "{0}::{1}",
new StackFrame(0, true).GetMethod().Name, ex), ex);
}
finally
{
oLdapConnection?.Dispose();
}
}
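For completeness, a minimal usage sketch of the method above (called from within the same class, since it is private); the domain name and credentials are placeholders, and it assumes the domain controller is reachable on port 389:

// Hypothetical call: resolve the NetBIOS name for a given FQDN with explicit credentials.
var credential = new NetworkCredential("serviceAccount", "P@ssw0rd", "contoso.com");
string netbiosName = GetDomainNetBios("contoso.com", credential);
Console.WriteLine("NetBIOS name: " + netbiosName);

// Passing null for the credential uses the credentials of the current process instead.
string netbiosDefault = GetDomainNetBios("contoso.com", null);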

value does not return null but it goes inside the condition for null value

I have a service that returns a JSON string, {"IsExisting": "true"}, if the user is verified in the database. However, even when I enter the correct credentials, the code goes into the null condition for the response instead of the true condition. What could be the problem?
Here's my controller.js code:
$scope.enterlogin = function(usern,pass)
{
console.log('username is = ' + usern);
loginService.getUser(usern,pass).then(function(response){
console.log('user is = ' + response);
var obj = JSON.parse(response);
console.log('object is ' + obj['IsExisting'] );
if(obj['IsExisiting'] == null)
{
alert('Account does not exist. Please check your credentials.');
}
else
{
if(localStorage['firstTimeLoad']!='TRUE')
{
$scope.hide();
localStorage['firstTimeLoad']='TRUE';
$state.go('helpExtra');
}
else
{
$scope.hide();
$state.go('menu.mainMenu');
}
}
});
}
Here's my service code:
//angularSoap Login
.factory('loginService', ['$soap', function($soap){
var base_url = "http://localhost/UserService3/WebService1.asmx";
return {
getUser: function(usern,pass){
console.log('code side usern is = ' + usern + "" + pass);
return $soap.post(base_url, "getUserbyUsername", { uname: usern, passw: pass});
}
}
}])
And here's my web service code:
[WebMethod]
public string getUserbyUsername(string uname, string passw)
{
string cs = "Data Source =.; Initial Catalog = UsersDB; Integrated Security = True";
using (SqlConnection con = new SqlConnection(cs))
{
SqlCommand cmd = new SqlCommand("spGetUserByUsername", con);
cmd.CommandType = CommandType.StoredProcedure;
SqlParameter parameter = new SqlParameter("@Username", uname);
SqlParameter parameter2 = new SqlParameter("@Password", passw);
cmd.Parameters.Add(parameter);
cmd.Parameters.Add(parameter2);
User user = new User();
con.Open();
SqlDataReader reader = cmd.ExecuteReader();
while (reader.Read())
{
// user.Username = reader["Username"].ToString();
// user.Password = reader["Password"].ToString();
user.IsExisting = reader["IsExisting"].ToString();
}
con.Close();
return new JavaScriptSerializer().Serialize(user);
}
}
The problem is the spelling of the property you are trying to access on your object.
It should be
if(obj['IsExisting'] == null)
{
alert('Account does not exist. Please check your credentials.');
}
and not
if(obj['IsExisiting'] == null)

Google Drive - Authorize once on a single machine

I am new to Google Drive and have the following scenarios for which I am not able to find anything (not sure if anything exists or not):
–> I am creating a Windows app that will be SaaS based. Different users will register, create their company logins, and add sub-users under them. I want them to enter the Google Drive credentials in one form, and this should then work for the rest of the users. Currently the problem is that during development I completed the Google login once and it never asks for it again, but when testing on a different system with a different login, it keeps asking for the Google login. I simply want admin users to enter their Google Drive credentials once, and upload and download of files should then work for all users of that company.
–> I want to keep versions of the same file on Google Drive (just like Google Drive does by default). Let's say user A uploaded file xyz, and then user B downloaded, changed, and re-uploaded file xyz.
I want two things here: only the changed content should get uploaded, not the whole file (this will save time for the user),
and secondly I want the history of the same file so I can show it in my Windows application.
#region Get Service Object
UserCredential credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
new ClientSecrets
{
ClientId = "GoogleDriveClientID",
ClientSecret = "GoogleDriveClientSecret"
},
new[] { DriveService.Scope.Drive }, "user", CancellationToken.None).Result;
// Create the service.
service = new DriveService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = "AppName",
});
#endregion
#region Uploading
public void uploadOnGoogleDrive(ObservableCollection<JobAttachments> AttachmentsColl, bool IsDocSaved)
{
try
{
service = getServiceObject();
List<Google.Apis.Drive.v2.Data.File> fileList = retrieveAllFiles(service);
List<Google.Apis.Drive.v2.Data.File> directoryList = GetDirectoryList(service);
if (IsDocSaved)
{
#region for checking if the file already exists
foreach (Google.Apis.Drive.v2.Data.File item in fileList)
{
foreach (JobAttachments attach in AttachmentsColl)
{
if (item.Title == attach.AttachmtGUID)
{
MessageBoxResult result = System.Windows.MessageBox.Show(LogMessages.GetResourceMessage(LogMessages.MessageEnumeration.GD_AlreadyExistsMsg), "Confirmation", MessageBoxButton.YesNoCancel);
if (result == MessageBoxResult.Yes)
{
//DeleteFile(service, item);
Google.Apis.Drive.v2.Data.File body = new Google.Apis.Drive.v2.Data.File();
body.Title = attach.AttachmtGUID;
body.MimeType = item.MimeType;
fileSize = body.FileSize;
byte[] byteArray = System.IO.File.ReadAllBytes(attach.AttachmentName);
System.IO.MemoryStream stream = new System.IO.MemoryStream(byteArray);
FilesResource.UpdateMediaUpload request = service.Files.Update(body, item.Id, stream, item.MimeType);
request.Upload();
}
else
{
return;
}
break;
}
}
}
#endregion
}
else
{
#region for direct uploading on google drive
if (AttachmentsCollection != null && AttachmentsCollection.Count > 0)
{
string folderID = string.Empty;
if (_IsProject)
{
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_ProjectName))
{
folderID = dir.Id;
break;
}
}
}
if (string.IsNullOrEmpty(folderID))
{
Google.Apis.Drive.v2.Data.File foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _ProjectName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = "root" } };
Google.Apis.Drive.v2.Data.File file = service.Files.Insert(foldbody).Execute();
folderID = file.Id;
}
}
else
{
//project folder
string prjFolder = string.Empty;
string tskFolder = string.Empty;
Google.Apis.Drive.v2.Data.File foldbody;
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_ProjectName))
{
prjFolder = dir.Id;
break;
}
}
}
if (string.IsNullOrEmpty(prjFolder))
{
foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _ProjectName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = "root" } };
Google.Apis.Drive.v2.Data.File file = service.Files.Insert(foldbody).Execute();
prjFolder = file.Id;
}
//task folder
if (directoryList != null && directoryList.Count > 0)
{
foreach (var dir in directoryList)
{
if (dir.Title.Equals(_TaskName) && dir.Parents[0].Id.Equals(prjFolder))
{
folderID = dir.Id;
break;
}
}
}
if (string.IsNullOrWhiteSpace(folderID))
{
foldbody = new Google.Apis.Drive.v2.Data.File();
foldbody.Title = _TaskName;
foldbody.MimeType = "application/vnd.google-apps.folder";
foldbody.Parents = new List<ParentReference>() { new ParentReference() { Id = prjFolder } };
Google.Apis.Drive.v2.Data.File file1 = service.Files.Insert(foldbody).Execute();
folderID = file1.Id;
}
}
foreach (JobAttachments item in AttachmentsColl)
{
if (!string.IsNullOrEmpty(item.AttachmentName))
{
Google.Apis.Drive.v2.Data.File body = new Google.Apis.Drive.v2.Data.File();
body.Title = item.AttachmtGUID;
body.MimeType = item.MimeType;
body.Parents = new List<ParentReference>() { new ParentReference() { Id = folderID } };
//fileSize = body.FileSize;
byte[] byteArray = System.IO.File.ReadAllBytes(item.AttachmentName);
System.IO.MemoryStream stream = new System.IO.MemoryStream(byteArray);
FilesResource.InsertMediaUpload request = service.Files.Insert(body, stream, item.MimeType);
request.Upload();
}
}
}
#endregion
}
}
catch (Exception ex)
{
if (ex.InnerException != null)
throw ex.InnerException;
}
}
#endregion
#region Download File
private async Task DownloadFile(DriveService service, string url, string title, long? fSize)
{
service = getServiceObject();
var downloader = new MediaDownloader(service);
//downloader.ChunkSize = 256 * 1024;
downloader.ProgressChanged += Download_ProgressChanged;
var fileName = string.Empty;
//for downloading on system
var SaveFileDialog = new SaveFileDialog();
SaveFileDialog.Title = "Save As";
SaveFileDialog.FileName = title;
Nullable<bool> result = SaveFileDialog.ShowDialog();
if (result == true)
fileName = SaveFileDialog.FileName;
else if (result == false)
{
prgrsBar.StyleSettings = new ProgressBarStyleSettings();
prgrsBar.Value = 0;
return;
}
else
{
if (Directory.Exists(@"\Downloads"))
fileName = @"\Downloads\" + title;
}
if (!string.IsNullOrWhiteSpace(fileName))
using (var fileStream = new System.IO.FileStream(fileName, System.IO.FileMode.Create, System.IO.FileAccess.Write))
{
fileSize = fSize;
var progress = await downloader.DownloadAsync(url, fileStream);
if (progress.Status.ToString() == DownloadStatus.Completed.ToString())
{
fName = fileStream.Name;
prgrsBar.StyleSettings = new ProgressBarStyleSettings();
prgrsBar.Value = 0;
fileStream.Flush();
}
if (progress.Status.ToString() == DownloadStatus.Failed.ToString())
{
HandleDocuments.IsEditButtonClicked = false;
MessageBox.Show("Failed......." + progress.Exception.Message);
}
}
}
#endregion
#region Delete File
private async Task DeleteFile(DriveService service, Google.Apis.Drive.v2.Data.File file)
{
service = getServiceObject(); //comment this if calling from another function; create the service object in that function and pass it as parameter to this function.
await service.Files.Delete(file.Id).ExecuteAsync();
// EmptyTrash() only builds the request; it has to be executed to take effect.
await service.Files.EmptyTrash().ExecuteAsync();
}
#endregion
#region Get all Directories and Files from Google Drive
public List<Google.Apis.Drive.v2.Data.File> GetDirectoryList(DriveService service)
{
//Creating the global list
List<Google.Apis.Drive.v2.Data.File> AllDirectories = new List<Google.Apis.Drive.v2.Data.File>();
//setting up the Request.
FilesResource.ListRequest request = service.Files.List();
//MaxResults: how many results we want back at a time; the max is 1000.
request.MaxResults = 1000;
//Q: the search query; all I want are folders that haven't been trashed (deleted).
request.Q = "mimeType='application/vnd.google-apps.folder' and trashed=false";
do
{
try
{
// getting the results
FileList files = request.Execute();
// adding the results to the list.
AllDirectories.AddRange(files.Items);
// If there are more results than your MaxResults, you will get a nextPageToken to fetch the rest of the results.
request.PageToken = files.NextPageToken;
}
catch (Exception ex)
{
request.PageToken = null;
if (ex.InnerException != null)
throw ex.InnerException;
}
} while (!String.IsNullOrEmpty(request.PageToken));
List<Google.Apis.Drive.v2.Data.File> DirsInRoot = AllDirectories.Where(a => (a.Parents.Count > 0 && a.Parents.FirstOrDefault().IsRoot.HasValue) ? a.Parents.FirstOrDefault().IsRoot.Value : false).ToList<Google.Apis.Drive.v2.Data.File>();
List<string> HirearcyList = new List<string>();
// The first dir is the root; it doesn't get returned, but we need it if we
// want to be able to list the files that are in the root dir.
HirearcyList.Add("Root");
// recursive walk of the directory tree happens here.
foreach (Google.Apis.Drive.v2.Data.File myDir in DirsInRoot)
{
HirearcyList.Add(" " + myDir.Title);
HirearcyList.AddRange(RecsiveDir(AllDirectories, myDir.Id, " "));
}
return AllDirectories;
}
public List<String> RecsiveDir(List<Google.Apis.Drive.v2.Data.File> allDirs, string ParentId, string Prefix)
{
List<string> result = new List<string>();
List<Google.Apis.Drive.v2.Data.File> DirsInParentId = allDirs.Where(a => (a.Parents.Count > 0 && a.Parents.FirstOrDefault().IsRoot.HasValue) ? a.Parents.FirstOrDefault().Id == ParentId : false).ToList<Google.Apis.Drive.v2.Data.File>();
foreach (Google.Apis.Drive.v2.Data.File myDir in DirsInParentId)
{
result.Add(Prefix + myDir.Title);
result.AddRange(RecsiveDir(allDirs, myDir.Id, Prefix + " "));
}
return result;
}
public static List<Google.Apis.Drive.v2.Data.File> retrieveAllFiles(DriveService service)
{
List<Google.Apis.Drive.v2.Data.File> result = new List<Google.Apis.Drive.v2.Data.File>();
FilesResource.ListRequest request = service.Files.List();
request.MaxResults = 1000;
do
{
try
{
FileList files = request.Execute();
result.AddRange(files.Items);
request.PageToken = files.NextPageToken;
//service.Revisions.List(files.Items[0].Id) // for getting the file Revision history
}
catch (Exception ex)
{
request.PageToken = null;
if (ex.InnerException != null)
throw ex.InnerException;
}
} while (!String.IsNullOrEmpty(request.PageToken));
return result;
}
#endregion
Thanks
Jatinder
