I created an Integration Services project with three connection managers (defined at the project level). Now I have an SSIS package (.dtsx file) that I want to run from my app, so I use the Microsoft.SqlServer.Dts.Runtime namespace as below:
string pkgLocation;
Package pkg;
Microsoft.SqlServer.Dts.Runtime.Application app;
DTSExecResult pkgResults;
pkgLocation = @"D:\My Job\Tadbirgaran\Integration Services Project\MyPackage.dtsx";
app = new Microsoft.SqlServer.Dts.Runtime.Application();
pkg = app.LoadPackage(pkgLocation, null);
pkgResults = pkg.Execute();
But this code obviously does not load the project connection managers. Is there a way to load the connection manager files programmatically and add them to the package's Connections collection, or should I define the connections inside the package itself in the Integration Services project?
You can use configurations to control the connection properties programmatically. Here is an example.
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.SqlServer.Dts.Runtime;
using Microsoft.SqlServer.Dts.Tasks.BulkInsertTask;

namespace configuration_API
{
    class Program
    {
        static void Main(string[] args)
        {
            // Create a package and enable configurations.
            Package pkg = new Package();
            pkg.EnableConfigurations = true;
            pkg.ExportConfigurationFile(@"C:\conf.xml");

            // Create a variable object and add it to the
            // package Variables collection.
            Variable varPkg = pkg.Variables.Add("var", false, "", 100);
            varPkg.Value = 1;
            string packagePathToVariable = varPkg.GetPackagePath();

            // Create a configuration object and add it to the
            // package Configurations collection.
            Configuration config = pkg.Configurations.Add();

            // Set properties on the configuration object.
            config.ConfigurationString = "conf.xml";
            config.Description = "My configuration description";
            config.ConfigurationType = DTSConfigurationType.ConfigFile;
            config.PackagePath = packagePathToVariable;

            // Save the package and its configuration.
            Application app = new Application();
            app.SaveToXml(@"c:\pkg.xml", pkg, null);

            // Reload the package.
            Package p1 = app.LoadPackage(@"c:\pkg.xml", null);

            // Review the configuration information of the reloaded package.
            Configurations configs_After = p1.Configurations;
            foreach (Configuration confAfter in configs_After)
            {
                Console.WriteLine("ConfigurationString is {0}", confAfter.ConfigurationString);
                Console.WriteLine("ConfigurationType is {0}", confAfter.ConfigurationType);
                Console.WriteLine("CreationName is {0}", confAfter.CreationName);
                Console.WriteLine("Description is {0}", confAfter.Description);
                Console.WriteLine("Assigned ID is {0}", confAfter.ID);
                Console.WriteLine("Name is {0}", confAfter.Name);
            }
        }
    }
}
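The sample above targets a variable, but the same PackagePath mechanism can point at a connection manager property instead, which is closer to what the question asks. A minimal sketch (the connection manager name MyConn is hypothetical):

// Hedged sketch: direct the configuration at a connection manager's
// ConnectionString property instead of a variable. "MyConn" is a
// hypothetical name.
ConnectionManager cm = pkg.Connections.Add("OLEDB");
cm.Name = "MyConn";
config.PackagePath = @"\Package.Connections[MyConn].Properties[ConnectionString]";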
I ended up loading the project instead of the package and joining the project connection managers to the package, as below:
private void SSISLoadData()
{
    Project ssisProject = null;
    DTSExecResult pkgResults;
    try
    {
        ssisProject = Project.OpenProject(@"D:\My Job\TDB\Integration Services Project\bin\Development\Integration Services Project.ispac");
        Package pkg = ssisProject.PackageItems[0].LoadPackage(null);

        for (int i = 0; i < ssisProject.ConnectionManagerItems.Count; i++)
            pkg.Connections.Join(ssisProject.ConnectionManagerItems[i].ConnectionManager);

        pkg.Connections[0].ConnectionString = dataFolderPath + "\\*.csv";
        pkg.Connections[1].ConnectionString = string.Format("Data Source=.;Initial Catalog=TDB;Provider=SQLNCLI11.1;Integrated Security=SSPI;Initial File Name={0};", dbPath);
        pkg.Connections[2].ConnectionString = string.Format("Data Source=.;Initial Catalog=TDBRawData;Provider=SQLNCLI11.1;Integrated Security=SSPI;Initial File Name={0}\\TDBRawData.mdf;", Environment.CurrentDirectory);

        pkgResults = pkg.Execute();
        MessageBox.Show(pkgResults.ToString());
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message);
    }
    finally
    {
        if (ssisProject != null)
            ssisProject.Dispose();
    }
}
I don't know if there is a better solution for this problem, but it works.
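One small addition that may help when debugging this approach: if Execute() returns Failure, the package's Errors collection usually says why. A minimal sketch that would fit inside the try block above:

// Hedged sketch: surface the package errors behind a Failure result.
if (pkgResults == DTSExecResult.Failure)
{
    foreach (DtsError error in pkg.Errors)
        MessageBox.Show(error.Description);
}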
I have just started to use FluentMigrator for my current project. I wrote my first migration, but I am having trouble writing a unit test for it.
Here is some sample code:
private ServiceProvider CreateServiceProvider()
{
    return new ServiceCollection()
        .AddLogging(lb => lb.AddFluentMigratorConsole())
        .AddFluentMigratorCore()
        .ConfigureRunner(
            builder => builder
                .AddSQLite()
                .WithGlobalConnectionString("Data Source=:memory:;Version=3;New=True;")
                .WithMigrationsIn(typeof(MigrationOne).Assembly))
        .BuildServiceProvider();
}

private void PerformMigrateUp(IServiceScope scope)
{
    var runner = scope.ServiceProvider.GetRequiredService<IMigrationRunner>();
    runner.MigrateUp(1);
}

[Test]
public void ShouldHaveTablesAfterMigrateUp()
{
    var provider = this.CreateServiceProvider();
    using (var scope = provider.CreateScope())
    {
        this.PerformMigrateUp(scope);
        // here I'd like to test if tables have been created in the database by the migration
    }
}
I don't know how (or whether it is possible) to access the current database connection so I can perform a query. Any suggestions would be helpful. Thanks.
OK, I found a solution: I have to use the Read method of the runner's Processor to perform my own SQL query.
It looks like this:
private ServiceProvider CreateServiceProvider()
{
    return new ServiceCollection()
        .AddLogging(lb => lb.AddFluentMigratorConsole())
        .AddFluentMigratorCore()
        .ConfigureRunner(
            builder => builder
                .AddSQLite()
                .WithGlobalConnectionString(@"Data Source=:memory:;Version=3;New=True;")
                .WithMigrationsIn(typeof(MigrationDate20181026113000Zero).Assembly))
        .BuildServiceProvider();
}
[Test]
public void ShouldHaveNewVersionAfterMigrateUp()
{
    var serviceProvider = this.CreateServiceProvider();
    var scope = serviceProvider.CreateScope();
    var runner = scope.ServiceProvider.GetRequiredService<IMigrationRunner>();
    runner.MigrateUp(1);

    string sqlStatement = "SELECT Description FROM VersionInfo";
    DataSet dataSet = runner.Processor.Read(sqlStatement, string.Empty);

    Assert.That(dataSet, Is.Not.Null);
    Assert.That(dataSet.Tables[0].Rows[0].ItemArray[0], Is.EqualTo("Migration1"));
}
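As a side note, the processor also exposes schema checks directly, so the original question (did the migration create the tables?) can be answered without raw SQL. A hedged sketch, where the table name MyTable is hypothetical:

// Hedged sketch: IMigrationProcessor has built-in existence checks.
Assert.That(runner.Processor.TableExists(null, "MyTable"), Is.True);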
This is an old question but an important one, and I find it strange that I couldn't find any documentation on this.
In any case, here is my solution, which I find a bit better since you don't need to rely on the runner. Because you don't need the runner, the options for constructor arguments open up hugely.
Firstly, make sure you install Microsoft.Data.Sqlite or you will get a strange error.
SQLite in-memory databases exist for as long as the connection does, and at first glance there is one database per connection. There is, however, a way to share the database between connections, as long as at least one connection is open at all times (according to my experiments): you just need to name it.
https://learn.microsoft.com/en-us/dotnet/standard/data/sqlite/connection-strings#sharable-in-memory
So to begin with, I created a connection that stays open until the test finishes. The database is named using Guid.NewGuid() so that subsequent connections work as expected.
var dbName = Guid.NewGuid().ToString();
var connectionString = $"Data Source={dbName};Mode=Memory;Cache=Shared";
var connection = new SqliteConnection(connectionString);
connection.Open();
After that, the crux of running the migrations is the same as in the earlier answer, but the connection string uses the named database:
var sp = services.AddFluentMigratorCore()
    .ConfigureRunner(fluentMigratorBuilder => fluentMigratorBuilder
        .AddSQLite()
        .WithGlobalConnectionString(connectionString)
        .ScanIn(AssemblyWithMigrations).For.Migrations())
    .BuildServiceProvider();

var runner = sp.GetRequiredService<IMigrationRunner>();
runner.MigrateUp();
Here is a class I use to inject a connection factory everywhere that needs to connect to the database for normal execution:
internal class PostgresConnectionFactory : IConnectionFactory
{
    private readonly string connectionString;

    public PostgresConnectionFactory(string connectionString)
    {
        this.connectionString = connectionString;
    }

    public DbConnection Create()
    {
        return new NpgsqlConnection(connectionString);
    }
}
I just replaced this (all hail dependency inversion) with:
internal class InMemoryConnectionFactory : IConnectionFactory
{
    private readonly string connectionstring;

    public InMemoryConnectionFactory(string connectionstring)
    {
        this.connectionstring = connectionstring;
    }

    public DbConnection Create()
    {
        return new SqliteConnection(connectionstring);
    }
}
where the connection string is the same named one defined above.
Now you can simply use that connection factory anywhere that needs to connect to the same in-memory database, and since we can now connect multiple times, the possibilities for integration testing open up.
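For example, a second connection can verify what the runner created, as long as the holding connection is still open. A minimal sketch, assuming the migrations produced FluentMigrator's standard VersionInfo table:

// Hedged sketch: a separate connection sees the same named in-memory database.
using (var verify = new SqliteConnection(connectionString))
{
    verify.Open();
    using (var cmd = verify.CreateCommand())
    {
        cmd.CommandText = "SELECT COUNT(*) FROM VersionInfo";
        long appliedMigrations = (long)cmd.ExecuteScalar();
    }
}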
Here is the majority of my implementation:
public static IDisposable CreateInMemoryDatabase(Assembly assemblyWithMigrations, IServiceCollection services = null)
{
    if (services == null)
        services = new ServiceCollection();

    var connectionString = GetSharedConnectionString();
    var connection = GetPersistantConnection(connectionString);

    MigrateDb(services, connectionString, assemblyWithMigrations);

    services.AddSingleton<IConnectionFactory>(new InMemoryConnectionFactory(connectionString));

    return services.BuildServiceProvider()
        .GetRequiredService<IDisposableUnderlyingQueryingTool>();
}

private static string GetSharedConnectionString()
{
    var dbName = Guid.NewGuid().ToString();
    return $"Data Source={dbName};Mode=Memory;Cache=Shared";
}

private static void MigrateDb(IServiceCollection services, string connectionString, Assembly assemblyWithMigrations)
{
    var sp = services.AddFluentMigratorCore()
        .ConfigureRunner(fluentMigratorBuilder => fluentMigratorBuilder
            .AddSQLite()
            .WithGlobalConnectionString(connectionString)
            .ScanIn(assemblyWithMigrations).For.Migrations())
        .BuildServiceProvider();

    var runner = sp.GetRequiredService<IMigrationRunner>();
    runner.MigrateUp();
}

private static IDbConnection GetPersistantConnection(string connectionString)
{
    var connection = new SqliteConnection(connectionString);
    connection.Open();
    return connection;
}
Then here is a sample test:
public class Test : IDisposable
{
    private readonly IDisposable _holdingConnection;

    public Test()
    {
        _holdingConnection = CreateInMemoryDatabase(typeof(MyFirstMigration).Assembly);
    }

    public void Dispose()
    {
        _holdingConnection.Dispose();
    }
}
You may notice that the static factory returns a custom interface. It's just an interface that extends the normal tooling I inject into repositories, but also implements IDisposable.
An untested bonus for integration testing, where you will have a service collection created via WebApplicationFactory or TestServer etc.:
public void AddInMemoryPostgres(Assembly assemblyWithMigrations)
{
    var lifetime = services.BuildServiceProvider().GetService<IHostApplicationLifetime>();
    var holdingConnection = InMemoryDatabaseFactory.CreateInMemoryDapperTools(assemblyWithMigrations, services);

    lifetime.ApplicationStopping.Register(() =>
    {
        holdingConnection.Dispose();
    });
}
How do I create a database in PervasiveSQL using the command line?
I know how to do it via the Control Center, but I would rather create it via the command line. I am working to automate the standup of a PervasiveSQL box for a project, so I have the server install happening silently and I am adjusting the server configuration using a RegKey import.
Now I just need to script the creation of the database. The new database will use existing database files, which are already copied to the server.
In the documentation I am using (found here) there is a utility called dbMaint (page 264) that seems like it would do the job, but I do not seem to have that tool on my server.
Thank you in advance for your help.
dbMaint is only provided for PSQL on Linux. There is a way to write a utility using the Distributed Tuning Interface (DTI) or Distributed Tuning Object (DTO) to create the database. I can't link to the PSQL documentation but there is a PSQL_DTI_GUIDE.pdf and PSQL_DTO_Guide.pdf in the PSQL v11 documentation download that describes how to use those APIs.
Found a C# sample I put together a while back. The Pervasive DTO library needs to be added as a reference; it's a COM object. The simple sample is:
using System;
using DTOLib;

namespace dtoTest
{
    class Class1
    {
        [STAThread]
        static void Main(string[] args)
        {
            string compName = null;
            string userName = null;
            string password = null;
            string dbname = null;
            string ddflocation = null;
            string datalocation = null;
            dtoDbFlags dbflags = DTOLib.dtoDbFlags.dtoDbFlagDefault;
            DTOLib.dtoResult result;

            if (args.LongLength != 7)
            {
                Console.WriteLine("Invalid options.\n");
                Console.WriteLine("Usage: dtoDBN.EXE <computername> <username> <password> <dbname> <ddf location> <data location> <DBFlags>");
                Console.WriteLine("NOTE: locations must be relative to the computer where the PSQL engine is running.");
                Console.WriteLine("DB Flags must be passed as an integer in this example with these values: ");
                Console.WriteLine("  P_DBFLAG_BOUND = 1;       (* bound database - must have P_DBFLAG_CREATE_DDF too *)");
                Console.WriteLine("  P_DBFLAG_RI = 2;          (* relational integrity *)");
                Console.WriteLine("  P_DBFLAG_CREATE_DDF = 4;  (* create ddf flag *)");
                Console.WriteLine("  P_DBFLAG_NA = 2147483648; (* not applicable *)");
                Console.WriteLine("  P_DBFLAG_DEFAULT = (P_DBFLAG_BOUND or P_DBFLAG_RI); ");
                return;
            }

            compName = args[0].ToString();
            userName = args[1].ToString();
            password = args[2].ToString();
            dbname = args[3].ToString();
            ddflocation = args[4].ToString();
            datalocation = args[5].ToString();
            dbflags = (dtoDbFlags)Convert.ToInt32(args[6]);

            Console.WriteLine("Create Pervasive Database using DTO and C#");
            DtoSession mDtoSession = new DTOLib.DtoSession();
            try
            {
                result = mDtoSession.Connect(compName, userName, password);
                if (result != 0)
                {
                    Console.WriteLine("Error connecting to server. Error code: " + result.ToString());
                }
                else
                {
                    // Create the database here.
                    DtoDatabase db = new DtoDatabase();
                    db.Name = dbname;
                    db.DdfPath = ddflocation;
                    db.DataPath = datalocation;
                    db.Flags = dbflags;
                    result = mDtoSession.Databases.Add(db);
                    if (result != 0)
                    {
                        Console.WriteLine(string.Format("Error creating the database ({0}). Error code: {1}", dbname, result.ToString()));
                    }
                    else
                    {
                        Console.WriteLine(string.Format("Database ({0}) created. ", dbname));
                    }
                    result = mDtoSession.Disconnect();
                    Console.ReadLine();
                }
            }
            catch (Exception e1)
            {
                Console.WriteLine(e1.Message.ToString());
            }
        }
    }
}
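Compiled, the sample would be invoked along these lines (server name, credentials, and paths are hypothetical; flag 2 enables relational integrity only, which suits existing DDFs as in the question):

dtoDBN.EXE MYSERVER admin secret MYDB C:\PSQLDATA\MYDB C:\PSQLDATA\MYDB 2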
I fail to reload my resource bundle class to reflect the changed translations (made by my end user) on the page, even though the getContent method executes and all translations are fetched from the database as key/value pairs and returned from getContent as an Object[][] successfully. This happens each time I clear the cache and refresh the JSF page through an actionListener.
ResourceBundle.clearCache();
I also tried the following and got the same result:
ResourceBundle.clearCache(Thread.currentThread().getContextClassLoader());
Why does WLS always see the old one? Am I missing something?
Versions: 12.2.1.1.0 and 12.2.1.3.0
After the end user makes the translations, contributing to the internationalization of the project, the translations are saved to the database. The process to enforce these operations is done through the following steps:
Create a HashMap and load all the resource key/value pairs into the map from the database:
while (rs.next()) {
    bundle.put(rs.getString(1), rs.getString(2));
}
Refresh the Bundle of your application
SoftCache cache =
    (SoftCache) getFieldFromClass(ResourceBundle.class, "cacheList");
synchronized (cache) {
    ArrayList myBundles = new ArrayList();
    Iterator keyIter = cache.keySet().iterator();
    while (keyIter.hasNext()) {
        Object key = keyIter.next();
        String name = (String) getFieldFromObject(key, "searchName");
        if (name.startsWith(bundleName)) {
            myBundles.add(key);
            sLog.info("Resourcebundle " + name + " will be refreshed.");
        }
    }
    cache.keySet().removeAll(myBundles);
}
Get a String from the ResourceBundle of your application:
for (String resourcebundle : bundleNames) {
    String bundleName =
        resourcebundle + (bundlePostfix == null ? "" : bundlePostfix);
    try {
        bundle = ResourceBundle.getBundle(bundleName, locale, getCurrentLoader(bundleName));
    } catch (MissingResourceException e) {
        // bundle with this name not found
    }
    if (bundle == null)
        continue;
    try {
        message = bundle.getString(key);
        if (message != null)
            break;
    } catch (Exception e) {
    }
}
I am trying to find newly created tables in the target database on publishing. Using DAC Fx I am able to find the differences and delete the tables after moving the newly created tables to another database.
I developed and tested the code with integrated security; it started failing on machines that use SQL Server logins. The moment I toggle IntegratedSecurity to true, it works. Is it a bug?
private void Analyse()
{
    try
    {
        var sourceDacpac = new SchemaCompareDacpacEndpoint(DacPacSrc);
        var csb = new SqlConnectionStringBuilder(ConnectionString);
        csb.IntegratedSecurity = false;
        var targetDatabase = new SchemaCompareDatabaseEndpoint(csb.ToString());
        var comparison = new SchemaComparison(sourceDacpac, targetDatabase);
        comparison.Options.DropObjectsNotInSource = true;

        var result = comparison.Compare();
        if (result.GetErrors().Any())
        {
            throw new Exception("Compare failed " + result.GetErrors().FirstOrDefault().Message);
        }

        var delta = new List<string>();
        if (result.Differences != null && result.Differences.Any())
        {
            var deltaTables = result.Differences.Where(x => x.Name == "Table" && x.UpdateAction == SchemaUpdateAction.Delete);
            delta = deltaTables.Select(x => x.TargetObject.Name.ToString()).ToList();
        }

        FindingDeltaCompleted?.Invoke(this, new DeltaEventArgs(delta));
    }
    catch (Exception ex)
    {
        Logging.HandleException(ex);
    }
}
Try setting Persist Security Info=True in the SQL Authentication connection string.
SSDT/DAC Fx saves connection strings in the registry under HKEY_CURRENT_USER\SOFTWARE\Microsoft\SSDT\ConnectionStrings. When Persist Security Info=True is not set, it won't restore the password when loading the connection strings from the registry.
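In code that builds the connection string, the equivalent is a sketch like the following, using the standard SqlConnectionStringBuilder properties:

// Hedged sketch: keep the password in the connection string so it can be
// restored when DAC Fx reloads connection strings from the registry.
var csb = new SqlConnectionStringBuilder(ConnectionString)
{
    IntegratedSecurity = false,
    PersistSecurityInfo = true
};
var targetDatabase = new SchemaCompareDatabaseEndpoint(csb.ToString());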
When I run this package from Visual Studio, it works fine, but when I run it from the SQL Agent job (SQL Server 2012 SP1) it throws an error on the Script Task. I am running it under a proxy account in the SQL Agent job.
Error: Source: Set FS File Parameters Script Task Description: Exception has been thrown by the target of an invocation.
The proxy account is configured for the following subsystems:
ActiveX Script
SQL Server Analysis Services Command
SQL Server Analysis Services Query
SQL Server Analysis Services Package
PowerShell
I guess it is a problem with the proxy account using System.IO, because all the other packages that do not access the file system run fine, even though they have Script Tasks. All file path variables have been set up with UNC paths, and the folders and files grant Everyone full control.
How do I set it up to run from the SQL Agent job?
How do I check that the proxy account has access to the file system?
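One way to check is to log the account the Script Task actually runs under before touching the file system; a minimal sketch that could go at the top of Main():

// Hedged sketch: report the executing Windows account to the SSIS log.
bool fireAgain = true;
Dts.Events.FireInformation(0, "Set FS File Parameters",
    "Running as: " + System.Security.Principal.WindowsIdentity.GetCurrent().Name,
    string.Empty, 0, ref fireAgain);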
Here is the code in the script task:
#region Namespaces
using System;
using System.Data;
using Microsoft.SqlServer.Dts.Runtime;
using System.Windows.Forms;
using System.IO;
#endregion

public void Main()
{
    Dts.Variables["User::AS_FileArchivePath"].Value = "";
    Dts.Variables["User::ExcludeProvidersArchivePath"].Value = "";
    Dts.Variables["User::AS_FilePath"].Value = "";
    Dts.Variables["User::ExcludeProvidersFilePath"].Value = "";
    Dts.Variables["User::FeeScheduleFileName"].Value = "";
    Dts.Variables["User::FileArchivePath"].Value = "";

    // Get the load file name.
    String dirPath = Dts.Variables["User::FileDropFolder"].Value.ToString();
    String fileExt = Dts.Variables["User::LoadFileExt"].Value.ToString();
    String FileArchivePath = Dts.Variables["User::FileArchiveFolder"].Value.ToString() + Dts.Variables["User::FileArchiveDateFolder"].Value.ToString();
    String FileName = "";
    String FileType = "";
    int FileSize = 0;

    DirectoryInfo dir = new DirectoryInfo(dirPath);
    foreach (FileInfo file in dir.GetFiles())
    {
        if (file.Extension.Contains(fileExt)
            && file.Name.StartsWith("DoNotDeleteTemplate") == false
            && file.Name.Contains("Products") == true
            && file.Name.Contains("Special") == false
            && file.Name.Contains("Exclude") == false)
        {
            FileName = file.Name;
            FileSize = (int)file.Length;
            FileType = file.Extension;
        }
    }

    if (FileName != "")
    {
        Dts.Variables["User::FeeScheduleFileName"].Value = FileName;
        Dts.Variables["User::FeeScheduleFileSize"].Value = FileSize;
        Dts.Variables["User::FeeScheduleFileType"].Value = FileType;

        // Create the archive folder.
        bool folderExists = Directory.Exists(FileArchivePath);
        if (!folderExists)
            Directory.CreateDirectory(FileArchivePath);

        // Set the full archive path.
        Dts.Variables["User::FileArchivePath"].Value = FileArchivePath + "\\" + FileName;

        // Set the full load file path.
        String filePath = Dts.Variables["User::FileDropFolder"].Value.ToString() + FileName;
    }

    Dts.TaskResult = (int)ScriptResults.Success;
}