Dynamics 365 Business Central terraform scopes - azure-active-directory

I am trying to create a Terraform script that will register an application in Azure AD.
I have been successful in generating a script that only reads Microsoft Graph scopes, but I am having trouble figuring out what the equivalent of those scopes is in Business Central (cloud version).
For Microsoft Graph I have these permissions:
email
offline_access
openid
profile
Financials.ReadWrite.All
User.Read
And I read them like this in terraform:
provider "azuread" {
# Whilst version is optional, we /strongly recommend/ using it to pin the version of the Provider being used
version = "~> 0.10"
subscription_id = var.subscription_id
}
data "azuread_service_principal" "graph-api" {
display_name = "Microsoft Graph"
}
locals {
MAIL_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("email"))[0]}"
USER_READ_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("User.Read"))[0]}"
FINANCIALS_READ_WRITE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("Financials.ReadWrite.All"))[0]}"
OFFLINE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("offline_access"))[0]}"
OPENID_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("openid"))[0]}"
PROFILE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("profile"))[0]}"
}
This seems to be working fine. I am just struggling to find a similar way of doing this for Dynamics 365 Business Central.
I am interested in these:
app_access
Financials.ReadWrite.All
user_impersonation
Does anybody know what that endpoint might look like? The documentation is very limited.
EDIT:
This is the final script for anybody interested in setting up a Business Central application registration:
variable "subscription_id" {
type = string
}
variable "app_name" {
type = string
}
variable "app_homepage" {
type = string
}
variable "app_reply_urls" {
type = list(string)
}
provider "azuread" {
# Whilst version is optional, we /strongly recommend/ using it to pin the version of the Provider being used
version = "~> 0.10"
subscription_id = var.subscription_id
}
data "azuread_service_principal" "graph-api" {
display_name = "Microsoft Graph"
}
data "azuread_service_principal" "d365bc" {
display_name = "Dynamics 365 Business Central"
}
locals {
APP_ACCESS_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.app_roles.*.id, data.azuread_service_principal.d365bc.app_roles.*.value, list("app_access"))[0]}"
USER_IMPERSONATION_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.oauth2_permissions.*.id, data.azuread_service_principal.d365bc.oauth2_permissions.*.value, list("user_impersonation"))[0]}"
BC_FINANCIALS_READ_WRITE_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.oauth2_permissions.*.id, data.azuread_service_principal.d365bc.oauth2_permissions.*.value, list("Financials.ReadWrite.All"))[0]}"
GRAPH_FINANCIAL_READ_WRITE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("Financials.ReadWrite.All"))[0]}"
MAIL_READ_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("User.Read"))[0]}"
MAIL_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("email"))[0]}"
OFFLINE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("offline_access"))[0]}"
OPENID_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("openid"))[0]}"
PROFILE_PERMISSION = "${matchkeys(data.azuread_service_principal.graph-api.oauth2_permissions.*.id, data.azuread_service_principal.graph-api.oauth2_permissions.*.value, list("profile"))[0]}"
}
resource "azuread_application" "businessCentral" {
name = var.app_name
homepage = var.app_homepage
identifier_uris = []
reply_urls = var.app_reply_urls
available_to_other_tenants = true
type = "webapp/api"
required_resource_access {
resource_app_id = data.azuread_service_principal.graph-api.application_id
resource_access {
id = local.GRAPH_FINANCIAL_READ_WRITE_PERMISSION
type = "Scope"
}
resource_access {
id = local.MAIL_PERMISSION
type = "Scope"
}
resource_access {
id = local.MAIL_READ_PERMISSION
type = "Scope"
}
resource_access {
id = local.OFFLINE_PERMISSION
type = "Scope"
}
resource_access {
id = local.OPENID_PERMISSION
type = "Scope"
}
resource_access {
id = local.PROFILE_PERMISSION
type = "Scope"
}
}
required_resource_access {
resource_app_id = data.azuread_service_principal.d365bc.application_id
resource_access {
id = local.APP_ACCESS_PERMISSION
type = "Role"
}
resource_access {
id = local.USER_IMPERSONATION_PERMISSION
type = "Scope"
}
resource_access {
id = local.BC_FINANCIALS_READ_WRITE_PERMISSION
type = "Scope"
}
}
app_role {
allowed_member_types = [
"Application"
]
description = "Admins can manage roles and perform all task actions"
display_name = "Admin"
is_enabled = true
value = "Admin"
}
}
One thing to note is that app_access is a Role, while the rest of the API permissions are Scopes.
You can call the above Terraform with:
terraform plan -var="subscription_id={your_subscription_id}" -var='app_reply_urls={your_urls_array}' -var="app_name={your_app_name}" -var="app_homepage={your_app_homepage}"
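For example, with purely hypothetical placeholder values (app_reply_urls is an HCL list, so it is easiest to pass in single quotes):
terraform plan -var="subscription_id=00000000-0000-0000-0000-000000000000" -var='app_reply_urls=["https://localhost:44300/signin-oidc"]' -var="app_name=my-bc-app" -var="app_homepage=https://localhost:44300"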

Try this:
provider "azuread" {
# Whilst version is optional, we /strongly recommend/ using it to pin the version of the Provider being used
version = "=0.10.0"
}
data "azuread_service_principal" "d365bc" {
application_id = "996def3d-b36c-4153-8607-a6fd3c01b89f"
}
locals {
APP_ACCESS_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.app_roles.*.id, data.azuread_service_principal.d365bc.app_roles.*.value, list("app_access"))[0]}"
USER_IMPERSONATION_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.oauth2_permissions.*.id, data.azuread_service_principal.d365bc.oauth2_permissions.*.value, list("user_impersonation"))[0]}"
FINANCIALS_READ_WRITE_PERMISSION = "${matchkeys(data.azuread_service_principal.d365bc.oauth2_permissions.*.id, data.azuread_service_principal.d365bc.oauth2_permissions.*.value, list("Financials.ReadWrite.All"))[0]}"
}
996def3d-b36c-4153-8607-a6fd3c01b89f is the client ID of the Microsoft Dynamics 365 Business Central service principal.
app_access is an application permission, so we need to use "app_roles" rather than "oauth2_permissions" here.
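If you want to sanity-check that the lookups resolve to the expected GUIDs, an optional output block along these lines (just a sketch reusing the locals above; the output name is arbitrary) echoes them on apply:
output "d365bc_permission_ids" {
value = {
app_access = local.APP_ACCESS_PERMISSION
user_impersonation = local.USER_IMPERSONATION_PERMISSION
financials_read_write_all = local.FINANCIALS_READ_WRITE_PERMISSION
}
}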

Related

How to use terraform to enable Managed private endpoint on datafactory azure sql database linked service

I am trying to use Terraform to create ADF linked services; however, the Terraform resource doesn't give the option to select an already existing managed private endpoint for the linked service to communicate over, although this is possible when creating it from the portal. Below is my code:
resource "azurerm_data_factory" "process-adf" {
resource_group_name = module.resourcegroup.resource_group.name
location = module.resourcegroup.resource_group.location
name = "adf"
managed_virtual_network_enabled = true
public_network_enabled = false
tags = var.tags
identity {
type = "SystemAssigned"
}
}
resource "azurerm_data_factory_linked_service_azure_sql_database" "process-mssql-adf" {
name = "mssql-adf"
data_factory_id = azurerm_data_factory.process-adf.id
integration_runtime_name = azurerm_data_factory_integration_runtime_azure.adf.name
connection_string = "data source=servername;initial catalog=databasename;user id=admin;Password=password;integrated security=True;encrypt=True;connection timeout=30"
}
resource "azurerm_data_factory_managed_private_endpoint" "adf-msssql-pe" {
name = "adf"
data_factory_id = azurerm_data_factory.process-adf.id
target_resource_id = azurerm_mssql_server.process-control.id
subresource_name = "sqlServer"
}
resource "azurerm_data_factory_integration_runtime_azure" "adf" {
name = "adf"
data_factory_id = azurerm_data_factory.process-adf.id
location = module.resourcegroup.resource_group.location
virtual_network_enabled = true
}
How do I point the resource azurerm_data_factory_linked_service_azure_sql_database to the resource azurerm_data_factory_managed_private_endpoint?

Multiple certificates for Issuer ITfoxtec.Identity.Saml2

As context: I am trying to implement SAML 2.0 authentication using the ITfoxtec.Identity.Saml2 library. I want to use multiple certificates for one Service Provider, because different clients can log in to the Service Provider and each of them can have its own certificate. I need the third-party login service to be able to choose from the list of certificates in my Service Provider metadata.xml when a SAML request happens. Does the ITfoxtec.Identity.Saml2 library support this, or are there some workarounds for how it can be implemented? Thank you.
You would normally have one Saml2Configuration. But in your case I would implement some Saml2Configuration logic, where you can ask for a specific Saml2Configuration with the current certificate (SigningCertificate/DecryptionCertificate). This specific Saml2Configuration is then used in the AuthController.
The metadata (MetadataController) would then call the Saml2Configuration logic to get a list of all the certificates.
Something like this:
public class MetadataController : Controller
{
private readonly Saml2Configuration config;
private readonly Saml2ConfigurationLogic saml2ConfigurationLogic;
public MetadataController(IOptions<Saml2Configuration> configAccessor, Saml2ConfigurationLogic saml2ConfigurationLogic)
{
config = configAccessor.Value;
this.saml2ConfigurationLogic = saml2ConfigurationLogic;
}
public IActionResult Index()
{
var defaultSite = new Uri($"{Request.Scheme}://{Request.Host.ToUriComponent()}/");
var entityDescriptor = new EntityDescriptor(config);
entityDescriptor.ValidUntil = 365;
entityDescriptor.SPSsoDescriptor = new SPSsoDescriptor
{
WantAssertionsSigned = true,
SigningCertificates = saml2ConfigurationLogic.GetAllSigningCertificates(),
//EncryptionCertificates = saml2ConfigurationLogic.GetAllEncryptionCertificates(),
SingleLogoutServices = new SingleLogoutService[]
{
new SingleLogoutService { Binding = ProtocolBindings.HttpPost, Location = new Uri(defaultSite, "Auth/SingleLogout"), ResponseLocation = new Uri(defaultSite, "Auth/LoggedOut") }
},
NameIDFormats = new Uri[] { NameIdentifierFormats.X509SubjectName },
AssertionConsumerServices = new AssertionConsumerService[]
{
new AssertionConsumerService { Binding = ProtocolBindings.HttpPost, Location = new Uri(defaultSite, "Auth/AssertionConsumerService") }
},
AttributeConsumingServices = new AttributeConsumingService[]
{
new AttributeConsumingService { ServiceName = new ServiceName("Some SP", "en"), RequestedAttributes = CreateRequestedAttributes() }
},
};
entityDescriptor.ContactPerson = new ContactPerson(ContactTypes.Administrative)
{
Company = "Some Company",
GivenName = "Some Given Name",
SurName = "Some Sur Name",
EmailAddress = "some@some-domain.com",
TelephoneNumber = "11111111",
};
return new Saml2Metadata(entityDescriptor).CreateMetadata().ToActionResult();
}
private IEnumerable<RequestedAttribute> CreateRequestedAttributes()
{
yield return new RequestedAttribute("urn:oid:2.5.4.4");
yield return new RequestedAttribute("urn:oid:2.5.4.3", false);
}
}

How can i establish rpc properties with the datasource type DB in Corda community edition?

To establish an RPC connection in the community edition we need to specify the RPC username, password and permissions, but when we integrate an external database like MySQL and change the datasource type from INMEMORY to "DB", it does not allow us to specify the user properties.
These are the settings I am using in my node.conf:
security = {
authService = {
dataSource = {
type = "DB"
passwordEncryption = "SHIRO_1_CRYPT"
connection = {
jdbcUrl = "jdbc:mysql://localhost:3306"
username = "root"
password = "password"
driverClassName = "com.mysql.jdbc.Driver"
}
}
options = {
cache = {
expireAfterSecs = 120
maxEntries = 10000
}
}
}
}
Maybe I didn't understand your question, but database setup in node.conf is separate from RPC user setup in node.conf:
Database (Postgres in my case):
extraConfig = [
'dataSourceProperties.dataSourceClassName' : 'org.postgresql.ds.PGSimpleDataSource',
'dataSourceProperties.dataSource.url' : 'jdbc:postgresql://localhost:5432/postgres',
'dataSourceProperties.dataSource.user' : 'db_user',
'dataSourceProperties.dataSource.password' : 'db_user_password',
'database.transactionIsolationLevel' : 'READ_COMMITTED',
'database.initialiseSchema' : 'true'
]
RPC User
rpcUsers = [[ user: "rpc_user", "password": "rpc_user_password", "permissions": ["ALL"]]]
Ok, I'm adding my node's node.config (it's part of Corda TestNet, and it's deployed on Google Cloud):
baseDirectory = "."
compatibilityZoneURL = "https://netmap.testnet.r3.com"
emailAddress = "xxx"
jarDirs = [ "plugins", "cordapps" ]
sshd { port = 2222 }
myLegalName = "OU=xxx, O=TESTNET_xxx, L=London, C=GB"
keyStorePassword = "xxx"
trustStorePassword = "xxx"
crlCheckSoftFail = true
database = {
transactionIsolationLevel = "READ_COMMITTED"
initialiseSchema = "true"
}
dataSourceProperties {
dataSourceClassName = "org.postgresql.ds.PGSimpleDataSource"
dataSource.url = "jdbc:postgresql://xxx:xxx/postgres"
dataSource.user = xxx
dataSource.password = xxx
}
p2pAddress = "xxx:xxx"
rpcSettings {
useSsl = false
standAloneBroker = false
address = "0.0.0.0:xxx"
adminAddress = "0.0.0.0:xxx"
}
rpcUsers = [
{ username=cordazoneservice, password=xxx, permissions=[ ALL ] }
]
devMode = false
cordappSignerKeyFingerprintBlacklist = []
useTestClock = false

terraform database creation in athena

I am trying to create a database using Terraform, and this seems very complicated for such a simple query...
Could you help me, please?
I have tried null_resource with local-exec and data "external" with a Python script...
I think I am looking at it the wrong way.
Example which doesn't work in Terraform 0.12:
resource "null_resource" "create-endpoint" {
provisioner "local-exec" {
query = <<EOF
{
CREATE EXTERNAL TABLE `dashboard_loading_time`(
`timestamp_iso` string,
`app_identification` struct<service:string,app_name:string,app_type:string,stage:string>,
`user` struct<api_gateway_key:struct<id:string,name:string>,mashery_key:struct<id:string,name:string>,employee:struct<id:string,name:string>>,
`action` struct<action_type:string,path:string>,
`result` struct<status:string,http_status:string,response:struct<response:string>>)
PARTITIONED BY (
`year` int)
ROW FORMAT SERDE
'org.openx.data.jsonserde.JsonSerDe'
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
's3://xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/dev'
}
EOF
command = "aws athena start-query-execution --query-string "query""
}
}
I would like to find the simplest way to do this using terraform.
If you want to make it for Athena, you need to create Glue resources.
Try the code below with Terraform.
variable "service_name" {
default = "demo-service"
}
variable "workspace" {
default = "dev"
}
variable "columns" {
default = {
id = "int"
type = "string"
status = "int"
created_at = "timestamp"
}
}
resource "aws_glue_catalog_database" "athena" {
name = "${var.service_name}_db"
}
resource "aws_glue_catalog_table" "athena" {
name = "${var.service_name}_logs"
database_name = "${aws_glue_catalog_database.athena.name}"
table_type = "EXTERNAL_TABLE"
parameters = {
EXTERNAL = "TRUE"
}
storage_descriptor {
location = "s3://${var.service_name}-${var.workspace}-data-pipeline/log/"
input_format = "org.apache.hadoop.mapred.TextInputFormat"
output_format = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
ser_de_info {
name = "jsonserde"
serialization_library = "org.openx.data.jsonserde.JsonSerDe"
parameters = {
"serialization.format" = "1"
}
}
dynamic "columns" {
for_each = "${var.columns}"
content {
name = "${columns.key}"
type = "${columns.value}"
}
}
}
partition_keys {
name = "year"
type = "string"
}
partition_keys {
name = "month"
type = "string"
}
partition_keys {
name = "day"
type = "string"
}
partition_keys {
name = "hour"
type = "string"
}
}
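The table above is partitioned by year/month/day/hour, so Athena won't return rows until those partitions are registered. As an optional follow-up sketch (reusing the resources above, and assuming the S3 prefixes are Hive-style such as year=2020/month=01/...), you can keep that housekeeping statement next to the Terraform config as a named query:
resource "aws_athena_named_query" "load_partitions" {
name = "${var.service_name}-load-partitions"
database = aws_glue_catalog_database.athena.name
query = "MSCK REPAIR TABLE `${aws_glue_catalog_table.athena.name}`;"
}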
Refer to this repository: aws-serverless-data-pipeline-by-terraform
resource "aws_glue_catalog_table" "aws_glue_catalog_table" {
name = "mytable"
database_name = aws_glue_catalog_database.aws_glue_catalog_database.name
table_type = "EXTERNAL_TABLE"
parameters = {
"classification" = "json"
}
storage_descriptor {
location = "s3://mybucket/myprefix"
input_format = "org.apache.hadoop.mapred.TextInputFormat"
output_format = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"
ser_de_info {
name = "myserdeinfo"
serialization_library = "org.openx.data.jsonserde.JsonSerDe"
parameters = {
"paths" = "jsonrootname"
}
}
columns {
name = "column1"
type = "array<struct<resourcearn:string,tags:array<struct<key:string,value:string>>>>"
}
}
partition_keys {
name = "part1"
type = "string"
}
partition_keys {
name = "part2"
type = "string"
}
}
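Note that this second snippet references aws_glue_catalog_database.aws_glue_catalog_database without showing it; a minimal definition (the database name here is only a placeholder) would be:
resource "aws_glue_catalog_database" "aws_glue_catalog_database" {
name = "mydatabase"
}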

How to unit test Soap service access from Crm 2011 Silverlight application?

In a Silverlight 5 application in Dynamics CRM 2011 I access the Organization Service of the CRM to query for entity Metadata. I wrote a service that takes an entity name and returns a list of all its fields.
How can I test this service method automatically? The main problem is how to obtain a reference to the organization service from a Silverlight app that does not run in the context of the CRM.
My Service method looks like this:
public IOrganizationService OrganizationService
{
get
{
if (_organizationService == null)
_organizationService = SilverlightUtility.GetSoapService();
return _organizationService;
}
set { _organizationService = value; }
}
public async Task<List<string>> GetAttributeNamesOfEntity(string entityName)
{
// build request
OrganizationRequest request = new OrganizationRequest
{
RequestName = "RetrieveEntity",
Parameters = new ParameterCollection
{
new XrmSoap.KeyValuePair<string, object>()
{
Key = "EntityFilters",
Value = EntityFilters.Attributes
},
new XrmSoap.KeyValuePair<string, object>()
{
Key = "RetrieveAsIfPublished",
Value = true
},
new XrmSoap.KeyValuePair<string, object>()
{
Key = "LogicalName",
Value = "avobase_tradeorder"
},
new XrmSoap.KeyValuePair<string, object>()
{
Key = "MetadataId",
Value = new Guid("00000000-0000-0000-0000-000000000000")
}
}
};
// fire the request once and wait for the response; EndExecute uses the IAsyncResult handed back by FromAsync
TaskFactory<OrganizationResponse> tf = new TaskFactory<OrganizationResponse>();
OrganizationResponse response = await tf.FromAsync(
OrganizationService.BeginExecute(request, null, null), iar => OrganizationService.EndExecute(iar));
// parse response
EntityMetadata entities = (EntityMetadata)response["EntityMetadata"];
return entities.Attributes.Select(attr => attr.LogicalName).ToList();
}
Edit:
I can create and execute unit tests with Resharper and AgUnit. Thus, the problem is not how to write a unit test in general.
I have tweaked the GetSoapService from the standard Microsoft SDK to accept a fallback value. This means no code changes are needed when debugging in Visual Studio and running in CRM. Anyway, here it is:
public static IOrganizationService GetSoapService(string FallbackValue = null)
{
Uri serviceUrl = new Uri(GetServerBaseUrl(FallbackValue)+ "/XRMServices/2011/Organization.svc/web");
BasicHttpBinding binding = new BasicHttpBinding(Uri.UriSchemeHttps == serviceUrl.Scheme
? BasicHttpSecurityMode.Transport : BasicHttpSecurityMode.TransportCredentialOnly);
binding.MaxReceivedMessageSize = int.MaxValue;
binding.MaxBufferSize = int.MaxValue;
binding.SendTimeout = TimeSpan.FromMinutes(20);
IOrganizationService ser =new OrganizationServiceClient(binding, new EndpointAddress(serviceUrl));
return ser;
}
public static string GetServerBaseUrl(string FallbackValue = null)
{
try
{
string serverUrl = (string)GetContext().Invoke("getClientUrl");
//Remove the trailing forwards slash returned by CRM Online
//So that it is always consistent with CRM On Premises
if (serverUrl.EndsWith("/"))
{
serverUrl = serverUrl.Substring(0, serverUrl.Length - 1);
}
return serverUrl;
}
catch
{
//Try the old getServerUrl
try
{
string serverUrl = (string)GetContext().Invoke("getServerUrl");
//Remove the trailing forwards slash returned by CRM Online
//So that it is always consistent with CRM On Premises
if (serverUrl.EndsWith("/"))
{
serverUrl = serverUrl.Substring(0, serverUrl.Length - 1);
}
return serverUrl;
}
catch
{
return FallbackValue;
}
}
}
