Is it possible to create a mail merge in a Logic App?

As part of moving fully over to the Azure ecosystem, I'm trying to recreate a mail merge via Logic Apps.
The basic idea is that we get the data output from a stored proc, with test data similar to:
create table dbo.People (
Name nvarchar(255),
Local char(1) null,
Earned float null,
Email nvarchar(255) null
);
insert into dbo.People values ('Bob','N',20,'Bob@Fakemail.com');
insert into dbo.People values ('Bob','Y',28,'Bob@Fakemail.com');
insert into dbo.People values ('Jess','N',25,'Jess@Fakemail.com');
insert into dbo.People values ('Jess','Y',39,'Jess@Fakemail.com');
Is this doable in a Logic App? I'm trying to avoid paying for any add-ons.
I've tried a few tests and it works fine with one line per name/person, but once it brings back more than one line it duplicates the emails. Is there a way to combine them all?
For one-line outputs from SQL it works fine, but of course it duplicates if there is more than one line, and I'm not sure how to combine or process the rows in bulk.
What would be the most efficient way of doing this?
Appreciate tips and tricks!

After reproducing from my end, I was able to achieve this by using two Until loops, where I defined a condition to check for distinct emails and send the required details using the Outlook connector.
Alternatively, you can use a For each loop instead of Until with similar logic. Below is the flow of my logic app.
Initially, I used two queries: one to get the whole table and another to get only the distinct emails.
In the first Until loop, I set the Email variable to read the distinct emails.
In the second Until loop, I used a condition; when it is true, the required details are appended to the array using the expression below.
{
"Name":"#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Name']}",
"Local":"#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Local']}",
"Earned":"#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Earned']}",
"Email":"#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Email']}"
}
Finally, I'm sending the updated variable through mail.
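As a side note, the inner Until loop could likely be replaced with a single Filter array (Query) action that keeps only the rows matching the current distinct email. This is just a sketch reusing the action and variable names from the definition below, not part of it:
"Filter_array": {
"inputs": {
"from": "@body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1']",
"where": "@equals(item()?['Email'], variables('Email'))"
},
"runAfter": {
"Set_variable_Distinct_Email": [
"Succeeded"
]
},
"type": "Query"
}
The output of body('Filter_array') would then contain every row for the current email, which removes the need for the Loop2 counter and the Condition action.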
Below is the complete Code view of my Logic App
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Execute_a_SQL_query_(V2)": {
"inputs": {
"body": {
"query": "SELECT * FROM dbo.People"
},
"host": {
"connection": {
"name": "#parameters('$connections')['sql']['connectionId']"
}
},
"method": "post",
"path": "/v2/datasets/#{encodeURIComponent(encodeURIComponent('default'))},#{encodeURIComponent(encodeURIComponent('default'))}/query/sql"
},
"runAfter": {},
"type": "ApiConnection"
},
"Execute_a_SQL_query_(V2)_-_Distinct_Emails": {
"inputs": {
"body": {
"query": "SELECT DISTINCT Email FROM dbo.People"
},
"host": {
"connection": {
"name": "#parameters('$connections')['sql']['connectionId']"
}
},
"method": "post",
"path": "/v2/datasets/#{encodeURIComponent(encodeURIComponent('default'))},#{encodeURIComponent(encodeURIComponent('default'))}/query/sql"
},
"runAfter": {
"Execute_a_SQL_query_(V2)": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"Initialize_variable_-_Details": {
"inputs": {
"variables": [
{
"name": "Details",
"type": "array"
}
]
},
"runAfter": {
"Initialize_variable_-_Email": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_-_Email": {
"inputs": {
"variables": [
{
"name": "Email",
"type": "string"
}
]
},
"runAfter": {
"Initialize_variable_-_Loop2": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_-_Loop1": {
"inputs": {
"variables": [
{
"name": "Loop1",
"type": "integer",
"value": 0
}
]
},
"runAfter": {
"Execute_a_SQL_query_(V2)_-_Distinct_Emails": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_-_Loop2": {
"inputs": {
"variables": [
{
"name": "Loop2",
"type": "integer",
"value": 0
}
]
},
"runAfter": {
"Initialize_variable_-_Loop1": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Until_-_Loop1": {
"actions": {
"Compose": {
"inputs": "#variables('Details')",
"runAfter": {
"Increment_variable_-_Loop1": [
"Succeeded"
]
},
"type": "Compose"
},
"Increment_variable_-_Loop1": {
"inputs": {
"name": "Loop1",
"value": 1
},
"runAfter": {
"Until_-_Loop2": [
"Succeeded"
]
},
"type": "IncrementVariable"
},
"Set_variable_Details_to_null": {
"inputs": {
"name": "Details",
"value": "#null"
},
"runAfter": {
"Set_variable_Loop2_to_0": [
"Succeeded"
]
},
"type": "SetVariable"
},
"Set_variable_Distinct_Email": {
"inputs": {
"name": "Email",
"value": "#{body('Execute_a_SQL_query_(V2)_-_Distinct_Emails')?['resultsets']?['Table1'][variables('Loop1')]['Email']}"
},
"runAfter": {},
"type": "SetVariable"
},
"Set_variable_Loop2_to_0": {
"inputs": {
"name": "Loop2",
"value": 0
},
"runAfter": {
"Set_variable_Distinct_Email": [
"Succeeded"
]
},
"type": "SetVariable"
},
"Until_-_Loop2": {
"actions": {
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "Details",
"value": {
"Earned": "#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Earned']}",
"Email": "#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Email']}",
"Local": "#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Local']}",
"Name": "#{body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Name']}"
}
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"expression": {
"and": [
{
"equals": [
"#variables('Email')",
"#body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'][variables('Loop2')]['Email']"
]
}
]
},
"runAfter": {},
"type": "If"
},
"Increment_variable": {
"inputs": {
"name": "Loop2",
"value": 1
},
"runAfter": {
"Condition": [
"Succeeded"
]
},
"type": "IncrementVariable"
}
},
"expression": "#equals(variables('Loop2'), length(array(body('Execute_a_SQL_query_(V2)')?['resultsets']?['Table1'])))",
"limit": {
"count": 60,
"timeout": "PT1H"
},
"runAfter": {
"Set_variable_Details_to_null": [
"Succeeded"
]
},
"type": "Until"
}
},
"expression": "#equals(variables('Loop1'), length(array(body('Execute_a_SQL_query_(V2)_-_Distinct_Emails')?['resultsets']?['Table1'])))",
"limit": {
"count": 60,
"timeout": "PT1H"
},
"runAfter": {
"Initialize_variable_-_Details": [
"Succeeded"
]
},
"type": "Until"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {
"$connections": {
"value": {
"sql": {
"connectionId": "/subscriptions/<ID>/resourceGroups/<RG>/providers/Microsoft.Web/connections/sql",
"connectionName": "sql",
"id": "/subscriptions/<ID>/providers/Microsoft.Web/locations/westus2/managedApis/sql"
}
}
}
}
}
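For completeness, the code view above ends at the Compose action and does not include the actual send step described earlier. A rough sketch of what an Office 365 Outlook "Send an email (V2)" action inside Until - Loop1 could look like; the office365 connection name, subject text and HTML body below are assumptions, not taken from the original app:
"Send_an_email_(V2)": {
"inputs": {
"body": {
"Body": "<p>@{variables('Details')}</p>",
"Subject": "Your earnings summary",
"To": "@variables('Email')"
},
"host": {
"connection": {
"name": "@parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "ApiConnection"
}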

Related

How to construct a JSON when [keys] are separated from [values] inside Logic App

I have been struggling with this for a long time: I need to construct an output where my [key] is separated from my array value. Please share your expertise.
Below is the response I received from an Azure Alert, and I'm looking to create an output like the Expected Output shown further down.
{
"tables": [
{
"columns": [
{
"name": "task",
"type": "string"
},
{
"name": "environment",
"type": "string"
},
{
"name": "workspace",
"type": "string"
}
],
"name": "PrimaryResult",
"rows": [
[
"job_name_1",
"PRODUCTION",
"WORKSPACE-1",
],
[
"job_name_2",
"TEST",
"WORKSPACE-2",
]
]
}
]
}
Inside the Logic App I have parsed the JSON and used many for-each loop variations, but every single time I get stuck traversing the rows, as it's a 2D array. Even when I do traverse them, I cannot associate them with the columns. Any alternative approaches would be appreciated.
Input
Expected Output
[
{
"task": "job_name_1",
"environment": "PRODUCTION",
"workspace" : "WORKSPACE-1"
},
{
"task": "job_name_2",
"environment": "TEST",
"workspace" : "WORKSPACE-2"
}
]
I'd be lying if I said this was straightforward, but you can load the definition below into your tenant and see a working version.
Note: I made the assumption that you only ever have one table in your top-level array, given that's what you provided in your question.
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_Each_Row": {
"actions": {
"Append_To_Transformed_Array": {
"inputs": {
"name": "Transformed Array",
"value": "#variables('Row Object')"
},
"runAfter": {
"For_Each_Column": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"For_Each_Column": {
"actions": {
"Add_Property_To_Temp_Row_Object": {
"inputs": {
"name": "Temp Row Object",
"value": "#addProperty(variables('Row Object'), variables('Columns')?[variables('Column Index')]['name'], items('For_Each_Column'))"
},
"runAfter": {},
"type": "SetVariable"
},
"Increment_Column_Index": {
"inputs": {
"name": "Column Index",
"value": 1
},
"runAfter": {
"Set_Row_Object_From_Temp_Row_Object": [
"Succeeded"
]
},
"type": "IncrementVariable"
},
"Set_Row_Object_From_Temp_Row_Object": {
"inputs": {
"name": "Row Object",
"value": "#variables('Temp Row Object')"
},
"runAfter": {
"Add_Property_To_Temp_Row_Object": [
"Succeeded"
]
},
"type": "SetVariable"
}
},
"foreach": "#item()",
"runAfter": {
"Reset_Column_Index": [
"Succeeded"
]
},
"type": "Foreach"
},
"Reset_Column_Index": {
"inputs": {
"name": "Column Index",
"value": 0
},
"runAfter": {
"Reset_Temp_Row_Object": [
"Succeeded"
]
},
"type": "SetVariable"
},
"Reset_Row_Object": {
"inputs": {
"name": "Row Object",
"value": {}
},
"runAfter": {},
"type": "SetVariable"
},
"Reset_Temp_Row_Object": {
"inputs": {
"name": "Temp Row Object",
"value": {}
},
"runAfter": {
"Reset_Row_Object": [
"Succeeded"
]
},
"type": "SetVariable"
}
},
"foreach": "#variables('Rows')",
"runAfter": {
"Initialize_Column_Index": [
"Succeeded"
]
},
"runtimeConfiguration": {
"concurrency": {
"repetitions": 1
}
},
"type": "Foreach"
},
"Initialize_Column_Index": {
"inputs": {
"variables": [
{
"name": "Column Index",
"type": "integer"
}
]
},
"runAfter": {
"Initialize_Temp_Row_Object": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_Columns": {
"inputs": {
"variables": [
{
"name": "Columns",
"type": "array",
"value": "#variables('Data')?['tables'][0]['columns']"
}
]
},
"runAfter": {
"Initialize_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_Data": {
"inputs": {
"variables": [
{
"name": "Data",
"type": "object",
"value": {
"tables": [
{
"columns": [
{
"name": "task",
"type": "string"
},
{
"name": "environment",
"type": "string"
},
{
"name": "workspace",
"type": "string"
}
],
"name": "PrimaryResult",
"rows": [
[
"job_name_1",
"PRODUCTION",
"WORKSPACE-1"
],
[
"job_name_2",
"TEST",
"WORKSPACE-2"
]
]
}
]
}
}
]
},
"runAfter": {},
"type": "InitializeVariable"
},
"Initialize_Row_Object": {
"inputs": {
"variables": [
{
"name": "Row Object",
"type": "object"
}
]
},
"runAfter": {
"Initialize_Transformed_Array": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_Rows": {
"inputs": {
"variables": [
{
"name": "Rows",
"type": "array",
"value": "#variables('Data')?['tables'][0]['rows']"
}
]
},
"runAfter": {
"Initialize_Columns": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_Temp_Row_Object": {
"inputs": {
"variables": [
{
"name": "Temp Row Object",
"type": "object",
"value": {}
}
]
},
"runAfter": {
"Initialize_Row_Object": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_Transformed_Array": {
"inputs": {
"variables": [
{
"name": "Transformed Array",
"type": "array"
}
]
},
"runAfter": {
"Initialize_Rows": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Result": {
"inputs": "#variables('Transformed Array')",
"runAfter": {
"For_Each_Row": [
"Succeeded"
]
},
"type": "Compose"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"method": "GET",
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
The main challenges I see with having to do this are:
The lack of ability to self-reference a variable. Because of that, I had to use a Row Object in conjunction with a Temp Row Object when adding each property (see the sketch after this list).
The amount of nesting is a little hard to follow, but it works.
Performance will be a real burden if you have a lot of rows.
One thing to note is that the outer For Each action needs to have its concurrency set to 1; if it isn't, you'll run into problems.
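For reference, the self-reference limitation is the reason each column is written in two steps; this is the relevant pair of actions, isolated from the For_Each_Column loop in the definition above:
"Add_Property_To_Temp_Row_Object": {
"inputs": {
"name": "Temp Row Object",
"value": "@addProperty(variables('Row Object'), variables('Columns')?[variables('Column Index')]['name'], items('For_Each_Column'))"
},
"type": "SetVariable"
},
"Set_Row_Object_From_Temp_Row_Object": {
"inputs": {
"name": "Row Object",
"value": "@variables('Temp Row Object')"
},
"type": "SetVariable"
}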
This is the end result you were wanting ...

Check if a string is empty, whitespace or null in Logic Apps

I am parsing a CSV and checking if any item/string is empty/whitespace/null, but the condition is not working.
What am I doing wrong here?
Logic to check
Scenario
Parse CSV by each line and then by each column and check if string is empty/whitespace/null
If string is empty/whitespace/null
IscorrectCSV = False
Else
IscorrectCSV = True
INPUT 1
name,age
a1,34
a2,null
a3," "
a4,""
a5," 4"
A6,
EXPECTED OUTPUT 1
IscorrectCSV =False
INPUT 2
name,age
a1,34
a2,35
a3,36
EXPECTED OUTPUT 2
IscorrectCSV =True
First, you need AND instead of OR.
Remove the first comparison and set the variable before the condition.
First, check whether the value is equal to null.
Secondly, check whether length() is equal to 0.
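For example, a single condition expression along those lines, assuming the cell value is held in a string variable named CellValue (the variable name is only for illustration):
@equals(length(trim(coalesce(variables('CellValue'), ''))), 0)
coalesce() turns a null into an empty string, trim() strips the whitespace, and the length() comparison then catches null, empty and whitespace-only values in one check.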
After reproducing from my end, I was able to achieve this using the flow below.
First I formed an array from the sample CSV given, and then I used a Condition action inside a For each loop to loop through each second-column value and check the condition.
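The array itself is formed from the raw CSV text by splitting on newlines and skipping the header row, using this expression for the Array variable (as in the definition below):
@skip(split(outputs('Compose'),'\n'),1)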
RESULTS:
You can reproduce the same in your logic app using the code view below.
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Array": {
"inputs": {
"variables": [
{
"name": "Array",
"type": "array",
"value": "#skip(split(outputs('Compose'),'\n'),1)"
}
]
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Compose": {
"inputs": "name,age\na1,34\na2,NULL\na3, \na4,''\na5,",
"runAfter": {},
"type": "Compose"
},
"Compose_2": {
"inputs": "#variables('FinalArray')",
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Compose"
},
"FinalArray": {
"inputs": {
"variables": [
{
"name": "FinalArray",
"type": "array"
}
]
},
"runAfter": {
"Array": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"For_each": {
"actions": {
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "FinalArray",
"value": "#{items('For_each')} - #{variables('IscorrectCSV')}"
},
"runAfter": {
"Set_IscorrectCSV_-_False": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Set_IscorrectCSV_-_False": {
"inputs": {
"name": "IscorrectCSV",
"value": false
},
"runAfter": {},
"type": "SetVariable"
}
},
"else": {
"actions": {
"Append_to_array_variable_2": {
"inputs": {
"name": "FinalArray",
"value": "#{items('For_each')} - #{variables('IscorrectCSV')}"
},
"runAfter": {
"Set_IscorrectCSV_-_True": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Set_IscorrectCSV_-_True": {
"inputs": {
"name": "IscorrectCSV",
"value": true
},
"runAfter": {},
"type": "SetVariable"
}
}
},
"expression": {
"or": [
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
"NULL"
]
},
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
" "
]
},
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
"''"
]
},
{
"lessOrEquals": [
"#length(slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each'))))",
0
]
}
]
},
"runAfter": {},
"type": "If"
}
},
"foreach": "#variables('Array')",
"runAfter": {
"IscorrectCSV": [
"Succeeded"
]
},
"type": "Foreach"
},
"IscorrectCSV": {
"inputs": {
"variables": [
{
"name": "IscorrectCSV",
"type": "boolean"
}
]
},
"runAfter": {
"FinalArray": [
"Succeeded"
]
},
"type": "InitializeVariable"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
You can use the empty() and trim() expressions together to find out whether an array item has a value.
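For instance, reusing the per-row items('For_each') from the previous answer, a sketch of that check on the value after the comma could be:
@empty(trim(last(split(items('For_each'), ','))))
This returns true when the second column is empty or whitespace-only; the literal NULL and '' values from the sample input would still need their own equals() comparisons.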

How to assign JSON field from one data source to another in Logic App

I have two different sets of JSON data coming into my Logic App:
Data 1:
[
{
"EmployeeCode":"123",
"Username":"abc"
},
{
"EmployeeCode":"456",
"Username":"def"
}
]
Data 2:
[
{
"EmployeeCode":"123",
"Team":"IT"
},
{
"EmployeeCode":"456",
"Team":"Finance"
}
]
And I want to generate final output like this:
Final output:
[
{
"EmployeeCode":"123",
"Username":"abc",
"Team":"IT"
},
{
"EmployeeCode":"456",
"Username":"def",
"Team":"Finance"
}
]
Is there a simple way to achieve this in the Logic App itself, without using JavaScript, an Azure Function or anything else?
After reproducing from our end, here is how we were able to achieve your requirement.
First, we used two Parse JSON actions, one per payload, to extract the items in the JSON.
Then we used a Condition action to compare the EmployeeCode values and merged the matching items using a Compose action.
To make the whole JSON reusable later, we initialized an array variable and appended the merged output from the condition. Here is my logic app.
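As a small simplification, the Compose inside the condition could also be built with addProperty(), so the EmployeeCode and Username fields don't have to be retyped; a sketch using the same loop names as the definition below:
@addProperty(items('For_each'), 'Team', items('For_each_2')?['Team'])
The result is the Data 1 item with the matching Team value added, which can then be appended to FinalJson as before.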
RESULTS:
Below is the code view of my logic app
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"FinalJSON": {
"inputs": "#variables('FinalJson')",
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Compose"
},
"For_each": {
"actions": {
"For_each_2": {
"actions": {
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "FinalJson",
"value": "#outputs('Compose')"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Compose": {
"inputs": {
"EmployeeCode": "#{items('For_each')['EmployeeCode']}",
"Team": "#{items('For_each_2')['Team']}",
"Username": "#{items('For_each')['Username']}"
},
"runAfter": {},
"type": "Compose"
}
},
"expression": {
"and": [
{
"equals": [
"#items('For_each')['EmployeeCode']",
"#items('For_each_2')['EmployeeCode']"
]
}
]
},
"runAfter": {},
"type": "If"
}
},
"foreach": "#body('Parse_JSON2')",
"runAfter": {},
"type": "Foreach"
}
},
"foreach": "#body('Parse_JSON1')",
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "Foreach"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "FinalJson",
"type": "array"
}
]
},
"runAfter": {
"Parse_JSON2": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"JSON1": {
"inputs": [
{
"EmployeeCode": "123",
"Username": "abc"
},
{
"EmployeeCode": "456",
"Username": "def"
}
],
"runAfter": {},
"type": "Compose"
},
"JSON2": {
"inputs": [
{
"EmployeeCode": "123",
"Team": "IT"
},
{
"EmployeeCode": "456",
"Team": "Finance"
}
],
"runAfter": {
"JSON1": [
"Succeeded"
]
},
"type": "Compose"
},
"Parse_JSON1": {
"inputs": {
"content": "#outputs('JSON1')",
"schema": {
"items": {
"properties": {
"EmployeeCode": {
"type": "string"
},
"Username": {
"type": "string"
}
},
"required": [
"EmployeeCode",
"Username"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"JSON2": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_JSON2": {
"inputs": {
"content": "#outputs('JSON2')",
"schema": {
"items": {
"properties": {
"EmployeeCode": {
"type": "string"
},
"Team": {
"type": "string"
}
},
"required": [
"EmployeeCode",
"Team"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"Parse_JSON1": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}

How to conditionally transform JSON based on body response in Azure Logic App

I want to have an Azure Logic App that receives HTTP data from multiple device types.
Each device has a different body JSON schema.
Based on the body (its structure or a specific value) I want to execute a specific Liquid map so that the result is a normalized response.
(Assume that I cannot check the HTTP headers.)
Is there a better pattern than Logic App?
Thanks!
You can create a basic JSON schema with at least one field that identifies where the call came from (the caller should send that field in the body) and, based on that field, parse the incoming JSON against a more comprehensive schema (format 1, 2, etc.), as sketched below.
However, I would recommend creating separate logic apps if there is a major difference between the inputs posted by the sources. It is not good practice to have different flows based on the caller; a logic app should contain minimal logic, so your approach might not be good in the long term.
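A rough sketch of that routing with a Switch action on an assumed discriminator field; the deviceType field name and the case values are illustrative, not taken from the question:
"Switch_on_device_type": {
"cases": {
"Case_desktop": {
"actions": {},
"case": "desktop"
},
"Case_mobile": {
"actions": {},
"case": "mobile"
}
},
"default": {
"actions": {}
},
"expression": "@triggerBody()?['deviceType']",
"runAfter": {},
"type": "Switch"
}
Each case would then hold the Parse JSON or Liquid transform specific to that device type.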
Here is one workaround that you can try.
Here is my logic app.
I have used some sample data to explain this better.
Here is the data that I'm sending to the HTTP trigger:
{
"devices": [
{
"device_number": 1,
"device_type": "mobile",
"device_name": "MobileA"
},
{
"device_number": 2,
"device_type": "desktop",
"device_name": "DesktopA"
},
{
"device_number": 3,
"device_type": "tablet",
"device_name": "TabletA"
},
{
"device_number": 4,
"device_type": "desktop",
"device_name": "DesktopB"
}
]
}
Then I parse the whole payload for future use and run a few steps that segregate the JSON into device_number, device_type and device_name.
Here is the schema that I'm using to parse the data sent to the HTTP trigger:
{
"type": "object",
"properties": {
"devices": {
"type": "array",
"items": {
"type": "object",
"properties": {
"device_number": {
"type": "integer"
},
"device_type": {
"type": "string"
},
"device_name": {
"type": "string"
}
},
"required": [
"device_number",
"device_type",
"device_name"
]
}
}
}
}
In the next step I parse the data again for clarity, but this is completely avoidable when you are already storing the whole payload in arrays.
For combining or normalizing the results, we can either use a Compose action and save the output to a storage account, or send the same data directly using the Cosmos DB Create or update document connector.
Here is the code view of my logic app:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Compose": {
"inputs": {
"DevicesInformation": [
{
"DeviceNames": "#{variables('DeviceName')}",
"DeviceNumbers": "#{variables('DeviceNumber')}",
"DeviceTypes": "#{variables('DeviceType')}"
}
]
},
"runAfter": {
"Parse_DeviceName": [
"Succeeded"
],
"Parse_DeviceNumber": [
"Succeeded"
],
"Parse_DeviceType": [
"Succeeded"
]
},
"type": "Compose"
},
"Create_or_update_document_(V3)": {
"inputs": {
"body": {
"DevicesInformation": [
{
"DeviceNames": "#{variables('DeviceName')}",
"DeviceNumbers": "#{variables('DeviceNumber')}",
"DeviceTypes": "#{variables('DeviceType')}"
}
],
"id": "#{guid()}"
},
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb_1']['connectionId']"
}
},
"method": "post",
"path": "/v2/cosmosdb/#{encodeURIComponent('AccountNameFromSettings')}/dbs/#{encodeURIComponent('container2408')}/colls/#{encodeURIComponent('#devices')}/docs"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"For_each": {
"actions": {
"Append_to_DeviceNumber_variable": {
"inputs": {
"name": "DeviceNumber",
"value": "#items('For_each')?['device_number']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceNumber": [
"Succeeded"
]
},
"type": "Foreach"
},
"For_each_2": {
"actions": {
"Append_to_DeviceType_variable": {
"inputs": {
"name": "DeviceType",
"value": "#items('For_each_2')?['device_type']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceType": [
"Succeeded"
]
},
"type": "Foreach"
},
"For_each_3": {
"actions": {
"Append_to_DeviceName_variable": {
"inputs": {
"name": "DeviceName",
"value": "#items('For_each_3')?['device_name']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceName": [
"Succeeded"
]
},
"type": "Foreach"
},
"Initialize_variable_DeviceName": {
"inputs": {
"variables": [
{
"name": "DeviceName",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_DeviceNumber": {
"inputs": {
"variables": [
{
"name": "DeviceNumber",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_DeviceType": {
"inputs": {
"variables": [
{
"name": "DeviceType",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Parse_Data": {
"inputs": {
"content": "#triggerBody()",
"schema": {
"properties": {
"devices": {
"items": {
"properties": {
"device_name": {
"type": "string"
},
"device_number": {
"type": "integer"
},
"device_type": {
"type": "string"
}
},
"required": [
"device_number",
"device_type",
"device_name"
],
"type": "object"
},
"type": "array"
}
},
"type": "object"
}
},
"runAfter": {},
"type": "ParseJson"
},
"Parse_DeviceName": {
"inputs": {
"content": "#variables('DeviceName')",
"schema": {
"items": {
"type": "string"
},
"type": "array"
}
},
"runAfter": {
"For_each_3": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_DeviceNumber": {
"inputs": {
"content": "#variables('DeviceNumber')",
"schema": {
"items": {
"type": "integer"
},
"type": "array"
}
},
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_DeviceType": {
"inputs": {
"content": "#variables('DeviceType')",
"schema": {
"items": {
"type": "string"
},
"type": "array"
}
},
"runAfter": {
"For_each_2": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {
"$connections": {
"value": {
"documentdb_1": {
"connectionId": "/subscriptions/<YOUR_SUBSCRIPTION_ID>/resourceGroups/<YOUR_RESOURCE_GROUP>/providers/Microsoft.Web/connections/documentdb-1",
"connectionName": "documentdb-1",
"id": "/subscriptions/<YOUR_SUBSCRIPTION_ID>/providers/Microsoft.Web/locations/northcentralus/managedApis/documentdb"
}
}
}
}
}

Azure Logic App Condition does not work in loop if based on changing values

I need to write a simple Logic App that connects to an HTTP endpoint, receives some JSON, loops through the JSON message and submits it to a different HTTP endpoint in chunks, based on a value in the message.
In doing this I've come to the conclusion that Conditions inside a For each loop always evaluate against the values from before the loop and execute the path that matched that earlier result, even though the result should have changed because some variables have been updated inside the loop.
I've managed to illustrate the problem with the example below.
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Compose": {
"inputs": "#variables('TestStr')",
"runAfter": {
"Compose_3": [
"Succeeded"
]
},
"type": "Compose"
},
"Compose_3": {
"inputs": "#variables('TestArray')",
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Compose"
},
"For_each": {
"actions": {
"Compose_2": {
"inputs": "#variables('TestArray')",
"runAfter": {
"Condition": [
"Succeeded"
]
},
"type": "Compose"
},
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "TestArray",
"value": "#items('For_each')"
},
"runAfter": {},
"type": "AppendToArrayVariable"
},
"Set_variable_2": {
"inputs": {
"name": "TestStr",
"value": "XXXX"
},
"runAfter": {
"Append_to_array_variable": [
"Succeeded"
]
},
"type": "SetVariable"
}
},
"else": {
"actions": {
"Set_variable": {
"inputs": {
"name": "TestStr",
"value": "not"
},
"runAfter": {},
"type": "SetVariable"
}
}
},
"expression": {
"and": [
{
"equals": [
"#variables('TestStr')",
"BlankValue"
]
}
]
},
"runAfter": {},
"type": "If"
}
},
"foreach": "#variables('FullArray')",
"runAfter": {
"Initialize_variable_3": [
"Succeeded"
]
},
"type": "Foreach"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "TestStr",
"type": "String",
"value": "BlankValue"
}
]
},
"runAfter": {},
"type": "InitializeVariable"
},
"Initialize_variable_2": {
"inputs": {
"variables": [
{
"name": "TestArray",
"type": "Array"
}
]
},
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_3": {
"inputs": {
"variables": [
{
"name": "FullArray",
"type": "Array",
"value": [
{
"key": "value1"
},
{
"key": "value2"
},
{
"key": "value3"
}
]
}
]
},
"runAfter": {
"Initialize_variable_2": [
"Succeeded"
]
},
"type": "InitializeVariable"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"Recurrence": {
"recurrence": {
"frequency": "Month",
"interval": 3
},
"type": "Recurrence"
}
}
}
}
I would expect the Logic App above to execute the loop three times, evaluate the condition each time and perform the array insert only once, so that TestArray contains the single entry
{
"key":"value1"
}
and the string TestStr ends up with the value 'not'.
But the actual results differ: TestArray contains all three entries from FullArray and TestStr is 'XXXX'.
What am I missing here? Are there any workarounds?
I've discovered the reason for this behaviour. Posting it here in case anyone comes across this in the future.
For each loops run their iterations in parallel by default, so each iteration evaluates its condition before the other iterations have had a chance to update the variables.
There is a way of switching this off: in the loop's Settings you can turn on concurrency control and set the degree of parallelism to 1.
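In code view, that setting corresponds to a runtimeConfiguration block on the For_each action, the same block that appears in the first answer on this page:
"runtimeConfiguration": {
"concurrency": {
"repetitions": 1
}
}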
