Convert array to string variable in Logic App - azure-logic-apps

What's the quickest way to convert an array to a string variable (whilst keeping all the line breaks) using a Logic App? Below is my definition, which reads each array item and appends it to a string variable; it works fine but isn't the most efficient. The idea is that I can then create a blob from the final output without the ["..."] wrapper.
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_each": {
"actions": {
"Append_to_string_variable": {
"inputs": {
"name": "Result String",
"value": "#concat(substring(items('For_each'), 0, sub(length(items('For_each')), 0)), '\r\n')"
},
"runAfter": {},
"type": "AppendToStringVariable"
}
},
"foreach": "#variables('CSV Content')",
"runAfter": {
"Initialize_Result_String": [
"Succeeded"
]
},
"runtimeConfiguration": {
"concurrency": {
"repetitions": 1
}
},
"type": "Foreach"
},
"Initialize_Result_String": {
"inputs": {
"variables": [
{
"name": "Result String",
"type": "string"
}
]
},
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "CSV Content",
"type": "array",
"value": [
"Header1,Header2",
"Data1,Data2",
"Data3,Data4"
]
}
]
},
"runAfter": {},
"type": "InitializeVariable"
},
"Initialize_variable_2": {
"inputs": {
"variables": [
{
"name": "Final Result",
"type": "string",
"value": "#variables('Result String')"
}
]
},
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "InitializeVariable"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
Basically I'm looking for a way to convert the array
[
"Header1,Header2",
"Data1,Data2",
"Data3,Data4"
]
to a string
Header1,Header2
Data1,Data2
Data3,Data4

One of the workarounds is to use the Join action, which joins each item in the array with a given delimiter. Here is the screenshot of my logic app.
RESULT:
In Storage account:
Below is the code view of my logic app:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Create_blob_(V2)": {
"inputs": {
"body": "#body('Join')",
"headers": {
"ReadFileMetadataFromServer": true
},
"host": {
"connection": {
"name": "#parameters('$connections')['azureblob']['connectionId']"
}
},
"method": "post",
"path": "/v2/datasets/#{encodeURIComponent(encodeURIComponent('AccountNameFromSettings'))}/files",
"queries": {
"folderPath": "/container1",
"name": "sample1",
"queryParametersSingleEncoded": true
}
},
"runAfter": {
"Join": [
"Succeeded"
]
},
"runtimeConfiguration": {
"contentTransfer": {
"transferMode": "Chunked"
}
},
"type": "ApiConnection"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "CSV Content",
"type": "array",
"value": [
"Header1,Header2",
"Data1,Data2",
"Data3,Data4"
]
}
]
},
"runAfter": {},
"type": "InitializeVariable"
},
"Join": {
"inputs": {
"from": "#variables('CSV Content')",
"joinWith": "\n"
},
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "Join"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"manual": {
"inputs": {},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {
"$connections": {
"value": {
"azureblob": {
"connectionId": "/subscriptions/<Your_Subscription_Id>/resourceGroups/<Your_ResourceGroup>/providers/Microsoft.Web/connections/azureblob",
"connectionName": "azureblob",
"id": "/subscriptions/<Your_Subscription_Id>/providers/Microsoft.Web/locations/northcentralus/managedApis/azureblob"
}
}
}
}
}
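If you prefer to avoid a separate Join action, the same result can come from a single expression. This is only a sketch, assuming the 'CSV Content' array variable from the question; decodeUriComponent('%0A') is used here just to produce a literal newline delimiter:

@{join(variables('CSV Content'), decodeUriComponent('%0A'))}

The output of that expression can then be passed straight to the Create blob action body, exactly like the Join output above.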

Related

Check string is empty, white space or null Logic Apps

I am parsing a CSV and checking whether any item/string is empty, whitespace, or null, but the condition is not working!
What am I doing wrong here?
Logic to check
Scenario
Parse the CSV line by line, then column by column, and check if the string is empty/whitespace/null
If string is empty/whitespace/null
IscorrectCSV = False
Else
IscorrectCSV = True
INPUT 1
name,age
a1,34
a2,null
a3," "
a4,""
a5," 4"
A6,
EXPECTED OUTPUT 1
IscorrectCSV = False
INPUT 2
name,age
a1,34
a2,35
a3,36
EXPECTED OUTPUT 2
IscorrectCSV = True
First, you need to use and instead of or,
remove the first comparison, and set the variable before the condition.
First, check whether the value is equal to null.
Second, check whether length() is equal to 0.
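A minimal sketch of that combined check as a single condition expression, assuming the cell value is held in a hypothetical variable named CellValue (coalesce guards against calling length() on null):

@and(not(equals(variables('CellValue'), null)), greater(length(coalesce(variables('CellValue'), '')), 0))

This evaluates to true only when the value is neither null nor zero-length.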
After reproducing this on my end, I was able to achieve it using the flow below.
First I formed an array from the sample CSV, and then used a Condition action inside a For each loop to loop through the second-column items and check the condition.
RESULTS:
You can reproduce the same in your logic app using the code view below:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Array": {
"inputs": {
"variables": [
{
"name": "Array",
"type": "array",
"value": "#skip(split(outputs('Compose'),'\n'),1)"
}
]
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Compose": {
"inputs": "name,age\na1,34\na2,NULL\na3, \na4,''\na5,",
"runAfter": {},
"type": "Compose"
},
"Compose_2": {
"inputs": "#variables('FinalArray')",
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Compose"
},
"FinalArray": {
"inputs": {
"variables": [
{
"name": "FinalArray",
"type": "array"
}
]
},
"runAfter": {
"Array": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"For_each": {
"actions": {
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "FinalArray",
"value": "#{items('For_each')} - #{variables('IscorrectCSV')}"
},
"runAfter": {
"Set_IscorrectCSV_-_False": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Set_IscorrectCSV_-_False": {
"inputs": {
"name": "IscorrectCSV",
"value": false
},
"runAfter": {},
"type": "SetVariable"
}
},
"else": {
"actions": {
"Append_to_array_variable_2": {
"inputs": {
"name": "FinalArray",
"value": "#{items('For_each')} - #{variables('IscorrectCSV')}"
},
"runAfter": {
"Set_IscorrectCSV_-_True": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Set_IscorrectCSV_-_True": {
"inputs": {
"name": "IscorrectCSV",
"value": true
},
"runAfter": {},
"type": "SetVariable"
}
}
},
"expression": {
"or": [
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
"NULL"
]
},
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
" "
]
},
{
"equals": [
"#slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each')))",
"''"
]
},
{
"lessOrEquals": [
"#length(slice(items('For_each'),add(indexOf(items('For_each'),','),1),length(items('For_each'))))",
0
]
}
]
},
"runAfter": {},
"type": "If"
}
},
"foreach": "#variables('Array')",
"runAfter": {
"IscorrectCSV": [
"Succeeded"
]
},
"type": "Foreach"
},
"IscorrectCSV": {
"inputs": {
"variables": [
{
"name": "IscorrectCSV",
"type": "boolean"
}
]
},
"runAfter": {
"FinalArray": [
"Succeeded"
]
},
"type": "InitializeVariable"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
You can use the empty() and trim() expressions together to check whether an array item has a value.
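For example, a hedged sketch of that check inside the For each loop above, where items('For_each') is one CSV row:

@not(empty(trim(items('For_each'))))

This returns true when the item contains some non-whitespace value, so it can be used directly as the condition instead of the four separate comparisons.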

How to assign JSON field from one data source to another in Logic App

I have two different JSON data sets coming into my Logic App:
Data 1:
[
{
"EmployeeCode":"123",
"Username":"abc"
},
{
"EmployeeCode":"456",
"Username":"def"
}
]
Data 2:
[
{
"EmployeeCode":"123",
"Team":"IT"
},
{
"EmployeeCode":"456",
"Team":"Finance"
}
]
And I want to generate final output like this:
Final output:
[
{
"EmployeeCode":"123",
"Username":"abc",
"Team":"IT"
},
{
"EmployeeCode":"456",
"Username":"def",
"Team":"Finance"
}
]
Is there a simple way to achieve this in the Logic App itself, without using JavaScript, an Azure Function, or anything else?
After reproducing this on our end, here is how we were able to achieve your requirement.
First, we used two Parse JSON actions, one for each data set, to extract the items in the JSON.
Then we used a Condition action to compare the EmployeeCode values, and merged the matching items using a Compose action.
To keep the whole merged JSON available for later use, we initialized an array variable and appended the output of each successful condition run. Here is my logic app.
RESULTS:
Below is the code view of my logic app
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"FinalJSON": {
"inputs": "#variables('FinalJson')",
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Compose"
},
"For_each": {
"actions": {
"For_each_2": {
"actions": {
"Condition": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "FinalJson",
"value": "#outputs('Compose')"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "AppendToArrayVariable"
},
"Compose": {
"inputs": {
"EmployeeCode": "#{items('For_each')['EmployeeCode']}",
"Team": "#{items('For_each_2')['Team']}",
"Username": "#{items('For_each')['Username']}"
},
"runAfter": {},
"type": "Compose"
}
},
"expression": {
"and": [
{
"equals": [
"#items('For_each')['EmployeeCode']",
"#items('For_each_2')['EmployeeCode']"
]
}
]
},
"runAfter": {},
"type": "If"
}
},
"foreach": "#body('Parse_JSON2')",
"runAfter": {},
"type": "Foreach"
}
},
"foreach": "#body('Parse_JSON1')",
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "Foreach"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "FinalJson",
"type": "array"
}
]
},
"runAfter": {
"Parse_JSON2": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"JSON1": {
"inputs": [
{
"EmployeeCode": "123",
"Username": "abc"
},
{
"EmployeeCode": "456",
"Username": "def"
}
],
"runAfter": {},
"type": "Compose"
},
"JSON2": {
"inputs": [
{
"EmployeeCode": "123",
"Team": "IT"
},
{
"EmployeeCode": "456",
"Team": "Finance"
}
],
"runAfter": {
"JSON1": [
"Succeeded"
]
},
"type": "Compose"
},
"Parse_JSON1": {
"inputs": {
"content": "#outputs('JSON1')",
"schema": {
"items": {
"properties": {
"EmployeeCode": {
"type": "string"
},
"Username": {
"type": "string"
}
},
"required": [
"EmployeeCode",
"Username"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"JSON2": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_JSON2": {
"inputs": {
"content": "#outputs('JSON2')",
"schema": {
"items": {
"properties": {
"EmployeeCode": {
"type": "string"
},
"Team": {
"type": "string"
}
},
"required": [
"EmployeeCode",
"Team"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"Parse_JSON1": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
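An alternative sketch that avoids the nested For each: inside a single loop over the Parse_JSON1 body, a Filter array action (hypothetical name Filter_array) can pick the matching record from Parse_JSON2 on EmployeeCode, and a union() expression can then merge the two objects:

@union(items('For_each'), first(body('Filter_array')))

first() assumes there is exactly one match per EmployeeCode; the merged object can then be appended to the FinalJson variable exactly as above.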

How to conditionally transform JSON based on body response in Azure Logic App

I want to have an Azure Logic App that receives HTTP data from multiple device types.
Each device has a different body JSON schema structure.
Based on the response body (structure or a specific value), I want to execute a specific Liquid map so that the result will be a normalized response.
(Assume that I cannot check the HTTP headers...)
Is there a better pattern than a Logic App?
Thanks!
You can create a basic JSON schema with at least one field that identifies where the call came from (the caller should send that field in the body), and based on that field parse the incoming JSON against a more comprehensive schema (format 1, 2, etc.).
However, I would recommend creating separate logic apps if there is a major difference between the inputs posted from the sources. It is not good practice to branch into different flows based on the caller; a logic app should contain minimal logic. Your approach might not be good in the long term.
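As a minimal sketch of such a discriminator schema (the field name device_type is an assumption for illustration, not taken from your payloads), the Parse JSON action would validate only the identifying field, and a Switch action on that field would then select the appropriate Liquid map:

{
  "type": "object",
  "properties": {
    "device_type": { "type": "string" }
  },
  "required": [ "device_type" ]
}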
Here is one of the workarounds that you can try.
Here is my logic app
I have considered some sample data to explain this better.
Here is the data that I'm sending to the HTTP trigger:
{
"devices": [
{
"device_number": 1,
"device_type": "mobile",
"device_name": "MobileA"
},
{
"device_number": 2,
"device_type": "desktop",
"device_name": "DesktopA"
},
{
"device_number": 3,
"device_type": "tablet",
"device_name": "TabletA"
},
{
"device_number": 4,
"device_type": "desktop",
"device_name": "DesktopB"
}
]
}
Then I parse the whole payload for future use and perform some steps that segregate the JSON into device_number, device_type and device_name.
Here is the schema that I'm using to parse the data that is sent to the HTTP trigger:
{
"type": "object",
"properties": {
"devices": {
"type": "array",
"items": {
"type": "object",
"properties": {
"device_number": {
"type": "integer"
},
"device_type": {
"type": "string"
},
"device_name": {
"type": "string"
}
},
"required": [
"device_number",
"device_type",
"device_name"
]
}
}
}
}
In the next step I parse the data again for clarity, but this is completely avoidable when you are already storing the whole data in arrays.
For combining or normalizing the results, we can either use a Compose action and save the output to a storage account, or send the same data directly using the Cosmos DB Create or update document action.
Here is the code view of my logic app:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Compose": {
"inputs": {
"DevicesInformation": [
{
"DeviceNames": "#{variables('DeviceName')}",
"DeviceNumbers": "#{variables('DeviceNumber')}",
"DeviceTypes": "#{variables('DeviceType')}"
}
]
},
"runAfter": {
"Parse_DeviceName": [
"Succeeded"
],
"Parse_DeviceNumber": [
"Succeeded"
],
"Parse_DeviceType": [
"Succeeded"
]
},
"type": "Compose"
},
"Create_or_update_document_(V3)": {
"inputs": {
"body": {
"DevicesInformation": [
{
"DeviceNames": "#{variables('DeviceName')}",
"DeviceNumbers": "#{variables('DeviceNumber')}",
"DeviceTypes": "#{variables('DeviceType')}"
}
],
"id": "#{guid()}"
},
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb_1']['connectionId']"
}
},
"method": "post",
"path": "/v2/cosmosdb/#{encodeURIComponent('AccountNameFromSettings')}/dbs/#{encodeURIComponent('container2408')}/colls/#{encodeURIComponent('#devices')}/docs"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"For_each": {
"actions": {
"Append_to_DeviceNumber_variable": {
"inputs": {
"name": "DeviceNumber",
"value": "#items('For_each')?['device_number']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceNumber": [
"Succeeded"
]
},
"type": "Foreach"
},
"For_each_2": {
"actions": {
"Append_to_DeviceType_variable": {
"inputs": {
"name": "DeviceType",
"value": "#items('For_each_2')?['device_type']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceType": [
"Succeeded"
]
},
"type": "Foreach"
},
"For_each_3": {
"actions": {
"Append_to_DeviceName_variable": {
"inputs": {
"name": "DeviceName",
"value": "#items('For_each_3')?['device_name']"
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_Data')?['devices']",
"runAfter": {
"Initialize_variable_DeviceName": [
"Succeeded"
]
},
"type": "Foreach"
},
"Initialize_variable_DeviceName": {
"inputs": {
"variables": [
{
"name": "DeviceName",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_DeviceNumber": {
"inputs": {
"variables": [
{
"name": "DeviceNumber",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_DeviceType": {
"inputs": {
"variables": [
{
"name": "DeviceType",
"type": "array"
}
]
},
"runAfter": {
"Parse_Data": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Parse_Data": {
"inputs": {
"content": "#triggerBody()",
"schema": {
"properties": {
"devices": {
"items": {
"properties": {
"device_name": {
"type": "string"
},
"device_number": {
"type": "integer"
},
"device_type": {
"type": "string"
}
},
"required": [
"device_number",
"device_type",
"device_name"
],
"type": "object"
},
"type": "array"
}
},
"type": "object"
}
},
"runAfter": {},
"type": "ParseJson"
},
"Parse_DeviceName": {
"inputs": {
"content": "#variables('DeviceName')",
"schema": {
"items": {
"type": "string"
},
"type": "array"
}
},
"runAfter": {
"For_each_3": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_DeviceNumber": {
"inputs": {
"content": "#variables('DeviceNumber')",
"schema": {
"items": {
"type": "integer"
},
"type": "array"
}
},
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Parse_DeviceType": {
"inputs": {
"content": "#variables('DeviceType')",
"schema": {
"items": {
"type": "string"
},
"type": "array"
}
},
"runAfter": {
"For_each_2": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {
"$connections": {
"value": {
"documentdb_1": {
"connectionId": "/subscriptions/<YOUR_SUBSCRIPTION_ID>/resourceGroups/<YOUR_RESOURCE_GROUP>/providers/Microsoft.Web/connections/documentdb-1",
"connectionName": "documentdb-1",
"id": "/subscriptions/<YOUR_SUBSCRIPTION_ID>/providers/Microsoft.Web/locations/northcentralus/managedApis/documentdb"
}
}
}
}
}
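As a lighter-weight sketch, a single Select action could replace the three For each loops and append operations above, producing one array of normalized objects (rather than three parallel arrays) in a single step; the output key names here are assumptions:

"Select": {
  "inputs": {
    "from": "@body('Parse_Data')?['devices']",
    "select": {
      "DeviceName": "@item()?['device_name']",
      "DeviceNumber": "@item()?['device_number']",
      "DeviceType": "@item()?['device_type']"
    }
  },
  "runAfter": { "Parse_Data": [ "Succeeded" ] },
  "type": "Select"
}

The output of Select can then be composed or written to Cosmos DB exactly as shown above.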

'Send an Email' parameter 'attachment content' cannot be null

I'm creating an Azure Logic App to email a CSV file with data. When there is no data, the Send_an_email_(V2) step is failing and I see the following error in its output:
Parameter 'Attachment Content' cannot be null or empty.
The Send_an_email_(V2) action in the Azure Logic App gets part of its input from the output of a preceding Create_CSV_table action. It uses the body of the Create_CSV_table output as shown below, in order to construct ContentBytes for an email attachment:
"actions": {
"Create_CSV_table": {
"inputs": {
"format": "CSV",
"from": "#body('Parse_JSON')"
},
"runAfter": {
"Parse_JSON": [
"Succeeded"
]
},
"type": "Table"
},
"Send_an_email_(V2)": {
"inputs": {
"body": {
"Attachments": [
{
"ContentBytes": "#{base64(body('Create_CSV_table'))}",
"Name": "report.csv"
}
],
"Body": "<p></p>",
"Subject": "My Report",
"To": "me#email.com"
},
"host": {
"connection": {
"name": "#parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {
"Create_CSV_table": [
"Succeeded"
]
},
"type": "ApiConnection"
}
When there is no data flowing into the Create_CSV_table step, that step still succeeds, and its raw output shows an empty body as follows:
{
"body": ""
}
Raw input for the failing Send_an_email_(V2) step
{
HTTP stuff ...,
"body": {
"Attachments": [
{
"ContentBytes": "",
"Name": "report.csv"
}
],
"Body": "<p></p>",
"Subject": "Report",
"To": "me#mail.com"
}
}
Raw output for the failing Send_an_email_(V2) step
{
HTTP stuff ...,
"body": {
"status": 400,
"message": "Parameter 'Attachment Content' cannot be null or empty.\r\nclientRequestId: 887e3968-c7e9-4c35-b588-f76fd0e51545",
"error": {
"message": "Parameter 'Attachment Content' cannot be null or empty."
},
"source": "office365-ase.azconn-ase.p.azurewebsites.net"
}
}
How do I handle this? Do I need to implement my own null handling for the "ContentBytes" of the Send_an_email_(V2) input? If so, how do I do that? Or is there another way to handle this? I want an empty email to be sent when there is no CSV content.
I figured out that @{base64(body('Create_CSV_table'))} is an Azure Logic Apps expression (denoted by @), containing functions that act on the JSON body of the Create_CSV_table output, and that the enclosing {} results in the output of the expression being a string.
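One hedged workaround is to make the Attachments value itself conditional, so the attachment array is empty when the CSV body is empty; this is only a sketch against the action names above, not the answer given below:

"Attachments": "@if(empty(body('Create_CSV_table')), json('[]'), createArray(json(concat('{\"Name\":\"report.csv\",\"ContentBytes\":\"', base64(body('Create_CSV_table')), '\"}'))))"

With that expression the email is still sent, just without an attachment, whenever Create_CSV_table produces an empty body.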
After the last step, you can write the output to a storage account using the Create blob (V2) action with a .csv file extension, read the same blob content back using the Get blob content (V2) action, and then Send an email.
Here are the screenshots of the Logic App.
In Outlook:
Here is the workflow:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Create_CSV_table_2": {
"inputs": {
"format": "CSV",
"from": "#variables('Array')"
},
"runAfter": {
"For_each_2": [
"Succeeded"
]
},
"type": "Table"
},
"Create_blob_(V2)": {
"inputs": {
"body": "#body('Create_CSV_table_2')",
"headers": {
"ReadFileMetadataFromServer": true
},
"host": {
"connection": {
"name": "#parameters('$connections')['azureblob']['connectionId']"
}
},
"method": "post",
"path": "/v2/datasets/#{encodeURIComponent(encodeURIComponent('AccountNameFromSettings'))}/files",
"queries": {
"folderPath": "/ch1container2408",
"name": "TestSample1.csv",
"queryParametersSingleEncoded": true
}
},
"runAfter": {
"Create_CSV_table_2": [
"Succeeded"
]
},
"runtimeConfiguration": {
"contentTransfer": {
"transferMode": "Chunked"
}
},
"type": "ApiConnection"
},
"For_each_2": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "Array",
"value": {
"created_at": "#items('For_each_2')['created_at']",
"funds": "#items('For_each_2')['funds']",
"id": "#items('For_each_2')['id']",
"pair": "#items('For_each_2')['market']",
"price": "#items('For_each_2')['price']",
"side": "#items('For_each_2')['side']",
"volume": "#items('For_each_2')['volume']"
}
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_JSON')",
"runAfter": {
"Initialize_variable_2": [
"Succeeded"
]
},
"type": "Foreach"
},
"Get_blob_content_(V2)": {
"inputs": {
"host": {
"connection": {
"name": "#parameters('$connections')['azureblob']['connectionId']"
}
},
"method": "get",
"path": "/v2/datasets/#{encodeURIComponent(encodeURIComponent('AccountNameFromSettings'))}/files/#{encodeURIComponent(encodeURIComponent('/<Your Container>','/<Your File>.csv'))}/content",
"queries": {
"inferContentType": true
}
},
"runAfter": {
"Create_blob_(V2)": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"HTTP": {
"inputs": {
"method": "GET",
"uri": "https://api.wazirx.com/api/v2/trades?market=btcusdt"
},
"runAfter": {},
"type": "Http"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "Array",
"type": "array"
}
]
},
"runAfter": {
"Parse_JSON": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Initialize_variable_2": {
"inputs": {
"variables": [
{
"name": "Table",
"type": "array"
}
]
},
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Parse_JSON": {
"inputs": {
"content": "#body('HTTP')",
"schema": {
"items": {
"properties": {
"created_at": {
"type": "string"
},
"funds": {
"type": "string"
},
"id": {
"type": "integer"
},
"market": {
"type": "string"
},
"price": {
"type": "string"
},
"side": {},
"volume": {
"type": "string"
}
},
"required": [
"id",
"market",
"price",
"volume",
"funds",
"created_at",
"side"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"HTTP": [
"Succeeded"
]
},
"type": "ParseJson"
},
"Send_an_email_(V2)": {
"inputs": {
"body": {
"Attachments": [
{
"ContentBytes": "#{base64(body('Get_blob_content_(V2)'))}",
"Name": "#body('Create_blob_(V2)')?['Name']"
}
],
"Body": "<p>TABLE TEST</p>",
"Subject": "Test",
"To": "<To Address>"
},
"host": {
"connection": {
"name": "#parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {
"Get_blob_content_(V2)": [
"Succeeded"
]
},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"manual": {
"inputs": {},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {
"$connections": {
"value": {
"azureblob": {
"connectionId": "<Your ConnectionId>",
"connectionName": "azureblob",
"id": "<Id>"
},
"office365": {
"connectionId": "<Your ConnectionId>",
"connectionName": "office365",
"id": "<Your Id>"
}
}
}
}
}

Transform property name for JSON of current bitcoin market data

UPDATE - I've amended the original question so that I'm now supplying code that can be used to easily set up a test Azure Logic App in Azure and repeat my scenario.
I have an Azure Logic App to retrieve Crypto market data from a public API and convert it to a CSV table.
I'm performing an API request by using an HTTP action in the workflow. The action does a GET of https://api.wazirx.com/api/v2/trades?market=btcusdt. This results in a response with the following structure:
[{"id":239067645,"market":"btcusdt","price":"61999.0","volume":"0.00005","funds":"3.09995","created_at":"2021-10-25T04:00:38Z","side":null}, {"id":239065383,"market":"btcusdt","price":"61966.0","volume":"0.00021","funds":"13.01286","created_at":"2021-10-25T03:57:25Z","side":null}]
I want to transform the property name of one of the properties from the API response ("market") to a custom property name ("pair"), so that I get the following as the input into the 'Create CSV table' action of my Azure Logic App:
[{"id":239067645,"pair":"btcusdt","price":"61999.0","volume":"0.00005","funds":"3.09995","created_at":"2021-10-25T04:00:38Z","side":null},{"id":239065383,"pair":"btcusdt","price":"61966.0","volume":"0.00021","funds":"13.01286","created_at":"2021-10-25T03:57:25Z","side":null}]
I'm currently using the following workflow and source code. The source code can be directly used to create the Logic App workflow in Azure using the 'consumption' SKU for an Azure Logic App. The workflow is failing at the 'Create CSV table' step as it's getting null from the body of the 'For each'.
How can I adapt my workflow code so that the input into the 'Create CSV table' action is the desired input?
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Create_CSV_table": {
"inputs": {
"format": "CSV",
"from": "#body('For_each')"
},
"runAfter": {
"For_each": [
"Succeeded"
]
},
"type": "Table"
},
"For_each": {
"actions": {
"Compose": {
"inputs": {
"pair": "#items('For_each')?['market']"
},
"runAfter": {},
"type": "Compose"
}
},
"foreach": "#body('Parse_JSON')",
"runAfter": {
"Parse_JSON": [
"Succeeded"
]
},
"type": "Foreach"
},
"HTTP": {
"inputs": {
"method": "GET",
"uri": "https://api.wazirx.com/api/v2/trades?market=btcusdt"
},
"runAfter": {},
"type": "Http"
},
"Parse_JSON": {
"inputs": {
"content": "#body('HTTP')",
"schema": {
"items": {
"properties": {
"created_at": {
"type": "string"
},
"funds": {
"type": "string"
},
"id": {
"type": "integer"
},
"market": {
"type": "string"
},
"price": {
"type": "string"
},
"side": {},
"volume": {
"type": "string"
}
},
"required": [
"id",
"market",
"price",
"volume",
"funds",
"created_at",
"side"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"HTTP": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
}
}
Try using a Logic App expression to handle the JSON array as a string and then convert it back to a JSON array.
More information on Logic App expressions: https://learn.microsoft.com/en-us/azure/logic-apps/workflow-definition-language-functions-reference
Please try this logic app code:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Compose": {
"inputs": "#json(replace(string(body('HTTP')),'\"market\"','\"pair\"'))",
"runAfter": {
"HTTP": [
"Succeeded"
]
},
"type": "Compose"
},
"Create_CSV_table": {
"inputs": {
"format": "CSV",
"from": "#outputs('Compose')"
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "Table"
},
"HTTP": {
"inputs": {
"method": "GET",
"uri": "https://api.wazirx.com/api/v2/trades?market=btcusdt"
},
"runAfter": {},
"type": "Http"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
}
}
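The key rename happens in the Compose expression, shown here in isolation:

@json(replace(string(body('HTTP')), '"market"', '"pair"'))

Because replace() is a plain string substitution, it renames every literal occurrence of "market" in the response body; that is fine for this response shape, but worth keeping in mind for payloads where the same text could appear inside values.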
This is how I achieved your requirement.
Since Create CSV table only takes array values as input, I initialized an array variable and added an Append to array variable action instead of Compose, then passed that variable to Create CSV table.
Here are the logic app screenshots
Here is the workflow
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_each": {
"actions": {
"Create_CSV_table": {
"inputs": {
"format": "CSV",
"from": "#variables('Array')"
},
"runAfter": {},
"type": "Table"
}
},
"foreach": "#variables('Array')",
"runAfter": {
"For_each_2": [
"Succeeded"
]
},
"type": "Foreach"
},
"For_each_2": {
"actions": {
"Append_to_array_variable": {
"inputs": {
"name": "Array",
"value": {
"created_at": "#items('For_each_2')['created_at']",
"funds": "#items('For_each_2')['funds']",
"id": "#items('For_each_2')['id']",
"pair": "#items('For_each_2')['market']",
"price": "#items('For_each_2')['price']",
"side": "#items('For_each_2')['side']",
"volume": "#items('For_each_2')['volume']"
}
},
"runAfter": {},
"type": "AppendToArrayVariable"
}
},
"foreach": "#body('Parse_JSON')",
"runAfter": {
"Initialize_variable": [
"Succeeded"
]
},
"type": "Foreach"
},
"HTTP": {
"inputs": {
"method": "GET",
"uri": "https://api.wazirx.com/api/v2/trades?market=btcusdt"
},
"runAfter": {},
"type": "Http"
},
"Initialize_variable": {
"inputs": {
"variables": [
{
"name": "Array",
"type": "array"
}
]
},
"runAfter": {
"Parse_JSON": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Parse_JSON": {
"inputs": {
"content": "#body('HTTP')",
"schema": {
"items": {
"properties": {
"created_at": {
"type": "string"
},
"funds": {
"type": "string"
},
"id": {
"type": "integer"
},
"market": {
"type": "string"
},
"price": {
"type": "string"
},
"side": {},
"volume": {
"type": "string"
}
},
"required": [
"id",
"market",
"price",
"volume",
"funds",
"created_at",
"side"
],
"type": "object"
},
"type": "array"
}
},
"runAfter": {
"HTTP": [
"Succeeded"
]
},
"type": "ParseJson"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {},
"kind": "Http",
"type": "Request"
}
}
}
}
