Run dynamic stored procedure from azure logic app - azure-sql-database

I want to run multiple stored procedures from a logic app against an Azure SQL database, with the stored procedure names calculated from a variable.
I have a variable with the values (API_test1_SP1, API_test2_SP1, API_test3_SP1).
In a loop, I want to remove the _SP1 suffix from each value and run the resulting stored procedures (API_test1, API_test2, API_test3) against the Azure SQL database.
I tried the following expression without luck:
#{concat(API_,slice(#{variables('variable_name')},1,lastIndexOf('_')))}
Is it possible to run a stored procedure like this in a logic app?

You can use the expression below to achieve your requirement:
first(split(variables('Array')?[iterationIndexes('Until')],'_SP1'))
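For example, on the first iteration of the Until loop (index 0) the expression evaluates step by step as:

variables('Array')?[0]            -> "API_test1_SP1"
split('API_test1_SP1', '_SP1')    -> ["API_test1", ""]
first(...)                        -> "API_test1"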
To reproduce the scenario, I used the below flow in my logic app.
Below is the code view of my logic app:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Initialize_variable_-_array": {
"inputs": {
"variables": [
{
"name": "Array",
"type": "array",
"value": [
"API_test1_SP1",
"API_test2_SP1",
"API_test3_SP1"
]
}
]
},
"runAfter": {},
"type": "InitializeVariable"
},
"Initialize_variable_-_loop": {
"inputs": {
"variables": [
{
"name": "loop",
"type": "integer",
"value": 0
}
]
},
"runAfter": {
"Initialize_variable_-_array": [
"Succeeded"
]
},
"type": "InitializeVariable"
},
"Until": {
"actions": {
"Compose": {
"inputs": "#first(split(variables('Array')?[iterationIndexes('Until')],'_SP1'))",
"runAfter": {},
"type": "Compose"
},
"Increment_variable": {
"inputs": {
"name": "loop",
"value": 1
},
"runAfter": {
"Compose": [
"Succeeded"
]
},
"type": "IncrementVariable"
}
},
"expression": "#equals(variables('loop'), length(variables('Array')))",
"limit": {
"count": 60,
"timeout": "PT1H"
},
"runAfter": {
"Initialize_variable_-_loop": [
"Succeeded"
]
},
"type": "Until"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {},
"triggers": {
"manual": {
"inputs": {
"schema": {}
},
"kind": "Http",
"type": "Request"
}
}
},
"parameters": {}
}
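The code view above only builds the procedure name in a Compose action. To actually run the procedure inside the Until loop, you can pass the same expression to the SQL connector's Execute stored procedure action. The sketch below is only illustrative: it assumes a SQL connection referenced through $connections, the <server-name>/<database-name> segments are placeholders for your own connection, and the exact path format comes from the connector when you add the action in the designer.

"Execute_stored_procedure_(V2)": {
    "inputs": {
        "host": {
            "connection": {
                "name": "@parameters('$connections')['sql']['connectionId']"
            }
        },
        "method": "post",
        "path": "/v2/datasets/@{encodeURIComponent('<server-name>')},@{encodeURIComponent('<database-name>')}/procedures/@{encodeURIComponent(first(split(variables('Array')?[iterationIndexes('Until')],'_SP1')))}"
    },
    "runAfter": {
        "Compose": [
            "Succeeded"
        ]
    },
    "type": "ApiConnection"
}

The important part is that the procedure segment of the path is an expression rather than a hard-coded name, so each loop iteration calls a different stored procedure.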

Related

How to set Datatype in Additional Column in ADF

I need to set the data type for an Additional Column with dynamic content in the Sink of an ADF copy activity.
By default it takes nvarchar(max) from the JSON object, but I need bigint.
Below is the JSON that creates the table with the additional column:
{
"source": {
"type": "SqlServerSource",
"additionalColumns": [
{
"name": "ApplicationId",
"value": 3604509277250831000
}
],
"sqlReaderQuery": "SELECT * from Table A",
"queryTimeout": "02:00:00",
"isolationLevel": "ReadUncommitted",
"partitionOption": "None"
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false,
"tableOption": "autoCreate",
"disableMetricsCollection": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
}
ADF configuration and the resulting database column data type after the table is created are shown in the screenshots.
If I convert the dynamic content into an int,
@int(pipeline().parameters.application.applicationId)
then I get the warning below.
Please let me know how I can set the data type in ADF.
I also tried the same and got the same result.
"By default its taking nvarchar(max) from Json obj but I need bigInt"
To resolve this, when you add the additional column in your source dataset, go to the Mapping tab and click Import schemas. It imports the schema of the source and also shows the additional column in the schema; change the type of that column to Int64, as shown in the image below. In the image you can see "additional" after the name, which means it is an additional column.
After this, run your pipeline. It will create the additional column with data type bigint. Below is the pipeline JSON I used:
{
"name": "pipeline2",
"properties": {
"activities": [
{
"name": "Copy data1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "JsonSource",
"additionalColumns": [
{
"name": "name",
"value": {
"value": "#pipeline().parameters.demo.age",
"type": "Expression"
}
}
],
"storeSettings": {
"type": "AzureBlobFSReadSettings",
"recursive": true,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "JsonReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false,
"tableOption": "autoCreate",
"disableMetricsCollection": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"mappings": [
{
"source": {
"path": "$['taskId']"
},
"sink": {
"name": "taskId",
"type": "String"
}
},
{
"source": {
"path": "$['taskObtainedScore']"
},
"sink": {
"name": "taskObtainedScore",
"type": "String"
}
},
{
"source": {
"path": "$['multiInstance']"
},
"sink": {
"name": "multiInstance",
"type": "String"
}
},
{
"source": {
"path": "$['name']"
},
"sink": {
"name": "name",
"type": "Int64"
}
}
],
"collectionReference": ""
}
},
"inputs": [
{
"referenceName": "Json1",
"type": "DatasetReference"
}
],
"outputs": [
{
"referenceName": "AzureSqlTable1",
"type": "DatasetReference"
}
]
}
],
"parameters": {
"demo": {
"type": "object",
"defaultValue": {
"name": "John",
"age": 30,
"isStudent": true
}
}
},
"annotations": []
}
}
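The two pieces of that pipeline that work together here are the additionalColumns entry in the source (which injects the dynamic value) and the translator mapping entry that types the sink column as Int64, isolated from the JSON above for clarity:

"additionalColumns": [
    {
        "name": "name",
        "value": {
            "value": "@pipeline().parameters.demo.age",
            "type": "Expression"
        }
    }
]
...
{
    "source": { "path": "$['name']" },
    "sink": { "name": "name", "type": "Int64" }
}

With tableOption set to autoCreate, the sink table is then created with a bigint column for the additional value.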
OUTPUT:

Logic App with azure monitor and conditions

I created a workflow with Logic Apps. The goal is to notify a team when a patch is missing for a VM. I use the Azure Monitor connector in the logic app to run the query. After the Azure Monitor action I put a condition to check whether the query table is empty or has data: if the table is empty the condition is true, so it does not send a notification, and when it is false, it sends the notification.
When I run it, I get a logic error. Normally the table has no data, but after the condition the function empty([my_table]) returns false and sends me a notification with the result ("The query yielded no data").
What is the problem?
Thanks
Based on the shared requirement, we created the logic app and tested it in our environment; it works as expected. Instead of empty(), the condition below compares the length of the query result body with 0: when it is 0 the run is terminated, otherwise the result is emailed.
Below is the complete logic app code:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Condition_2": {
"actions": {
"Terminate_2": {
"inputs": {
"runStatus": "Cancelled"
},
"runAfter": {},
"type": "Terminate"
}
},
"else": {
"actions": {
"Send_an_email_(V2)_2": {
"inputs": {
"body": {
"Body": "<p>#{base64ToString(body('Run_query_and_visualize_results')?['body'])}</p>",
"Subject": "list of vm from update management ",
"To": "<UserEmailId>"
},
"host": {
"connection": {
"name": "#parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {},
"type": "ApiConnection"
}
}
},
"expression": {
"and": [
{
"equals": [
"#length(body('Run_query_and_visualize_results')?['body'])",
0
]
}
]
},
"runAfter": {
"Run_query_and_visualize_results": [
"Succeeded"
]
},
"type": "If"
},
"Run_query_and_visualize_results": {
"inputs": {
"body": "Update\n| where Classification == 'Security Updates' or Classification == 'Critical Updates'\n| where UpdateState == 'Needed'\n| summarize by Computer,ResourceGroup,Classification,UpdateState\n|sort by Computer",
"host": {
"connection": {
"name": "#parameters('$connections')['azuremonitorlogs']['connectionId']"
}
},
"method": "post",
"path": "/visualizeQuery",
"queries": {
"resourcegroups": "<Resource_group_Name",
"resourcename": "<log analytics workspacename",
"resourcetype": "Log Analytics Workspace",
"subscriptions": "<subcription_id>",
"timerange": "Last 12 hours",
"visType": "Html Table"
}
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"evaluatedRecurrence": {
"frequency": "Hour",
"interval": 3
},
"recurrence": {
"frequency": "Hour",
"interval": 3
},
"type": "Recurrence"
}
}
},
"parameters": {
"$connections": {
"value": {
"azuremonitorlogs": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/azuremonitorlogs",
"connectionName": "azuremonitorlogs",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/azuremonitorlogs"
},
"office365": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/office365",
"connectionName": "office365",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/office365"
}
}
}
}
}
Please find the reference output of the above logic app sample run:
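As a side note, if you would rather test the row count directly instead of the length of the HTML table produced by the visualize action, one option (a sketch, assuming the azuremonitorlogs connector's "Run query and list results" action, which returns a value array) is to drive the same condition from:

length(body('Run_query_and_list_results')?['value'])

and compare it with 0 in the same way as above.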

Azure Policy (deployifnotexists) not behaving as expected

This is my first post here. What I'm trying to do in Azure is a deployIfNotExists policy for storage accounts when certain settings are not enabled. I've attached my code. What I want to do is this:
Check for secure transfer being enabled
Check for TLS1_2 only
Check the FW
On the FW, have Azure Services allowed (e.g. NSG flow logs etc.)
If any of those conditions are not met, deploy them through the ARM template. What is catching me out is that I have intentionally put in bad settings to see it work, and the policy will not say that they are non-compliant.
{
"mode": "All",
"policyRule": {
"if": {
"field": "type",
"equals": "Microsoft.Storage/storageAccounts"
},
"then": {
"effect": "deployIfNotExists",
"details": {
"type": "Microsoft.Storage/storageAccounts",
"roleDefinitionIds": [
"/providers/Microsoft.Authorization/roleDefinitions/b24988ac-6180-42a0-ab88-20f7382dd24c"
],
"existenceCondition": {
"allOf": [
{
"field": "Microsoft.Storage/storageAccounts/supportsHttpsTrafficOnly",
"equals": true
},
{
"field": "Microsoft.Storage/storageAccounts/minimumTlsVersion",
"equals": "TLS1_2"
},
{
"field": "Microsoft.Storage/storageAccounts/networkAcls.defaultAction",
"equals": "deny"
},
{
"field": "Microsoft.Storage/storageAccounts/networkAcls.bypass",
"contains": "AzureServices"
}
]
},
"deployment": {
"properties": {
"mode": "incremental",
"template": {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"storageAccountName": {
"type": "String",
"metadata": {
"description": "storageAccountName"
}
},
"location": {
"type": "String",
"metadata": {
"description": "location"
}
}
},
"variables": {},
"resources": [
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2019-06-01",
"name": "[parameters('storageAccountName')]",
"location": "[parameters('location')]",
"properties": {
"minimumTlsVersion": "TLS1_2",
"networkAcls": {
"bypass": "AzureServices",
"defaultAction": "Deny"
},
"supportsHttpsTrafficOnly": true
}
}
],
"outputs": {}
},
"parameters": {
"storageAccountName": {
"value": "[field('Name')]"
},
"location": {
"value": "[field('location')]"
}
}
}
}
}
}
},
"parameters": {}
}
Thanks everyone
So, through further reading and talking with more experienced colleagues, I've determined that deployIfNotExists conditions are not to be used for a resource's own settings.
By that I mean I cannot use deployIfNotExists to fix a storage account's own settings (as above), but I could use it to deploy, say, diagnostic logging to a storage account. I am closing this question. I will try the append effect, and if I get anything good working I'll loop it back into this question for keen eyes.
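For reference, a minimal sketch of the append variant mentioned above (untested; note that append only takes effect when a matching resource is created or updated, it does not remediate existing resources on its own):

"then": {
    "effect": "append",
    "details": [
        {
            "field": "Microsoft.Storage/storageAccounts/minimumTlsVersion",
            "value": "TLS1_2"
        },
        {
            "field": "Microsoft.Storage/storageAccounts/supportsHttpsTrafficOnly",
            "value": true
        }
    ]
}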

How to extend an object with 1 parameter in json-schema

I wrote a JSON schema before, but now, as I am trying to make it a bit more advanced, I am stuck.
(I am open to 'good practice' tips in the comments)
(Is $id optional? Should I remove it for simplicity in the example code?)
Goal:
I am trying to make a schema with an object definition (example_obj) that is used recursively. This object may only have 1 property (or, and, or value). But in the root of the JSON, I want to add 1 additional property.
json-schema
{
"definitions": {
"example_obj": {
"$id": "#/definitions/example_obj",
"type": "object",
"maxProperties": 1,
"properties": {
"or": {
"$id": "#/definitions/example_obj/properties/or",
"type": "array",
"items": {
"$id": "#/definitions/example_obj/properties/or/items",
"$ref": "#/definitions/example_obj"
}
},
"and": {
"$id": "#/definitions/example_obj/properties/and",
"type": "array",
"items": {
"$id": "#/definitions/example_obj/properties/and/items",
"$ref": "#/definitions/example_obj"
}
},
"value": {
"$id": "#/definitions/example_obj/properties/value",
"type": "string"
}
}
}
},
"type": "object",
"title": "The Root Schema",
"required": ["filter_version"],
"allOf": [
{
"$ref": "#/definitions/example_obj"
},
{
"properties": {
"filter_version": {
"$id": "#/properties/filter_version",
"type": "string",
"pattern": "^([0-9]+\\.[0-9]+)$"
}
}
}
]
}
JSON that I want to pass validation:
{
"filter_version": "1.0",
"or": [
{
"and": [
{
"value": "subject"
}
]
},
{
"or": [
{
"value": "another subject"
}
]
}
]
}
Issue:
When I try to extend example_obj for the root definition, it seems to fail because the example_obj object does not allow more than 1 property by design.
In other words, it appears that every check on the number of properties that I add to example_obj is also applied to the additional property (i.e. filter_version).
Does anyone know where to place this check for 'exactly 1 property' so that it is not evaluated on the root object?
Attempts:
I tried different ways of expressing the requirements of example_obj, but with no success, for example replacing "maxProperties": 1 with:
"oneOf": [
{
"required": [
"or"
]
},
{
"required": [
"and"
]
},
{
"required": [
"where"
]
},
{
"required": [
"where not"
]
}
],
Thanks in advance for any help!
I am checking my schema with an online schema validator.
(In the end I need to validate it in Python, in case that matters.)
You can use oneOf instead of maxProperties to get around this.
{
"type": "object",
"properties": {
"filter_version": {
"type": "string",
"pattern": "^([0-9]+\\.[0-9]+)$"
}
},
"required": ["filter_version"],
"allOf": [{ "$ref": "#/definitions/example_obj" }],
"definitions": {
"example_obj": {
"type": "object",
"properties": {
"or": { "$ref": "#/definitions/example-obj-array" },
"and": { "$ref": "#/definitions/example-obj-array" },
"value": { "type": "string" }
},
"oneOf": [
{ "required": ["or"] },
{ "required": ["and"] },
{ "required": ["value"] }
]
},
"example-obj-array": {
"type": "array",
"items": { "$ref": "#/definitions/example_obj" }
}
}
}
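With this schema, the root object must carry filter_version plus exactly one of or, and, or value. A quick way to see the oneOf behaviour with two hypothetical instances:

{ "filter_version": "1.0", "value": "subject" }

passes, while

{ "filter_version": "1.0", "or": [], "value": "subject" }

fails, because both the "or" branch and the "value" branch of oneOf are satisfied at the same time.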
P.S. You are using $id incorrectly. I know there is a tool out there that generates schemas like this and causes this confusion. The way $id is used here is a no-op. It doesn't hurt, but it doesn't do anything other than bloat your schema.

After running my logic app, it shows an error message: "We couldn't convert to Number"

I am collecting data from sensors and uploading it to Azure Cosmos DB. My logic app on Azure keeps failing and shows the following message:
{
"status": 400,
"message": "We couldn't convert to Number.\r\n inner exception: We
couldn't convert to Number.\r\nclientRequestId:xxxxxxx",
"source": "sql-ea.azconn-ea.p.azurewebsites.net"
}
Below is the input data from Cosmos DB. I saw that the input data shows "ovf" and "inf". I have tried converting the data type of that column to other data types, like bigint and numeric, and resubmitted, but that still did not fix it.
{
"Device": "DL084",
"Device_Group": "DLL",
"Time": "2019-09-04T11:45:20.0000000",
"acc_x_avg": "ovf",
"acc_x_stdev": "inf",
"acc_y_avg": "3832.88",
"acc_y_stdev": "2850.45",
"acc_z_avg": "13304.38",
"acc_z_stdev": "2289.86",
"cc_volt": "3.900293",
"cp_volt": "1.940371",
"fp_volt": "0.718475",
"id": "xxxxxxxxxxxxxxxxxxxx",
"ls_volt": "4.882698",
"millis": "1073760.00",
"rs_rpm": "0.00",
"smp_volt": "1.070381"
}
I have also tried converting the data to string, and then it shows:
"Failed to execute query. Error: Operand type clash: numeric is incompatible with ntext"
The question is: how can I eliminate this error? And how can I know whether the error is definitely caused by the "inf" and "ovf" values?
The logic app code is below:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_each": {
"actions": {
"Delete_a_document": {
"inputs": {
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "delete",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/docs/#{encodeURIComponent(items('For_each')['id'])}"
},
"runAfter": {
"Insert_row": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"Insert_row": {
"inputs": {
"body": {
"Device": "#{item()['device']}",
"Device_Group": "#{items('For_each')['devicegroup']}",
"Time": "#{addHours(addSeconds('1970-01-01 00:00:00', int(items('For_each')['time'])), 8)}",
"acc_x_avg": "#items('For_each')['acc_x_avg']",
"acc_x_stdev": "#items('For_each')['acc_x_stdev']",
"acc_y_avg": "#items('For_each')['acc_y_avg']",
"acc_y_stdev": "#items('For_each')['acc_y_stdev']",
"acc_z_avg": "#items('For_each')['acc_z_avg']",
"acc_z_stdev": "#items('For_each')['acc_z_stdev']",
"cc_volt": "#items('For_each')['cc_volt']",
"cp_volt": "#items('For_each')['cp_volt']",
"fp_volt": "#items('For_each')['fp_volt']",
"id": "#{guid()}",
"ls_volt": "#items('For_each')['ls_volt']",
"millis": "#items('For_each')['millis']",
"rs_rpm": "#items('For_each')['rs_rpm']",
"smp_volt": "#items('For_each')['smp_volt']"
},
"host": {
"connection": {
"name": "#parameters('$connections')['sql']['connectionId']"
}
},
"method": "post",
"path": "/datasets/default/tables/#{encodeURIComponent(encodeURIComponent('[dbo].[RCD]'))}/items"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"foreach": "#body('Query_documents')?['Documents']",
"runAfter": {
"Query_documents": [
"Succeeded"
]
},
"type": "Foreach"
},
"Query_documents": {
"inputs": {
"body": {
"query": "SELECT \t\t * \nFROM \t\t\tc \nWHERE \t\t\tc.devicegroup = 'DLL' ORDER BY c._ts"
},
"headers": {
"x-ms-max-item-count": 1000
},
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "post",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/query"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"recurrence": {
"frequency": "Minute",
"interval": 30
},
"type": "Recurrence"
}
}
}
}