I'm trying to work out the correct JSON payload for consuming, over the JSON REST API, a TensorFlow Serving model that is supposed to take as input an array of arrays of two floats.
Here is the metadata:
{
"model_spec": {
"name": "saved_model.pb",
"signature_name": "",
"version": "3"
},
"metadata": {
"signature_def": {
"signature_def": {
"serving_default": {
"inputs": {
"inputs": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "-1",
"name": ""
},
{
"size": "23",
"name": ""
},
{
"size": "2",
"name": ""
}
],
"unknown_rank": false
},
"name": "lstm_1_input:0"
}
},
"outputs": {
"prediction": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "-1",
"name": ""
},
{
"size": "1",
"name": ""
}
],
"unknown_rank": false
},
"name": "dense_1/BiasAdd:0"
}
},
"method_name": "tensorflow/serving/predict"
}
}
}
}
}
I have tried many different JSON structures, but so far TensorFlow Serving has accepted none of them.
What is needed to uniquely identify the necessary payload?
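From the metadata above, the input tensor "inputs" has shape [-1, 23, 2]: a variable batch dimension followed by 23 rows of 2 floats. For reference, a minimal sketch of a payload matching that signature, using the TensorFlow Serving REST "row" format (the endpoint URL and model name here are assumptions; substitute your own):
import json
import numpy as np
import requests
# One instance must match the non-batch dimensions [23, 2];
# the leading -1 (batch) is implied by the length of the "instances" list.
instance = np.zeros((23, 2), dtype=np.float32)  # placeholder data
payload = json.dumps({
    "signature_name": "serving_default",  # optional; this is the default
    "instances": [instance.tolist()]      # overall shape [1, 23, 2]
})
# Hypothetical endpoint; adjust host, port and model name to your deployment.
resp = requests.post(
    "http://localhost:8501/v1/models/saved_model:predict",
    data=payload,
    headers={"content-type": "application/json"})
print(resp.json())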
I need to set the datatype for an Additional Column with Dynamic Content in the Sink in ADF.
By default it takes nvarchar(max) from the JSON object, but I need bigint.
Below is the JSON object which creates the table with the additional column:
{
"source": {
"type": "SqlServerSource",
"additionalColumns": [
{
"name": "ApplicationId",
"value": 3604509277250831000
}
],
"sqlReaderQuery": "SELECT * from Table A",
"queryTimeout": "02:00:00",
"isolationLevel": "ReadUncommitted",
"partitionOption": "None"
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false,
"tableOption": "autoCreate",
"disableMetricsCollection": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"typeConversion": true,
"typeConversionSettings": {
"allowDataTruncation": true,
"treatBooleanAsNumber": false
}
}
}
ADF configuration (screenshot).
After the table is created, the database shows the column with its default datatype (screenshot).
If I convert the dynamic content to an int,
@int(pipeline().parameters.application.applicationId)
then I get the warning below (screenshot).
Please let me know how I can set the datatype in ADF.
I also tried the same and got the same result.
By default it takes nvarchar(max) from the JSON object, but I need bigint.
To resolve this: when you add an additional column in your source dataset, go to Mapping and click on Import schema. It will import the schema of the source and will also include the additional column in the schema; change the type of that column to Int64, as shown in the image below. (In the image, "additional" appears after the name, which means it is an additional column.)
After this, run your pipeline. It will create the additional column with datatype bigint. Here is the complete pipeline JSON for reference:
{
"name": "pipeline2",
"properties": {
"activities": [
{
"name": "Copy data1",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "0.12:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "JsonSource",
"additionalColumns": [
{
"name": "name",
"value": {
"value": "#pipeline().parameters.demo.age",
"type": "Expression"
}
}
],
"storeSettings": {
"type": "AzureBlobFSReadSettings",
"recursive": true,
"enablePartitionDiscovery": false
},
"formatSettings": {
"type": "JsonReadSettings"
}
},
"sink": {
"type": "AzureSqlSink",
"writeBehavior": "insert",
"sqlWriterUseTableLock": false,
"tableOption": "autoCreate",
"disableMetricsCollection": false
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"mappings": [
{
"source": {
"path": "$['taskId']"
},
"sink": {
"name": "taskId",
"type": "String"
}
},
{
"source": {
"path": "$['taskObtainedScore']"
},
"sink": {
"name": "taskObtainedScore",
"type": "String"
}
},
{
"source": {
"path": "$['multiInstance']"
},
"sink": {
"name": "multiInstance",
"type": "String"
}
},
{
"source": {
"path": "$['name']"
},
"sink": {
"name": "name",
"type": "Int64"
}
}
],
"collectionReference": ""
}
},
"inputs": [
{
"referenceName": "Json1",
"type": "DatasetReference"
}
],
"outputs": [
{
"referenceName": "AzureSqlTable1",
"type": "DatasetReference"
}
]
}
],
"parameters": {
"demo": {
"type": "object",
"defaultValue": {
"name": "John",
"age": 30,
"isStudent": true
}
}
},
"annotations": []
}
}
OUTPUT (screenshot): the additional column is created with datatype bigint.
I'm having trouble serving a TensorFlow object detection model. I trained a model from the TensorFlow model repo and have set up a tensorflow-serving instance, but when I make a request there's an issue with dimensionality. I'm using the tolist() method to convert the numpy array of the image into something the JSON encoder can use. tolist() appears to maintain the structure of the numpy array by nesting lists inside each other, so I'm not sure where tf-serving is getting a tensor with shape [339450,3]. Do I have to specify the shape of the image when I make the request?
The error:
Data: {"signature_name": "serving_default", "instances": ... 58, 63], [35, 59, 63], [37, 58, 63], [43, 67, 71]]]}
{'error': 'Specified a list with shape [?,?,3] from a tensor with shape [339450,3]\n\t [[{{function_node __inference_call_func_9686}}{{node map/TensorArrayUnstack/TensorListFromTensor}}]]'}
The code to make the request:
import requests
import json
from PIL import Image
import numpy
# Load image
img = Image.open("Hilarious-Car-License-Plates-1.jpg")
img_np = numpy.array(img.getdata())
img_np.resize(tuple([1] + list(img_np.shape)))
data = json.dumps({"signature_name": "serving_default", "instances": img_np.tolist()})
print('Data: {} ... {}'.format(data[:50], data[len(data)-52:]))
headers = {"content-type": "application/json"}
json_response = requests.post('http://localhost:8501/v1/models/plate_detect:predict', data=data, headers=headers)
response = json.loads(json_response.text)
print(response)
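An editorial aside on the dimensionality (not part of the original question): PIL's Image.getdata() returns a flat pixel sequence, so numpy.array(img.getdata()) has shape [height * width, 3], which is exactly where the [339450, 3] tensor comes from. A minimal sketch, assuming the same file and request as above, that preserves the [1, height, width, 3] layout the signature expects:
import json
import numpy as np
from PIL import Image
img = Image.open("Hilarious-Car-License-Plates-1.jpg")
# numpy.array(img) keeps the 2-D pixel grid, shape [height, width, 3],
# whereas img.getdata() flattens it to [height * width, 3].
img_np = np.array(img, dtype=np.uint8)
# Add the batch dimension the signature expects: [1, height, width, 3].
batched = img_np[np.newaxis, ...]
data = json.dumps({"signature_name": "serving_default",
                   "instances": batched.tolist()})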
Model metadata:
{
"model_spec":{
"name": "plate_detect",
"signature_name": "",
"version": "1"
}
,
"metadata": {"signature_def": {
"signature_def": {
"serving_default": {
"inputs": {
"input_tensor": {
"dtype": "DT_UINT8",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "-1",
"name": ""
},
{
"size": "-1",
"name": ""
},
{
"size": "3",
"name": ""
}
],
"unknown_rank": false
},
"name": "serving_default_input_tensor:0"
}
},
"outputs": {
"detection_boxes": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "100",
"name": ""
},
{
"size": "4",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:1"
},
"raw_detection_boxes": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "1917",
"name": ""
},
{
"size": "4",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:6"
},
"detection_scores": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "100",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:4"
},
"raw_detection_scores": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "1917",
"name": ""
},
{
"size": "2",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:7"
},
"detection_anchor_indices": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "100",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:0"
},
"detection_multiclass_scores": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "100",
"name": ""
},
{
"size": "2",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:3"
},
"detection_classes": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
},
{
"size": "100",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:2"
},
"num_detections": {
"dtype": "DT_FLOAT",
"tensor_shape": {
"dim": [
{
"size": "1",
"name": ""
}
],
"unknown_rank": false
},
"name": "StatefulPartitionedCall:5"
}
},
"method_name": "tensorflow/serving/predict"
},
"__saved_model_init_op": {
"inputs": {},
"outputs": {
"__saved_model_init_op": {
"dtype": "DT_INVALID",
"tensor_shape": {
"dim": [],
"unknown_rank": true
},
"name": "NoOp"
}
},
"method_name": ""
}
}
}
}
}
I managed to fix this by switching to https://tfhub.dev/tensorflow/ssd_mobilenet_v2/2
I saved my model like this:
import time
import tensorflow as tf
import tensorflow_hub as hub
MODULE_HANDLE = 'https://tfhub.dev/tensorflow/ssd_mobilenet_v2/2'
# Version directories for tensorflow-serving are numeric; a Unix timestamp works.
ts = int(time.time())
detector = hub.load(MODULE_HANDLE)
file_path = "./models/object_detector/{}/".format(str(ts))
tf.saved_model.save(detector, file_path)
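A note on the directory layout (editorial addition): tensorflow-serving watches the model base path and serves the highest-numbered version subdirectory, so saving under a fresh Unix-timestamp directory as above makes the new version get picked up automatically.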
We are currently using the GA4 Data API and are facing an issue where custom dimensions return the value "(not set)".
We used an article describing how to set a custom dimension for the session count, but we still receive "(not set)" values.
Example of request:
{
"dateRanges": [
{
"startDate": "2021-09-01",
"endDate": "2021-09-05"
}
],
"offset": 0,
"limit": 100,
"dimensionFilter": {
"filter": {
"fieldName": "eventName",
"stringFilter": {
"matchType": 1,
"value": "screen_view",
"caseSensitive": true
}
}
},
"dimensions": [
{
"name": "customUser:applicationID"
},
{
"name": "customEvent:ga_session_number"
},
{
"name": "dateHour"
},
{
"name": "platform"
},
{
"name": "sessionSource"
},
{
"name": "sessionMedium"
},
{
"name": "sessionCampaignName"
},
{
"name": "deviceCategory"
}
],
"metrics": [
{
"name": "userEngagementDuration"
}
]
}
Does anybody have any idea why this may happen?
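For reference, a sketch of the same request issued through the google-analytics-data Python client (the property ID is a placeholder and the dimension list is abbreviated; the remaining dimensions follow the JSON above). Note that matchType 1 corresponds to EXACT:
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (
    DateRange, Dimension, Filter, FilterExpression, Metric, RunReportRequest)
client = BetaAnalyticsDataClient()  # uses application default credentials
request = RunReportRequest(
    property="properties/<property-id>",  # placeholder
    date_ranges=[DateRange(start_date="2021-09-01", end_date="2021-09-05")],
    dimension_filter=FilterExpression(filter=Filter(
        field_name="eventName",
        string_filter=Filter.StringFilter(
            match_type=Filter.StringFilter.MatchType.EXACT,  # matchType 1
            value="screen_view",
            case_sensitive=True))),
    dimensions=[Dimension(name="customUser:applicationID"),
                Dimension(name="customEvent:ga_session_number")],
    metrics=[Metric(name="userEngagementDuration")],
    offset=0,
    limit=100)
response = client.run_report(request)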
I created a workflow with Logic Apps. The goal is to notify a team when a patch is missing for a VM. I use Azure Monitor in the logic app to set the query, and after the Azure Monitor action I added a condition to check whether the query's result table is empty or has data: if the table is empty the condition is true, so it doesn't send a notification, and when it is false, it sends a notification.
When I run it, I get a logic error. Normally the table has no data, but after the condition, the function empty([my_table]) returns false and sends me the notification with the result ("The query yielded no data").
What is the problem?
Thanks
Based on the above shared requirement, we created the logic app and tested it in our local environment; it is working fine.
Below is the complete logic app code:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Condition_2": {
"actions": {
"Terminate_2": {
"inputs": {
"runStatus": "Cancelled"
},
"runAfter": {},
"type": "Terminate"
}
},
"else": {
"actions": {
"Send_an_email_(V2)_2": {
"inputs": {
"body": {
"Body": "<p>#{base64ToString(body('Run_query_and_visualize_results')?['body'])}</p>",
"Subject": "list of vm from update management ",
"To": "<UserEmailId>"
},
"host": {
"connection": {
"name": "#parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {},
"type": "ApiConnection"
}
}
},
"expression": {
"and": [
{
"equals": [
"#length(body('Run_query_and_visualize_results')?['body'])",
0
]
}
]
},
"runAfter": {
"Run_query_and_visualize_results": [
"Succeeded"
]
},
"type": "If"
},
"Run_query_and_visualize_results": {
"inputs": {
"body": "Update\n| where Classification == 'Security Updates' or Classification == 'Critical Updates'\n| where UpdateState == 'Needed'\n| summarize by Computer,ResourceGroup,Classification,UpdateState\n|sort by Computer",
"host": {
"connection": {
"name": "#parameters('$connections')['azuremonitorlogs']['connectionId']"
}
},
"method": "post",
"path": "/visualizeQuery",
"queries": {
"resourcegroups": "<Resource_group_Name",
"resourcename": "<log analytics workspacename",
"resourcetype": "Log Analytics Workspace",
"subscriptions": "<subcription_id>",
"timerange": "Last 12 hours",
"visType": "Html Table"
}
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"evaluatedRecurrence": {
"frequency": "Hour",
"interval": 3
},
"recurrence": {
"frequency": "Hour",
"interval": 3
},
"type": "Recurrence"
}
}
},
"parameters": {
"$connections": {
"value": {
"azuremonitorlogs": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/azuremonitorlogs",
"connectionName": "azuremonitorlogs",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/azuremonitorlogs"
},
"office365": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/office365",
"connectionName": "office365",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/office365"
}
}
}
}
}
Please find the reference output of the above logic app sample run (screenshot).
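A note on why the original empty() check evaluated to false (an editorial aside, not part of the original answer): the visualizeQuery action returns a rendered HTML table in body('Run_query_and_visualize_results')?['body'], and even when the query yields no rows that HTML still carries the message "The query yielded no data", so the body is never an empty value. The sample above instead terminates the run when @length(body('Run_query_and_visualize_results')?['body']) equals 0 and sends the email otherwise.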
I am collecting data from sensors and uploading it to Azure Cosmos DB. My logic app on Azure keeps failing and shows the following message:
{
"status": 400,
"message": "We couldn't convert to Number.\r\n inner exception: We couldn't convert to Number.\r\nclientRequestId:xxxxxxx",
"source": "sql-ea.azconn-ea.p.azurewebsites.net"
}
Below is the input data from Cosmos DB. I saw that the input data contains "ovf" and "inf". I have tried converting the data type of that column to other data types like bigint and numeric and resubmitted; that still did not fix it.
{
"Device": "DL084",
"Device_Group": "DLL",
"Time": "2019-09-04T11:45:20.0000000",
"acc_x_avg": "ovf",
"acc_x_stdev": "inf",
"acc_y_avg": "3832.88",
"acc_y_stdev": "2850.45",
"acc_z_avg": "13304.38",
"acc_z_stdev": "2289.86",
"cc_volt": "3.900293",
"cp_volt": "1.940371",
"fp_volt": "0.718475",
"id": "xxxxxxxxxxxxxxxxxxxx",
"ls_volt": "4.882698",
"millis": "1073760.00",
"rs_rpm": "0.00",
"smp_volt": "1.070381"
}
I have also tried converting the data to a string, and then it shows:
"Failed to execute query. Error: Operand type clash: numeric is incompatible with ntext"
The question is: how can I eliminate this error? And how can I know whether the error is definitely due to the "inf" and "ovf" values?
The logic app code is below:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_each": {
"actions": {
"Delete_a_document": {
"inputs": {
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "delete",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/docs/#{encodeURIComponent(items('For_each')['id'])}"
},
"runAfter": {
"Insert_row": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"Insert_row": {
"inputs": {
"body": {
"Device": "#{item()['device']}",
"Device_Group": "#{items('For_each')['devicegroup']}",
"Time": "#{addHours(addSeconds('1970-01-01 00:00:00', int(items('For_each')['time'])), 8)}",
"acc_x_avg": "#items('For_each')['acc_x_avg']",
"acc_x_stdev": "#items('For_each')['acc_x_stdev']",
"acc_y_avg": "#items('For_each')['acc_y_avg']",
"acc_y_stdev": "#items('For_each')['acc_y_stdev']",
"acc_z_avg": "#items('For_each')['acc_z_avg']",
"acc_z_stdev": "#items('For_each')['acc_z_stdev']",
"cc_volt": "#items('For_each')['cc_volt']",
"cp_volt": "#items('For_each')['cp_volt']",
"fp_volt": "#items('For_each')['fp_volt']",
"id": "#{guid()}",
"ls_volt": "#items('For_each')['ls_volt']",
"millis": "#items('For_each')['millis']",
"rs_rpm": "#items('For_each')['rs_rpm']",
"smp_volt": "#items('For_each')['smp_volt']"
},
"host": {
"connection": {
"name": "#parameters('$connections')['sql']['connectionId']"
}
},
"method": "post",
"path": "/datasets/default/tables/#{encodeURIComponent(encodeURIComponent('[dbo].[RCD]'))}/items"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"foreach": "#body('Query_documents')?['Documents']",
"runAfter": {
"Query_documents": [
"Succeeded"
]
},
"type": "Foreach"
},
"Query_documents": {
"inputs": {
"body": {
"query": "SELECT \t\t * \nFROM \t\t\tc \nWHERE \t\t\tc.devicegroup = 'DLL' ORDER BY c._ts"
},
"headers": {
"x-ms-max-item-count": 1000
},
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "post",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/query"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"recurrence": {
"frequency": "Minute",
"interval": 30
},
"type": "Recurrence"
}
}
}
}
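One possible direction (an editorial sketch, not from the original thread): "ovf" and "inf" look like overflow/infinity sentinel strings emitted by the sensor firmware rather than numbers, so the SQL connector's numeric conversion will fail on any row containing them. The Insert_row body could screen them out before the insert, for example by mapping each affected value to null with an expression along the lines of @if(or(equals(items('For_each')['acc_x_avg'], 'ovf'), equals(items('For_each')['acc_x_avg'], 'inf')), null, items('For_each')['acc_x_avg']) per column. To check whether these values are really the cause, resubmit a single document whose fields are all numeric strings and see whether the insert succeeds.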