Chai to test JSON API output

How can I use Chai to deep parse JSON API output?
JSON API Output:
{
"data": {
"id": "87zc08spyq69",
"type": "data-collector",
"attributes": {
"sensors": [
{
"metadata": {
"sensor_id": 837490,
"unit": "m",
"unit_description": "meters",
"datum": "WGS84",
"offset": -0.001
},
"data": [
{
"time": "2017-12-31T23:59:59Z",
"value": "14.9",
"quality": 4
},
{
"time": "2017-12-31T22:59:59Z",
"value": "12",
"quality": 1
},
{
"time": "2017-12-31T21:59:59Z",
"value": "10",
"quality": 2
}
]
}
]
}
},
"links": {
"self": "/locations/87zc08spyq69/counter/time-series"
}
}
Working So Far:
const chai = require('chai');
const should = chai.should();
const chaiHttp = require('chai-http');
const chaiThings = require('chai-things');
chai.use(chaiHttp);
chai.use(chaiThings);
const uri = '/locations/87zc08spyq69/counter/time-series';
describe('/GET', () => {
it('should get location counter in time-series', done => {
chai.request(server)
.get(uri)
.set('Content-Type', "application/vnd.api+json")
.set('Accept', "application/vnd.api+json")
.end((err, res) => {
if (err) {
console.error(err);
return done(err);
}
should.not.exist(err);
res.status.should.eql(200);
res.type.should.eql("application/vnd.api+json");
res.body.should.have.property('data').has.property('attributes')
.has.property('sensors')
.to.be.an('array')
.should.contain.an('object');
res.body.should.have.property('links').has.property('self').to.be.eql(uri);
done();
});
});
});
I am unable to test any part of the following array:
"data": [
{
"time": "2017-12-31T23:59:59Z",
"value": "14.9",
"quality": 4
},
{
"time": "2017-12-31T22:59:59Z",
"value": "12",
"quality": 1
},
{
"time": "2017-12-31T21:59:59Z",
"value": "10",
"quality": 2
}
]
I have tried chai-things to "deep" test "data", "data/time", "data/value" but I keep getting errors.
How can I test sensors/data, sensors/data/time, sensors/data/value?
Thanks in advance for your help

It's pretty ugly, and there may be a better way to do it, but this worked for me:
res.body.should.have.property('data').have.property('attributes')
.have.property('sensors')
.have.deep.property('[0].data').have.deep.property('[0].time').eql("2017-12-31T23:59:59Z");
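If the long property chain gets hard to read, another option (a sketch inside the same .end callback, assuming Chai 4+, where dotted/bracketed paths use the nested flag instead of deep, plus chai-things for the all assertion) is to pull the array out of the response first and assert on it directly:
const readings = res.body.data.attributes.sensors[0].data;
readings.should.be.an('array').that.is.not.empty;
readings.should.all.have.property('time');     // provided by chai-things
readings.should.all.have.property('value');
readings.should.all.have.property('quality');
readings[0].time.should.eql('2017-12-31T23:59:59Z');
readings[0].value.should.eql('14.9');
// Path-based equivalent in Chai 4+ ('nested' replaced 'deep' for property paths):
res.body.should.have.nested.property('data.attributes.sensors[0].data[0].quality', 4);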


Logic App with Azure Monitor and conditions

I created a workflow with Logic Apps. The goal is to notify a team when a patch is missing for a VM. I use Azure Monitor in the logic app to run the query, and after the Azure Monitor action I put a condition to check whether the query table is empty or has data. If the table is empty, the condition is true, so it doesn't send a notification; when it is false, it sends a notification.
When I run it, I get a logic error. Normally the table has no data, but after the condition the function empty([my_table]) returns false and sends me a notification with the result ("The query yielded no data").
What is the problem?
Thanks
Based on the requirement shared above, we have created the logic app and tested it in our local environment; it is working fine.
Below is the complete logic app code:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"Condition_2": {
"actions": {
"Terminate_2": {
"inputs": {
"runStatus": "Cancelled"
},
"runAfter": {},
"type": "Terminate"
}
},
"else": {
"actions": {
"Send_an_email_(V2)_2": {
"inputs": {
"body": {
"Body": "<p>#{base64ToString(body('Run_query_and_visualize_results')?['body'])}</p>",
"Subject": "list of vm from update management ",
"To": "<UserEmailId>"
},
"host": {
"connection": {
"name": "#parameters('$connections')['office365']['connectionId']"
}
},
"method": "post",
"path": "/v2/Mail"
},
"runAfter": {},
"type": "ApiConnection"
}
}
},
"expression": {
"and": [
{
"equals": [
"#length(body('Run_query_and_visualize_results')?['body'])",
0
]
}
]
},
"runAfter": {
"Run_query_and_visualize_results": [
"Succeeded"
]
},
"type": "If"
},
"Run_query_and_visualize_results": {
"inputs": {
"body": "Update\n| where Classification == 'Security Updates' or Classification == 'Critical Updates'\n| where UpdateState == 'Needed'\n| summarize by Computer,ResourceGroup,Classification,UpdateState\n|sort by Computer",
"host": {
"connection": {
"name": "#parameters('$connections')['azuremonitorlogs']['connectionId']"
}
},
"method": "post",
"path": "/visualizeQuery",
"queries": {
"resourcegroups": "<Resource_group_Name",
"resourcename": "<log analytics workspacename",
"resourcetype": "Log Analytics Workspace",
"subscriptions": "<subcription_id>",
"timerange": "Last 12 hours",
"visType": "Html Table"
}
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"evaluatedRecurrence": {
"frequency": "Hour",
"interval": 3
},
"recurrence": {
"frequency": "Hour",
"interval": 3
},
"type": "Recurrence"
}
}
},
"parameters": {
"$connections": {
"value": {
"azuremonitorlogs": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/azuremonitorlogs",
"connectionName": "azuremonitorlogs",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/azuremonitorlogs"
},
"office365": {
"connectionId": "/subscriptions/<subcription-id>/resourceGroups/<resource-group>/providers/Microsoft.Web/connections/office365",
"connectionName": "office365",
"id": "/subscriptions/<subcription-id>/providers/Microsoft.Web/locations/northcentralus/managedApis/office365"
}
}
}
}
}
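Note the design choice in the condition above: rather than calling empty() on the query output, it compares length(body('Run_query_and_visualize_results')?['body']) with 0, terminating the run (Cancelled) when the length is 0 and sending the notification mail only in the else branch.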
Please find the reference output of a sample run of the above logic app:

After running my logic app, it shows an error message: "We couldn't convert to Number."

I am collecting data from sensors and uploading it to Azure Cosmos DB. My logic app on Azure keeps failing and shows the following message:
{
"status": 400,
"message": "We couldn't convert to Number.\r\n inner exception: We
couldn't convert to Number.\r\nclientRequestId:xxxxxxx",
"source": "sql-ea.azconn-ea.p.azurewebsites.net"
}
Below is the input data from Cosmos DB. I noticed that the input data contains "ovf" and "inf". I have tried converting the data type of that column to other data types like bigint and numeric and resubmitted, but that still did not fix it.
{
"Device": "DL084",
"Device_Group": "DLL",
"Time": "2019-09-04T11:45:20.0000000",
"acc_x_avg": "ovf",
"acc_x_stdev": "inf",
"acc_y_avg": "3832.88",
"acc_y_stdev": "2850.45",
"acc_z_avg": "13304.38",
"acc_z_stdev": "2289.86",
"cc_volt": "3.900293",
"cp_volt": "1.940371",
"fp_volt": "0.718475",
"id": "xxxxxxxxxxxxxxxxxxxx",
"ls_volt": "4.882698",
"millis": "1073760.00",
"rs_rpm": "0.00",
"smp_volt": "1.070381"
}
I have also tried converting the data to a string, and then it shows:
"Failed to execute query. Error: Operand type clash: numeric is incompatible with ntext"
How can I eliminate this error? And how can I confirm that the error is definitely caused by the "inf" and "ovf" values?
The logic app code is below:
{
"definition": {
"$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
"actions": {
"For_each": {
"actions": {
"Delete_a_document": {
"inputs": {
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "delete",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/docs/#{encodeURIComponent(items('For_each')['id'])}"
},
"runAfter": {
"Insert_row": [
"Succeeded"
]
},
"type": "ApiConnection"
},
"Insert_row": {
"inputs": {
"body": {
"Device": "#{item()['device']}",
"Device_Group": "#{items('For_each')['devicegroup']}",
"Time": "#{addHours(addSeconds('1970-01-01 00:00:00', int(items('For_each')['time'])), 8)}",
"acc_x_avg": "#items('For_each')['acc_x_avg']",
"acc_x_stdev": "#items('For_each')['acc_x_stdev']",
"acc_y_avg": "#items('For_each')['acc_y_avg']",
"acc_y_stdev": "#items('For_each')['acc_y_stdev']",
"acc_z_avg": "#items('For_each')['acc_z_avg']",
"acc_z_stdev": "#items('For_each')['acc_z_stdev']",
"cc_volt": "#items('For_each')['cc_volt']",
"cp_volt": "#items('For_each')['cp_volt']",
"fp_volt": "#items('For_each')['fp_volt']",
"id": "#{guid()}",
"ls_volt": "#items('For_each')['ls_volt']",
"millis": "#items('For_each')['millis']",
"rs_rpm": "#items('For_each')['rs_rpm']",
"smp_volt": "#items('For_each')['smp_volt']"
},
"host": {
"connection": {
"name": "#parameters('$connections')['sql']['connectionId']"
}
},
"method": "post",
"path": "/datasets/default/tables/#{encodeURIComponent(encodeURIComponent('[dbo].[RCD]'))}/items"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"foreach": "#body('Query_documents')?['Documents']",
"runAfter": {
"Query_documents": [
"Succeeded"
]
},
"type": "Foreach"
},
"Query_documents": {
"inputs": {
"body": {
"query": "SELECT \t\t * \nFROM \t\t\tc \nWHERE \t\t\tc.devicegroup = 'DLL' ORDER BY c._ts"
},
"headers": {
"x-ms-max-item-count": 1000
},
"host": {
"connection": {
"name": "#parameters('$connections')['documentdb']['connectionId']"
}
},
"method": "post",
"path": "/dbs/#{encodeURIComponent('iot')}/colls/#{encodeURIComponent('messages')}/query"
},
"runAfter": {},
"type": "ApiConnection"
}
},
"contentVersion": "1.0.0.0",
"outputs": {},
"parameters": {
"$connections": {
"defaultValue": {},
"type": "Object"
}
},
"triggers": {
"Recurrence": {
"recurrence": {
"frequency": "Minute",
"interval": 30
},
"type": "Recurrence"
}
}
}
}
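For reference, values such as "ovf" and "inf" cannot be parsed as numbers, which is consistent with the "We couldn't convert to Number" message. A minimal Node.js sketch (the sanitizeReading helper and the field list are illustrative assumptions, not part of the original workflow) that maps unparseable readings to null before they would reach the Insert row step:
// Illustrative only: normalise sensor readings so numeric columns never
// receive strings like 'ovf' or 'inf'.
const numericFields = [
  'acc_x_avg', 'acc_x_stdev', 'acc_y_avg', 'acc_y_stdev',
  'acc_z_avg', 'acc_z_stdev', 'cc_volt', 'cp_volt', 'fp_volt',
  'ls_volt', 'millis', 'rs_rpm', 'smp_volt'
];

function sanitizeReading(doc) {
  const clean = Object.assign({}, doc);
  for (const field of numericFields) {
    if (!(field in clean)) continue;
    const n = Number(clean[field]);   // Number('ovf') and Number('inf') are both NaN
    clean[field] = Number.isFinite(n) ? n : null;
  }
  return clean;
}

console.log(sanitizeReading({ acc_x_avg: 'ovf', acc_x_stdev: 'inf', acc_y_avg: '3832.88' }));
// -> { acc_x_avg: null, acc_x_stdev: null, acc_y_avg: 3832.88 }
The same check could also be done inside the workflow with a per-field condition, but cleaning the documents before they are written to Cosmos DB keeps the Logic App mapping simple.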

Pass an already existing Model to next in Loopback

There is a Project model:
{
"name": "Project",
"plural": "Projects",
"base": "PersistedModel",
"idInjection": true,
"options": {
"validateUpsert": true
},
"properties": {
"title": {
"type": "string",
"required": true
},
"description": {
"type": "string"
},
"code": {
"type": "string"
},
"startDate": {
"type": "date",
"required": true
},
"endDate": {
"type": "date"
},
"value": {
"type": "number"
},
"infoEN": {
"type": "string"
},
"infoRU": {
"type": "string"
},
"infoAM": {
"type": "string"
},
"externalLinks": {
"type": [
"string"
]
}
},
"validations": [],
"relations": {
"industry": {
"type": "belongsTo",
"model": "Industry",
"foreignKey": "",
"options": {
"nestRemoting": true
}
},
"service": {
"type": "belongsTo",
"model": "Service",
"foreignKey": "",
"options": {
"nestRemoting": true
}
},
"tags": {
"type": "hasAndBelongsToMany",
"model": "Tag",
"foreignKey": "",
"options": {
"nestRemoting": true
}
}
},
"acls": [],
"methods": {}
}
And it hasAndBelongsToMany Tags.
Here is the Tag model:
{
"name": "Tag",
"plural": "Tags",
"base": "PersistedModel",
"idInjection": true,
"options": {
"validateUpsert": true
},
"properties": {
"name": {
"type": "string",
"required": true
}
},
"validations": [],
"relations": {},
"acls": [],
"methods": {}
}
Now when the relation is created, the LoopBack API exposes this endpoint:
POST /Projects/{id}/tags
This creates a new tag in the tags collection and adds it to the project.
But what about adding an already existing tag to the project?
So I figured maybe I'd add a before save hook to Tag.
Here I'll check whether the tag exists and then pass the existing one on for the relation.
Something like this:
tag.js
'use strict';
module.exports = function(Tag) {
Tag.observe('before save', function(ctx, next) {
console.log(ctx.instance);
Tag.find({where: {name: ctx.instance.name}});
next();
});
// Tag.validatesUniquenessOf('name', {message: 'name is not unique'});
};
@HaykSafaryan: this is just a demo to show you how to use Tag inside Project.
var app = require('../../server/server');
module.exports = function(project) {
var tag = app.models.Tag;
// afterRemote is just a demo; you can use any method
project.afterRemote('create', function(ctx, next) {
tag.find({where: {name: ctx.instance.name}}, function(err, result) {
if (err) return next(err);
next();
});
});
};
This is just example code to show you how to use update, create, find, upsertWithWhere, etc. on Tag. For validation you have to set up the condition here yourself; it will not pick up the validation you defined in the Tag model.
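For the original question of attaching an already existing tag, another option worth sketching (against the LoopBack 3 Node API, assuming the hasAndBelongsToMany relation named tags from project.json above; addTagByName is a hypothetical custom remote method, not something LoopBack generates) is to resolve the tag first and then link it through the relation accessor instead of POSTing to /Projects/{id}/tags:
// common/models/project.js - sketch only
'use strict';
module.exports = function(Project) {
  // Hypothetical remote method: attach an existing tag (or create it once) by name.
  Project.addTagByName = function(id, name, cb) {
    const Tag = Project.app.models.Tag;
    Project.findById(id, function(err, project) {
      if (err) return cb(err);
      if (!project) return cb(new Error('Project not found'));
      // findOrCreate returns the existing Tag when one with this name already exists.
      Tag.findOrCreate({where: {name: name}}, {name: name}, function(err, tag) {
        if (err) return cb(err);
        // tags.add() only writes the join entry; it does not duplicate the Tag.
        project.tags.add(tag, function(err) {
          cb(err, tag);
        });
      });
    });
  };

  Project.remoteMethod('addTagByName', {
    accepts: [
      {arg: 'id', type: 'string', required: true},
      {arg: 'name', type: 'string', required: true},
    ],
    returns: {arg: 'tag', type: 'object', root: true},
    http: {path: '/:id/tagByName', verb: 'post'},
  });
};
With the relation in place, LoopBack should also expose PUT /Projects/{id}/tags/rel/{fk} for linking an existing tag by id, which avoids custom code when the client already knows the tag id.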

Transform json data for DataTables

I have data which looks like this.
{
"data": [
{
"c1": "datapt00",
"size": 40
},
{
"c1": "datapt001",
"size": 80
}
]
}
In the HTML I am doing:
$(document).ready(function() {
$('#example').DataTable({
"ajax": {
"url": "/data"
}
});
});
Is there an easy way to manipulate my data so it's DataTables compliant?
This should do it:
var jsonData = {
"data": [
{
"c1": "datapt00",
"size": 40
},
{
"c1": "datapt001",
"size": 80
}
]
};
$('#example').DataTable({
"ajax": {
"type": 'POST',
"dataType": 'json',
"url": '/echo/json/',
"data": {
"json": JSON.stringify(jsonData)
},
"dataSrc": "data"
},
"columns": [{
"data": "c1"
}, {
"data": "size"
}]
});
Working JSFiddle here: https://jsfiddle.net/annoyingmouse/70d01vo0/
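Since the feed in the question already wraps its rows in a "data" property, which is DataTables' default dataSrc, another option (a sketch, assuming the /data endpoint returns exactly the object shown in the question) is to keep the original ajax URL and just declare the columns:
$(document).ready(function() {
  $('#example').DataTable({
    "ajax": {
      "url": "/data"   // the response's "data" array is picked up automatically
    },
    "columns": [
      { "data": "c1" },
      { "data": "size" }
    ]
  });
});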

How to use a nested json-based formation value in the jQuery.dataTables?

Now suppose I have a JSON data structure like the following:
{
"ServiceName": "cacheWebApi",
"Description": "This is a CacheWebApiService",
"IsActive": true,
"Urls": [{ "ServiceAddress": "http://192.168.111.210:8200", "Weight": 5, "IsAvailable": true },
{ "ServiceAddress": ",http://192.168.111.210:8200", "Weight": 3, "IsAvailable": true }]
}
What worries me is that "Urls" is another nested JSON structure. So how do I bind this value to the DataTable? Have you got any good ideas (e.g. something like showing only all the ServiceAddress values)?
This should do what you need:
var data = [{
"ServiceName": "cacheWebApi",
"Description": "This is a CacheWebApiService",
"IsActive": true,
"Urls": [
{
"ServiceAddress": "http://192.168.111.210:8200",
"Weight": 5,
"IsAvailable": true
},
{
"ServiceAddress": ",http://192.168.111.210:8200",
"Weight": 3,
"IsAvailable": true
}
]
}];
$(function() {
var table = $('#example').dataTable({
"data": data,
"columns": [
{
"data": "ServiceName"
}, {
"data": "Description"
}, {
"data": "IsActive"
}, {
"data": "Urls[0].ServiceAddress"
}, {
"data": "Urls[0].Weight"
}, {
"data": "Urls[0].IsAvailable"
}, {
"data": "Urls[1].ServiceAddress"
}, {
"data": "Urls[1].Weight"
}, {
"data": "Urls[1].IsAvailable"
}
],
});
});
You should put your data in an array though. Working JSFiddle
EDIT
If the number of Urls isn't fixed, then you could do something like this:
var table = $('#example').dataTable({
"data": data,
"columns": [
{
"data": "ServiceName"
}, {
"data": "Description"
}, {
"data": "IsActive"
}, {
"data": "Urls",
"render": function(d){
return JSON.stringify(d);
}
}
],
});
I guess that isn't brilliant, but you could do almost anything in that function, for instance:
var table = $('#example').dataTable({
"data": data,
"columns": [
{
"data": "ServiceName"
}, {
"data": "Description"
}, {
"data": "IsActive"
}, {
"data": "Urls",
"render": function(d){
return d.map(function(c){
return c.ServiceAddress
}).join(", ");
}
}
],
});
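If the addresses should be clickable rather than a plain comma-separated string, the same render hook can return markup for the cell (a sketch with the same data and assumptions as above):
var table = $('#example').dataTable({
  "data": data,
  "columns": [
    { "data": "ServiceName" },
    { "data": "Description" },
    { "data": "IsActive" },
    {
      "data": "Urls",
      "render": function(d) {
        // one anchor per entry; DataTables inserts the returned HTML into the cell
        return d.map(function(c) {
          return '<a href="' + c.ServiceAddress + '">' + c.ServiceAddress + '</a>';
        }).join('<br>');
      }
    }
  ]
});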