Azure IoT Hub SQL query - azure-iot-hub

I am trying to query the IoT Hub device twins using the query language. The following code snippet is not working: I am not getting any results. When I replace dt with a hard-coded date, I get the device list. Is it that I can't pass a variable into these queries against the hub? Please help me.
var dt = new Date();
dt.setDate( dt.getDate() - 4 );
console.log(dt);
var query = registry.createQuery('SELECT * FROM devices where lastActivityTime > dt', 100);
var onResults = function(err, results) {
  if (err) {
    console.error('Failed to fetch the results: ' + err.message);
  } else {
    // Do something with the results
    results.forEach(function(twin) {
      console.log(twin.deviceId);
    });
    if (query.hasMoreResults) {
      query.nextAsTwin(onResults);
    }
  }
};

You can achieve what you want by converting the date to an ISO 8601 string and interpolating it with a JavaScript template string - note the use of ` and ' in the example:
var dt = new Date();
dt.setDate( dt.getDate() - 3);
var dateString = dt.toISOString();
var query = registry.createQuery(`SELECT * FROM devices WHERE lastActivityTime > '${dateString}'`, 100);
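With the query created this way, you can kick it off with the same paging callback pattern from the question:
query.nextAsTwin(onResults); // first page; onResults calls nextAsTwin again while query.hasMoreResults is true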

Related

Write rows to BigQuery via nodejs BigQuery Storage Write API

It seems quite new, but I'm hoping someone here has been able to use Node.js to write directly to BigQuery storage using @google-cloud/bigquery-storage.
There is an explanation of how the overall backend API works and how to write a collection of rows atomically using the BigQuery Write API, but no such documentation for Node.js yet. The recent 2.7.0 release notes mention the addition of this feature, but there is no example, and the code is not easily understood.
There is an open issue requesting an example, but I thought I'd try my luck to see if anyone has been able to use this API yet.
Suppose you have a BigQuery table called student with three columns: id, name and age. The following steps will load data into the table with the Node.js Storage Write API.
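If the student table does not exist yet, here is a minimal sketch for creating it with the separate @google-cloud/bigquery client (the my_dataset name is a placeholder; adjust it to your own dataset):
// Hedged sketch: creates the student table assumed in the steps below.
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
async function createStudentTable() {
  // Schema mirrors the proto definition used later: id, name, age
  await bigquery.dataset('my_dataset').createTable('student', {
    schema: 'id:INTEGER, name:STRING, age:INTEGER'
  });
}
createStudentTable().catch(console.error);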
Define a student.proto file as follows:
syntax = "proto2";
message Student {
  required int64 id = 1;
  optional string name = 2;
  optional int64 age = 3;
}
Run the following at the command prompt
protoc --js_out=import_style=commonjs,binary:. student.proto
It should generate a student_pb.js file in the current directory.
Write the following JS code in the current directory and run it:
const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1;
const st = require('./student_pb.js');
const type = require('@google-cloud/bigquery-storage').protos.google.protobuf.FieldDescriptorProto.Type;
const mode = require('@google-cloud/bigquery-storage').protos.google.cloud.bigquery.storage.v1.WriteStream.Type;
const storageClient = new BigQueryWriteClient();
const parent = `projects/${project}/datasets/${dataset}/tables/student`
var writeStream = {type: mode.PENDING}
var student = new st.Student()
var protoDescriptor = {}
protoDescriptor.name = 'student'
protoDescriptor.field = [
  {'name': 'id', 'number': 1, 'type': type.TYPE_INT64},
  {'name': 'name', 'number': 2, 'type': type.TYPE_STRING},
  {'name': 'age', 'number': 3, 'type': type.TYPE_INT64}
]
async function run() {
  try {
    var request = {
      parent,
      writeStream
    }
    var response = await storageClient.createWriteStream(request);
    writeStream = response[0].name
    var serializedRows = []
    // Row 1
    student.setId(1)
    student.setName('st1')
    student.setAge(15)
    serializedRows.push(student.serializeBinary())
    // Row 2
    student.setId(2)
    student.setName('st2')
    student.setAge(15)
    serializedRows.push(student.serializeBinary())
    var protoRows = {
      serializedRows
    }
    var proto_data = {
      writerSchema: {protoDescriptor},
      rows: protoRows
    }
    // Construct request
    request = {
      writeStream,
      protoRows: proto_data
    };
    // Insert rows
    const stream = await storageClient.appendRows();
    stream.on('data', response => {
      console.log(response);
    });
    stream.on('error', err => {
      throw err;
    });
    stream.on('end', async () => {
      /* API call completed */
      try {
        var response = await storageClient.finalizeWriteStream({name: writeStream})
        response = await storageClient.batchCommitWriteStreams({parent, writeStreams: [writeStream]})
      }
      catch (err) {
        console.log(err)
      }
    });
    stream.write(request);
    stream.end();
  }
  catch (err) {
    console.log(err)
  }
}
run();
Make sure your environment variables (e.g. GOOGLE_APPLICATION_CREDENTIALS) are set correctly to point to the file containing your Google Cloud credentials.
Change the project and dataset values accordingly.
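To confirm the committed rows afterwards, here is a minimal sketch using the separate @google-cloud/bigquery client (it assumes the same project and dataset values as above):
// Hedged sketch: reads back the rows committed by the Storage Write API code above.
const {BigQuery} = require('@google-cloud/bigquery');
const bq = new BigQuery();
async function verify() {
  const [rows] = await bq.query(
    `SELECT id, name, age FROM \`${project}.${dataset}.student\` ORDER BY id`
  );
  rows.forEach(row => console.log(row));
}
verify().catch(console.error);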

What is the preferred way of working with dates and JavaScript?

When working with a FaunaDB record that contains a date value, I struggled with using that date in JavaScript. Eventually I got it working like so:
project.shipDate = new Date(await client.query(q.Format('%t', project.shipDate)));
This seems fine, but I also noticed I could do this:
let test = JSON.parse(JSON.stringify(project.created));
console.log(test);
datetest = new Date(test["#date"]);
Which seems wonky (grin), but may be quicker as it's not using the Fauna client library. Which should I prefer?
The JS driver has several helper classes that let you convert JavaScript Date objects to and from their FQL Time and Date counterparts.
Here is an example.
From Fauna to JS
You can create a new document that contains a date value.
const project = await client.query(
  q.Create(
    q.Collection("projects"),
    { data: { shipDate: q.ToDate(q.Now()) } }
  )
)
You can retrieve the date value and use it as a JavaScript Date object via the value property:
const shipDate = new Date(project.data.shipDate.value)
From JS to Fauna
The date value can be modified however you want, and you can pass it back to FQL using the values.FaunaDate class.
const { values } = require('faunadb')
/* ... */
let nextDay = new Date(shipDate.getTime() + 86400000)
const faunaDate = new values.FaunaDate(nextDay)
Full Example
const project = await client.query(
  q.Create(
    q.Collection("projects"),
    { data: { shipDate: q.ToDate(q.Now()) } }
  )
)
console.log(project)
const shipDate = new Date(project.data.shipDate.value)
console.log(shipDate)
let nextDay = new Date(shipDate.getTime() + 86400000) // + 24 hours in milliseconds
console.log(nextDay)
const projectRef = project.ref
const projectUpdate = await client.query(
  q.Update(
    projectRef,
    { data: { shipDate: new values.FaunaDate(nextDay) } }
  )
)
console.log(projectUpdate)
The console output looks like this:
{
  ref: Ref(Collection("projects"), "307924674409398337"),
  ts: 1629918703380000,
  data: { shipDate: Date("2021-08-25") }
}
2021-08-25T00:00:00.000Z
2021-08-26T00:00:00.000Z
{
  ref: Ref(Collection("projects"), "307924674409398337"),
  ts: 1629918703470000,
  data: { shipDate: Date("2021-08-26") }
}
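The driver also has a values.FaunaTime counterpart for timestamps (as opposed to calendar dates). A minimal sketch, assuming a hypothetical createdAt field holding an FQL Time value:
// Hedged sketch: storing and reading back an FQL Time value with values.FaunaTime.
const { values } = require('faunadb')

const doc = await client.query(
  q.Create(
    q.Collection("projects"),
    { data: { createdAt: new values.FaunaTime(new Date().toISOString()) } }
  )
)
// FaunaTime exposes .value as an ISO 8601 string, same pattern as FaunaDate above.
const createdAt = new Date(doc.data.createdAt.value)
console.log(createdAt)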

Get data from multiple sites in Analytics

I need to change this code so that I get data from multiple sites in Analytics, ordered by date.
The code below works perfectly, but only for a single Analytics view (table ID). I need to automate it so it pulls data from multiple sites in the same account.
function start() {
  ScriptApp.newTrigger("getGoogleAnalyticsData").timeBased().everyDays(1).create();
}
function onOpen() {
  var ui = SpreadsheetApp.getUi();
  ui.createMenu("Get external data")
    .addItem("Google Analytics", "getGoogleAnalyticsData")
    .addToUi();
}
function getGoogleAnalyticsData() {
  var date = new Date();
  var startDate = "2020-01-01";
  var endDate = "2020-12-31";
  var tableId = 'ga:201010452';
  var metric = 'ga:totalPublisherRevenue';
  var options = {
    'dimensions': 'ga:date',
    'sort': '-ga:date',
    'filters': 'ga:medium!=organic', // "!=" is the Core Reporting API "does not match" operator
    'max-results': 425
  };
  var report = Analytics.Data.Ga.get(tableId, startDate, endDate, metric, options);
  if (report.rows) {
    var spreadsheet = SpreadsheetApp.getActive();
    var sheet = spreadsheet.getActiveSheet();
    var headers = report.columnHeaders.map(function(columnHeader) {
      return columnHeader.name;
    });
    sheet.appendRow(headers);
    sheet.getRange(2, 1, report.rows.length, headers.length)
      .setValues(report.rows);
  } else {
    Logger.log('No rows returned.');
  }
}
You can use the Analytics Management API (https://developers.google.com/analytics/devguides/config/mgmt/v3) to get the list of profiles (views) in your account, then adapt the code to loop over the views of interest and query each one.
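A minimal sketch of that loop, assuming the Google Analytics advanced service is enabled in the Apps Script project (the account ID is a placeholder):
// Hedged sketch: loops over every view (profile) in one account and appends its rows to the active sheet.
function getDataForAllViews() {
  var accountId = 'YOUR_ACCOUNT_ID'; // placeholder
  var startDate = "2020-01-01";
  var endDate = "2020-12-31";
  var metric = 'ga:totalPublisherRevenue';
  var options = {'dimensions': 'ga:date', 'sort': '-ga:date', 'max-results': 425};
  var sheet = SpreadsheetApp.getActive().getActiveSheet();
  // List all web properties in the account, then all views (profiles) in each property.
  var properties = Analytics.Management.Webproperties.list(accountId).items || [];
  properties.forEach(function(property) {
    var profiles = Analytics.Management.Profiles.list(accountId, property.id).items || [];
    profiles.forEach(function(profile) {
      var report = Analytics.Data.Ga.get('ga:' + profile.id, startDate, endDate, metric, options);
      if (report.rows) {
        // Prefix each row with the view name so the combined output stays readable.
        var rows = report.rows.map(function(row) { return [profile.name].concat(row); });
        sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, rows[0].length).setValues(rows);
      }
    });
  });
}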

BigQuery Result save as google sheets through api

In the Google BigQuery web UI, a query result screen is shown after executing a query, and it has a "Save as Google Sheets" button. I like this feature but would like to automate it: is there a function in the REST API that can do this?
It doesn't seem like there is a straightforward way to do this directly with the BigQuery API. There are a few workarounds though:
You can use the BigQuery API to query your data and then the Google Sheets API to upload it to Google Sheets (see the Node.js sketch after the Apps Script example below).
You can use Google Apps Script. If you go to this link and click on "New Script", you can run the code below. You can adapt this to your needs. You can also add a trigger to run the script every hour/minute …
Here is the code snippet from that link:
function runQuery() {
  // Replace this value with the project ID listed in the Google
  // Cloud Platform project.
  var projectId = 'XXXXXXXX';
  var request = {
    query: 'SELECT TOP(word, 300) AS word, COUNT(*) AS word_count ' +
        'FROM publicdata:samples.shakespeare WHERE LENGTH(word) > 10;'
  };
  var queryResults = BigQuery.Jobs.query(request, projectId);
  var jobId = queryResults.jobReference.jobId;
  // Check on status of the Query Job.
  var sleepTimeMs = 500;
  while (!queryResults.jobComplete) {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId);
  }
  // Get all the rows of results.
  var rows = queryResults.rows;
  while (queryResults.pageToken) {
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {
      pageToken: queryResults.pageToken
    });
    rows = rows.concat(queryResults.rows);
  }
  if (rows) {
    var spreadsheet = SpreadsheetApp.create('BigQuery Results');
    var sheet = spreadsheet.getActiveSheet();
    // Append the headers.
    var headers = queryResults.schema.fields.map(function(field) {
      return field.name;
    });
    sheet.appendRow(headers);
    // Append the results.
    var data = new Array(rows.length);
    for (var i = 0; i < rows.length; i++) {
      var cols = rows[i].f;
      data[i] = new Array(cols.length);
      for (var j = 0; j < cols.length; j++) {
        data[i][j] = cols[j].v;
      }
    }
    sheet.getRange(2, 1, rows.length, headers.length).setValues(data);
    Logger.log('Results spreadsheet created: %s', spreadsheet.getUrl());
  } else {
    Logger.log('No rows returned.');
  }
}
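For the first workaround (BigQuery API plus Sheets API outside of Apps Script), here is a minimal Node.js sketch, assuming credentials with access to both APIs and an existing spreadsheet (the spreadsheet ID and range are placeholders):
// Hedged sketch: query BigQuery, then push the rows into an existing Google Sheet.
const {BigQuery} = require('@google-cloud/bigquery');
const {google} = require('googleapis');

async function exportQueryToSheet() {
  const bigquery = new BigQuery();
  const [rows] = await bigquery.query(
    'SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare` LIMIT 100'
  );

  const auth = await google.auth.getClient({scopes: ['https://www.googleapis.com/auth/spreadsheets']});
  const sheets = google.sheets({version: 'v4', auth});
  // First row holds the column names, the rest hold the values.
  const values = [Object.keys(rows[0])].concat(rows.map(r => Object.values(r)));

  await sheets.spreadsheets.values.update({
    spreadsheetId: 'YOUR_SPREADSHEET_ID', // placeholder
    range: 'Sheet1!A1',
    valueInputOption: 'RAW',
    requestBody: {values}
  });
}
exportQueryToSheet().catch(console.error);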

Worklight JsonStore advanced find

How do I use advanced find in Worklight JSONStore using QueryPart?
I have tried the following code but it's not working properly; I doubt I am calling advancedFind correctly.
var query = WL.JSONStore.QueryPart().equal('age', 35);
var collectionName = "people";
WL.JSONStore.get(collectionName).find(query).then(function(arrayResults) {
  // if data not present, get the data from DB
  if (arrayResults.length == 0) {
  } else {
  }
}).fail(function(errorObject) {
  alert("fail" + errorObject);
  // handle failure
});
You are calling the find() method. The one you want to call is advancedFind(). Also, advancedFind receives an array of query parts, not just one query part. Your code should look like this:
var queryPart = WL.JSONStore.QueryPart().equal('age', 35);
var collectionName = "people";
WL.JSONStore.get(collectionName).advancedFind([queryPart]).then(function(arrayResults) {
  // if data not present, get the data from DB
  if (arrayResults.length == 0) {
  } else {
  }
}).fail(function(errorObject) {
  alert("fail" + errorObject);
  // handle failure
});
For future reference, here is the API documentation along with some examples of how to use the JavaScript JSONStore API.
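As a further illustration: conditions chained on a single QueryPart are combined with AND, while separate QueryPart objects in the array passed to advancedFind act as alternatives (OR). A hedged sketch (the greaterThan/lessThan method names are taken from the JSONStore QueryPart API; verify them against your Worklight version):
// Hedged sketch: two chained conditions on one QueryPart (AND), plus a second QueryPart (OR).
var agePart = WL.JSONStore.QueryPart().greaterThan('age', 30).lessThan('age', 40);
var namePart = WL.JSONStore.QueryPart().equal('name', 'carlos'); // 'carlos' is a placeholder value
WL.JSONStore.get('people').advancedFind([agePart, namePart])
  .then(function(arrayResults) {
    console.log('matches: ' + arrayResults.length);
  })
  .fail(function(errorObject) {
    console.log('advancedFind failed: ' + errorObject);
  });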