I am trying to teach myself Node.js and Express.js, but coming from Java and C++ this is proving difficult to get used to.
I made a simple (and messy) module that is supposed to return a weather forecast for a given zip code.
It works by taking the user's zip code and calling a Google API to generate the geo coordinates for that zip code. I read the coordinates from the JSON response and then pass them to the next API call, this time to the forecast.io API, where the weather data for the location is again read from a JSON response.
Coming from Java, and without a solid background in JavaScript, I am having a hard time making these two functions wait for one another: the Google API call has to finish first because the second call needs the coordinates it provides. Can someone take a look at this code and tell me whether the strategy I used is correct, or suggest how situations like this are usually handled in JavaScript?
Here is the code:
// The required modules.
var http = require("http");
var https = require("https");
//result object
var resultSet = {
latitude :"",
longitude:"",
localInfo:"",
weather:"",
humidity:"",
pressure:"",
time:""
};
//print out error messages
function printError(error){
console.error(error.message);
}
//Forecast API required information:
//key for the forecast IO app
var forecast_IO_Key = "this is my key, not publishing for security reasons";
var forecast_IO_Web_Adress = "https://api.forecast.io/forecast/";
//Create Forecast request string function
function createForecastRequest(latitude, longitude){
var request = forecast_IO_Web_Adress + forecast_IO_Key + "/"
+ latitude +"," + longitude;
return request;
}
//Google GEO API required information:
//Create Google Geo Request
var google_GEO_Web_Adress = "https://maps.googleapis.com/maps/api/geocode/json?address=";
function createGoogleGeoMapRequest(zipCode){
var request = google_GEO_Web_Adress+zipCode + "&sensor=false";
return request;
}
function get(zipCode){
// 1- Need to request google for geo locations using a given zip
var googleRequest = https.get(createGoogleGeoMapRequest(zipCode), function(response){
//console.log(createGoogleGeoMapRequest(zipCode));
var body = "";
var status = response.statusCode;
//a- Read the data.
response.on("data", function(chunk){
body+=chunk;
});
//b- Parse the data.
response.on("end", function(){
if(status === 200){
try{
var coordinates = JSON.parse(body);
resultSet.latitude = coordinates.results[0].geometry.location.lat;
resultSet.longitude = coordinates.results[0].geometry.location.lng;
resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
coordinates.results[0].address_components[1].long_name + ", " +
coordinates.results[0].address_components[2].long_name + ", " +
coordinates.results[0].address_components[3].long_name + ". ";
}catch(error){
printError(error);
}finally{
connectToForecastIO(resultSet.latitude,resultSet.longitude);
}
}else{
printError({message: "Error with GEO API: " + http.STATUS_CODES[response.statusCode]});
}
});
});
function connectToForecastIO(latitude,longitude){
var forecastRequest = https.get(createForecastRequest(latitude,longitude),function(response){
// console.log(createForecastRequest(latitude,longitude));
var body = "";
var status = response.statusCode;
//read the data
response.on("data", function(chunk){
body+=chunk;
});
//parse the data
response.on("end", function(){
try{
var weatherReport = JSON.parse(body);
resultSet.weather = weatherReport.currently.summary;
resultSet.humidity = weatherReport.currently.humidity;
resultSet.temperature = weatherReport.currently.temperature;
resultSet.pressure = weatherReport.currently.pressure;
resultSet.time = weatherReport.currently.time;
}catch(error){
printError(error);
}finally{
return resultSet;
}
});
});
}
}
//define the name of the outer module.
module.exports.get = get;
Is the return statement properly placed? Is my use of finally proper here? Please note that I come from a Java background, and in Java it is perfectly fine to use try{}/catch(){}/finally{} blocks to run cleanup code; it was the only way I managed to make this module work. But now that I have incorporated some Express and I try to call this module's method from another module, all I get is an undefined return.
You could use the Promise API, kind of like Futures in Java. Basically, you wrap both functions in promises, and then you can wait for the first to resolve before executing the next function:
var googleRequest = function(zipCode) {
return new Promise(function(resolve, reject) {
var request = https.get(createGoogleGeoMapRequest(zipCode), function(response) {
if (response.statusCode !== 200) {
reject(new Error('Failed to get request status:' + response.statusCode));
}
var body = "";
//a- Read the data.
response.on("data", function(chunk) {
body+=chunk;
});
//b- Parse the data.
response.on("end", function(body) {
var coordinates = JSON.parse(body);
resultSet.latitude = coordinates.results[0].geometry.location.lat;
resultSet.longitude = coordinates.results[0].geometry.location.lng;
resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
coordinates.results[0].address_components[1].long_name + ", " +
coordinates.results[0].address_components[2].long_name + ", " +
coordinates.results[0].address_components[3].long_name + ". ";
resolve(resultSet);
})
});
request.on('error', function(err) {
reject(err);
});
});
}
After that you could just do
googleRequest(90210).then(function(result) {
connectToForecastIO(result.latitude, result.longitude);
});
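If connectToForecastIO is wrapped in a promise the same way (a sketch under that assumption, since it is not shown wrapped above), the two calls chain naturally and errors from either step land in a single catch handler:
googleRequest(90210).then(function(result) {
    // Returning the next promise makes the chain wait for it.
    return connectToForecastIO(result.latitude, result.longitude);
}).then(function(resultSet) {
    console.log(resultSet);
}).catch(function(err) {
    printError(err);
});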
You can find out more about Promise's usage in the Promise API docs
You should also note that there are several libraries available that provide promise-based HTTP requests, such as fetch.
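For illustration, here is a minimal sketch of the same two-step flow with fetch (this assumes a fetch implementation such as the node-fetch package, and reuses the request-building helpers from the question; error handling is kept minimal):
var fetch = require('node-fetch');

function getForecast(zipCode) {
    // fetch() returns a promise, so the two requests chain directly.
    return fetch(createGoogleGeoMapRequest(zipCode))
        .then(function(response) { return response.json(); })
        .then(function(coordinates) {
            var location = coordinates.results[0].geometry.location;
            return fetch(createForecastRequest(location.lat, location.lng));
        })
        .then(function(response) { return response.json(); });
}

getForecast(90210).then(function(weatherReport) {
    console.log(weatherReport.currently.summary);
}).catch(printError);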
It seems quite new, but I'm hoping someone here has been able to use Node.js to write directly to BigQuery storage using @google-cloud/bigquery-storage.
There is an explanation of how the overall backend API works and how to write a collection of rows atomically using the BigQuery Write API, but there is no such documentation for Node.js yet. The recent 2.7.0 release documents the addition of this feature, but there is no usage documentation, and the code is not easily understood.
There is an open issue requesting an example, but I thought I'd try my luck and see if anyone here has been able to use this API yet.
Suppose you have a BigQuery table called student with three columns: id, name and age. The following steps load data into the table with the Node.js Storage Write API.
Define a student.proto file as follows:
syntax = "proto2";
message Student {
required int64 id = 1;
optional string name = 2;
optional int64 age = 3;
}
Run the following at the command prompt:
protoc --js_out=import_style=commonjs,binary:. student.proto
It should generate a student_pb.js file in the current directory.
Write the following JS code in the current directory and run it:
const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1;
const st = require('./student_pb.js');
const type = require('@google-cloud/bigquery-storage').protos.google.protobuf.FieldDescriptorProto.Type;
const mode = require('@google-cloud/bigquery-storage').protos.google.cloud.bigquery.storage.v1.WriteStream.Type;
const storageClient = new BigQueryWriteClient();
const parent = `projects/${project}/datasets/${dataset}/tables/student`;
var writeStream = {type: mode.PENDING};
var student = new st.Student();
// The descriptor must mirror student.proto: same field names, numbers and types.
var protoDescriptor = {};
protoDescriptor.name = 'student';
protoDescriptor.field = [
    {'name': 'id', 'number': 1, 'type': type.TYPE_INT64},
    {'name': 'name', 'number': 2, 'type': type.TYPE_STRING},
    {'name': 'age', 'number': 3, 'type': type.TYPE_INT64}
];
async function run() {
try {
var request = {
parent,
writeStream
}
var response = await storageClient.createWriteStream(request);
writeStream = response[0].name
var serializedRows = []
//Row 1
student.setId(1)
student.setName('st1')
student.setAge(15)
serializedRows.push(student.serializeBinary())
//Row 2
student.setId(2)
student.setName('st2')
student.setAge(15)
serializedRows.push(student.serializeBinary())
var protoRows = {
serializedRows
}
var proto_data = {
writerSchema: {protoDescriptor},
rows: protoRows
}
// Construct request
request = {
writeStream,
protoRows: proto_data
};
// Insert rows
const stream = await storageClient.appendRows();
stream.on('data', response => {
console.log(response);
});
stream.on('error', err => {
throw err;
});
stream.on('end', async () => {
/* API call completed */
try {
var response = await storageClient.finalizeWriteStream({name: writeStream})
response = await storageClient.batchCommitWriteStreams({parent,writeStreams: [writeStream]})
}
catch(err) {
console.log(err)
}
});
stream.write(request);
stream.end();
}
catch(err) {
console.log(err)
}
}
run();
Make sure your environment variables are set correctly to point to the file containing your Google Cloud credentials (the client libraries read GOOGLE_APPLICATION_CREDENTIALS by default).
Change the project and dataset values accordingly.
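As a quick sanity check, a minimal sketch (assuming you rely on that environment variable rather than another Application Default Credentials source such as a gcloud login or the metadata server):
if (!process.env.GOOGLE_APPLICATION_CREDENTIALS) {
    // Without credentials the createWriteStream call above will fail to authenticate.
    console.warn('GOOGLE_APPLICATION_CREDENTIALS is not set');
}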
I'm trying to upload a static JSON file into an IndexedDB ONLY when an upgrade is needed (i.e. onupgradeneeded). I've searched for answers to this repeatedly but have yet to see code examples of how to approach it.
My current code below gets the JSON file every time the page opens, which is of course inefficient, since I only need to fetch it if the IndexedDB has not yet been created or needs upgrading.
I tried putting the xhr.onload section at the end of the .onupgradeneeded function, but as many have noted, .onsuccess gets called before xhr.onload has completed.
var jsonUrl = '/path/to/hcLookup.json';
var req, db, hcObjectStore, objectStore, data, dataArr, trans, addreq, key;
var xhr = new XMLHttpRequest();
xhr.open("GET", jsonUrl, true);
xhr.send();
xhr.onload = function(msg) {
data = msg.target.response;
req = window.indexedDB.open("hcLookup", 1);
req.onerror=function(event){console.log("onerror: " + event.target.errorCode)};
req.onsuccess = function(event){
console.log("ready.");
};
req.onupgradeneeded = function(event){
db = event.target.result;
objectStore = db.createObjectStore("hcLookup", {autoIncrement: true});
objectStore.createIndex("S", "S", {unique: false});
// make sure the objectStore creation is finished before adding data into it
objectStore.transaction.oncomplete = function (event) {
// Store values in the newly created objectStore.
trans = db.transaction(["hcLookup"], "readwrite");
hcObjectStore = trans.objectStore("hcLookup");
// Do something when all the data is added to the database.
trans.oncomplete = function (event) {
console.log("upgrading done!");
};
trans.onerror = function (event) {
console.log("bulk add onerror: " + event.target.errorCode)
};
//parse the JSON text into dataArr so the rows can be added to the objectStore
dataArr = JSON.parse(data);
for (var i in dataArr) {
addreq = hcObjectStore.add(dataArr[i]);
}
};
};
};
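One common pattern for this (a sketch, not from the original post): create the object store and set a flag inside onupgradeneeded, then start the XHR from onsuccess only when that flag is set. The upgrade transaction cannot be held open across an asynchronous request anyway, because IndexedDB transactions auto-commit once control returns to the event loop, so the data load has to happen in its own readwrite transaction:
var needsData = false;
var req = window.indexedDB.open("hcLookup", 1);

req.onupgradeneeded = function(event) {
    // Only runs when the DB is created or the version is bumped.
    var db = event.target.result;
    var store = db.createObjectStore("hcLookup", {autoIncrement: true});
    store.createIndex("S", "S", {unique: false});
    needsData = true;
};

req.onsuccess = function(event) {
    var db = event.target.result;
    if (!needsData) { return; } // store already populated, skip the download

    var xhr = new XMLHttpRequest();
    xhr.open("GET", jsonUrl, true);
    xhr.onload = function() {
        var dataArr = JSON.parse(xhr.responseText);
        var trans = db.transaction(["hcLookup"], "readwrite");
        var store = trans.objectStore("hcLookup");
        dataArr.forEach(function(item) { store.add(item); });
        trans.oncomplete = function() { console.log("upgrading done!"); };
        trans.onerror = function(event) { console.log("bulk add onerror: " + event.target.errorCode); };
    };
    xhr.send();
};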
I'm currently creating a library for an API. The endpoints have optional tags, and so I'm trying to create a way to use them in the functions.
import * as request from "request";
class Api {
key: string;
constructor(userInput: string) {
this.key = userInput;
}
champions(tags: object) {
Object.keys(tags).forEach(function(key) {
console.log(key + " = " + tags[key])
})
request(`https://api.champion.gg/v2/champions?api_key=${this.key}&${tags}`, function (error, response, body) {
if(!error && response.statusCode == 200) {
let info = JSON.parse(body)
}
});
}
}
var test = new Api("key")
test.champions({"champData": ["kda", "damage"], "rawr": ["xd", "lmao"]})
So far, combining Object.keys and forEach has let me produce champData=kda,damage and rawr=xd,lmao, but I need to assign these to a variable that can be used in the URL. How can I get this to work?
Another issue that may come up later is that there needs to be an & symbol between each tag, but not after the last one. I apologize for throwing multiple problems into one question, but since this is my first experience with something like this, I'm running into many issues.
You can use Object.entries() and URLSearchParams()
const tags = {a:1, b:2, c:3};
const params = new URLSearchParams();
const key = "def";
Object.entries(tags).forEach(([key, prop]) => params.set(key, prop));
const url = `https://api.champion.gg/v2/champions?api_key=${key}&${params.toString()}`;
console.log(url);
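Applied inside your Api class, the method could look roughly like this (a sketch; note that URLSearchParams percent-encodes the commas as %2C, so if the API needs literal commas you can build the pairs with map and join instead, which also answers the trailing-& concern, since join only inserts separators between elements):
champions(tags) {
    // name=v1,v2 pairs joined with "&"; no trailing "&" is produced.
    const query = Object.entries(tags)
        .map(([name, values]) => name + "=" + values.join(","))
        .join("&");
    request(`https://api.champion.gg/v2/champions?api_key=${this.key}&${query}`, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            let info = JSON.parse(body);
        }
    });
}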
I am not sure if the issue I am having is a limitation in redis itself or in the nodejs 'redis' module implementation.
var redis = require('redis');
var client = redis.createClient(6379,'192.168.200.5');
client.on('error',function (error) {
console.log("** error in connection **");
process.exit(1);
});
client.on('connect',function () {
console.log("** connected **");
client.on('message',function (channel,message) {
if (channel == 'taskqueue') {
console.log(channel + ' --> ' + message);
var params = message.split(' ');
var inputf = params[0];
var outputf = params[1];
var dim = inputf.split('_').slice(-1)[0];
client.rpush('records',message,function (e,reply) {
});
}
});
client.subscribe('taskqueue');
});
As the code snippet above shows, I tried to do an RPUSH inside an 'on message' subscription handler. It does not work, and I get the client's 'on error' event instead, so it prints the "error in connection" message. What is the correct way to do this?
After further searching, I came across this page https://github.com/phpredis/phpredis/issues/365 which seems to explain the scenario: once a connection issues SUBSCRIBE, it is in subscriber mode and can no longer send regular commands such as RPUSH.
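The usual fix is to use two connections: one dedicated to the subscription and one for ordinary commands. A minimal sketch of that split (same host and channel as above):
var redis = require('redis');

// Dedicated subscriber connection: after SUBSCRIBE it may only
// issue (un)subscribe commands, so RPUSH must go elsewhere.
var sub = redis.createClient(6379, '192.168.200.5');
// Separate connection for normal commands such as RPUSH.
var cmd = redis.createClient(6379, '192.168.200.5');

sub.on('message', function (channel, message) {
    if (channel === 'taskqueue') {
        console.log(channel + ' --> ' + message);
        cmd.rpush('records', message, function (e, reply) {
            if (e) console.log(e);
        });
    }
});

sub.subscribe('taskqueue');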
I have set up a basic test page for OpenTok using an API key, session and token (for the publisher). It is based on the QuickStart, with code added to track the microphoneLevelChanged event. The page code is available here. The important lines are:
var apiKey = "API KEY HERE";
var sessionId = "SESSION ID HERE";
var token = "TOKEN HERE";
function sessionConnectedHandler(event) {
session.publish(publisher);
subscribeToStreams(event.streams);
}
function subscribeToStreams(streams) {
for (var i = 0; i < streams.length; i++) {
var stream = streams[i];
if (stream.connection.connectionId != session.connection.connectionId) {
session.subscribe(stream);
}
}
}
function streamCreatedHandler(event) {
subscribeToStreams(event.streams);
TB.log("test log stream created: " + event);
}
var pubProps = { reportMicLevels: true };
var publisher = TB.initPublisher(apiKey, null, pubProps);
var session = TB.initSession(sessionId);
session.publish(publisher);
session.addEventListener("sessionConnected", sessionConnectedHandler);
session.addEventListener("streamCreated", streamCreatedHandler);
session.addEventListener("microphoneLevelChanged", microphoneLevelChangedHandler);
session.connect(apiKey, token);
function microphoneLevelChangedHandler(event) {
TB.log("The microphone level for stream " + event.streamId + " is: " + event.volume);
}
I know that the logging works, as the logs show up from streamCreatedHandler. However, I am not getting anything logged from the microphoneLevelChangedHandler function. I have tried this with both one and two clients loading the page (the videos show up just fine).
What do I need to do to get microphoneLevelChanged events to show up?
OpenTok's WebRTC JS library does not have a microphoneLevelChanged event, so there is nothing you can do, sorry.