I created a WCF data service which returns data in JSON format:
public static void InitializeService(DataServiceConfiguration config)
{
// TODO: set rules to indicate which entity sets and service operations are visible, updatable, etc.
// Examples:
config.SetEntitySetAccessRule("*", EntitySetRights.AllRead);
config.SetServiceOperationAccessRule("*", ServiceOperationRights.All);
config.DataServiceBehavior.MaxProtocolVersion = DataServiceProtocolVersion.V2;
}
I used Titanium to connect to the web service and get the data:
var label= Titanium.UI.createLabel({
top:300
});
var data = [];
// Connecting to WCF data service
var xhr = Titanium.Network.createHTTPClient();
var theURL = 'http://localhost:4338/DataService.svc/Orders?$format=json';
var json;
xhr.onload = function (){
var json = JSON.parse(this.responseText);
};
for (var i = 0 ; i < json.length; i++)
{
data.push(json[i]);
}
label.text = data[0].OrderID;
xhr.open('Get',theURL);
What is wrong?
First of all, you have to be aware that HTTP requests are asynchronous, which means that while the response is being fetched (which takes some time depending on the internet connection, server speed, data size, etc.), the succeeding code is already being executed. If the request were synchronous, it would block the app and any user interaction while it loads. That is why the client provides callbacks that are fired on certain state changes.
Your code
var data = [];
// Connecting to WCF data service
var xhr = Titanium.Network.createHTTPClient();
// Just guessing, but is the $ necessary?
var theURL = 'http://localhost:4338/DataService.svc/Orders?$format=json';
// You are defining json here 'globally'
var json;
xhr.onload = function (){
// You are defining the json variable within the scope of
// the onload function, so it can't be accessed from outside
// the function. Moreover, you are overwriting json of the global scope
// within this function
var json = JSON.parse(this.responseText);
};
// At this point, json is undefined
// Moreover, this code is executed before xhr.onload fires
for (var i = 0 ; i < json.length; i++)
{
data.push(json[i]);
}
label.text = data[0].OrderID;
// Method should be GET not Get
xhr.open('Get',theURL);
How it should work
var data = [];
var theURL = 'http://localhost:4338/DataService.svc/Orders?format=json';
var xhr = Titanium.Network.createHTTPClient();
xhr.onload = function (){
// Assuming that you have a valid json response
var json = JSON.parse(this.responseText);
for (var i=0; i < json.length; i++) {
data.push(json[i]);
}
// For testing, otherwise make sure it won't break if your response is empty
label.text = data[0].OrderID;
};
xhr.open('GET', theURL);
// open() only prepares the request; send() actually fires it
xhr.send();
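Two further hedged suggestions, not part of the original answer: stock WCF Data Services builds sometimes ignore $format=json and answer with Atom/XML (which makes JSON.parse throw), so asking for JSON via the Accept header is safer; and an onerror callback avoids failing silently on network errors. Both would slot into the snippet above:
// After xhr.open(), ask the service for JSON explicitly;
// some OData services ignore the $format query option
xhr.setRequestHeader('Accept', 'application/json');
// Surface network or server errors instead of failing silently
xhr.onerror = function (e) {
    Ti.API.error('Request failed: ' + e.error);
};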
Related
I'm trying to upload a static JSON file into an IndexedDB ONLY when an upgrade is needed (i.e. onupgradeneeded). I've searched for answers to this repeatedly but have yet to see code examples of how to approach this.
My current code below gets the JSON file every time the page opens, which is of course inefficient, since I only need to get the JSON file if the IndexedDB has not yet been created or needs to be upgraded.
I tried putting the xhr.onload section into the end of the .onupgradeneeded function, but as many have noted, the .onsuccess gets called before the xhr.onload has completed.
var jsonUrl = '/path/to/hcLookup.json';
var req, db, hcObjectStore, objectStore, data, dataArr, trans, addreq, key;
var xhr = new XMLHttpRequest();
xhr.open("GET", jsonUrl, true);
xhr.type='json';
xhr.send();
xhr.onload = function(msg) {
data = msg.target.response;
req = window.indexedDB.open("hcLookup", 1);
req.onerror=function(event){console.log("onerror: " + event.target.errorCode)};
req.onsuccess = function(event){
console.log("ready.");
};
req.onupgradeneeded = function(event){
db = event.target.result;
objectStore = db.createObjectStore("hcLookup", {autoIncrement: true});
objectStore.createIndex("S", "S", {unique: false});
// make sure the objectStore creation is finished before adding data into it
objectStore.transaction.oncomplete = function (event) {
// Store values in the newly created objectStore.
trans = db.transaction(["hcLookup"], "readwrite");
hcObjectStore = trans.objectStore("hcLookup");
// Do something when all the data is added to the database.
trans.oncomplete = function (event) {
console.log("upgrading done!");
};
trans.onerror = function (event) {
console.log("bulk add onerror: " + event.target.errorCode)
};
//convert JSON to an strArray in order to add the dataArr into to the objectStore
dataArr = JSON.parse(data);
for (var i in dataArr) {
addreq = hcObjectStore.add(dataArr[i]);
}
};
};
};
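No answer is attached to this question here, but a minimal sketch of one workable approach (my own suggestion, reusing the asker's store and index names): open the database first, set a flag in onupgradeneeded, and only fetch the JSON in onsuccess when that flag was set. The versionchange transaction has already committed by the time onsuccess fires, so a fresh readwrite transaction can be opened for the bulk add:
var needsData = false;
var req = window.indexedDB.open("hcLookup", 1);

req.onupgradeneeded = function (event) {
    var db = event.target.result;
    var objectStore = db.createObjectStore("hcLookup", {autoIncrement: true});
    objectStore.createIndex("S", "S", {unique: false});
    // Just remember that we need to load the data; don't fetch it here
    needsData = true;
};

req.onsuccess = function (event) {
    var db = event.target.result;
    if (!needsData) { return; } // store already populated, skip the XHR
    var xhr = new XMLHttpRequest();
    xhr.open("GET", "/path/to/hcLookup.json", true);
    xhr.onload = function () {
        var dataArr = JSON.parse(xhr.responseText);
        var trans = db.transaction(["hcLookup"], "readwrite");
        var store = trans.objectStore("hcLookup");
        for (var i = 0; i < dataArr.length; i++) {
            store.add(dataArr[i]);
        }
        trans.oncomplete = function () { console.log("populating done!"); };
    };
    xhr.send();
};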
I am trying to teach myself Node.js and Express.js; however, coming from Java and C++, this is proving difficult to get used to.
I made a simple and messy module that is supposed to return a weather forecast for a given zip code.
The way this happens is by taking the user's zip code and using a Google API to generate the geo coordinates for that zip code. I get the coordinates from the JSON response and then provide them to the next API call; this call is made to the forecast.io API, and this time the weather data for the location is also taken from a JSON response.
Coming from Java, and with a not-so-solid background in JavaScript, I am having a hard time making these two functions wait for one another; in this case I need the Google API call to finish first because the coordinates it provides are needed for the second API call. Can someone take a look at this code and tell me if the strategy I used is correct, or provide a suggestion so that I can know what is done in JavaScript in situations like this?
Here is the code:
// The required modules.
var http = require("http");
var https = require("https");
//result object
var resultSet = {
latitude :"",
longitude:"",
localInfo:"",
weather:"",
humidity:"",
pressure:"",
time:""
};
//print out error messages
function printError(error){
console.error(error.message);
}
//Forecast API required information:
//key for the forecast IO app
var forecast_IO_Key = "this is my key, not publishing for security reasons";
var forecast_IO_Web_Adress = "https://api.forecast.io/forecast/";
//Create Forecast request string function
function createForecastRequest(latitude, longitude){
var request = forecast_IO_Web_Adress + forecast_IO_Key + "/"
+ latitude +"," + longitude;
return request;
}
//Google GEO API required information:
//Create Google Geo Request
var google_GEO_Web_Adress = "https://maps.googleapis.com/maps/api/geocode/json?address=";
function createGoogleGeoMapRequest(zipCode){
var request = google_GEO_Web_Adress+zipCode + "&sensor=false";
return request;
}
function get(zipCode){
// 1- Need to request google for geo locations using a given zip
var googleRequest = https.get(createGoogleGeoMapRequest(zipCode), function(response){
//console.log(createGoogleGeoMapRequest(zipCode));
var body = "";
var status = response.statusCode;
//a- Read the data.
response.on("data", function(chunk){
body+=chunk;
});
//b- Parse the data.
response.on("end", function(){
if(status === 200){
try{
var coordinates = JSON.parse(body);
resultSet.latitude = coordinates.results[0].geometry.location.lat;
resultSet.longitude = coordinates.results[0].geometry.location.lng;
resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
coordinates.results[0].address_components[1].long_name + ", " +
coordinates.results[0].address_components[2].long_name + ", " +
coordinates.results[0].address_components[3].long_name + ". ";
}catch(error){
printError(error.message);
}finally{
connectToForecastIO(resultSet.latitude,resultSet.longitude);
}
}else{
printError({message: "Error with GEO API"+http.STATUS_CODES[response.statusCode]})
}
});
});
function connectToForecastIO(latitude,longitude){
var forecastRequest = https.get(createForecastRequest(latitude,longitude),function(response){
// console.log(createForecastRequest(latitude,longitude));
var body = "";
var status = response.statusCode;
//read the data
response.on("data", function(chunk){
body+=chunk;
});
//parse the data
response.on("end", function(){
try{
var weatherReport = JSON.parse(body);
resultSet.weather = weatherReport.currently.summary;
resultSet.humidity = weatherReport.currently.humidity;
resultSet.temperature = weatherReport.currently.temperature;
resultSet.pressure = weatherReport.currently.pressure;
resultSet.time = weatherReport.currently.time;
}catch(error){
printError(error.message);
}finally{
return resultSet;
}
});
});
}
}
//define the name of the outer module.
module.exports.get = get;
Is the return statement properly placed? Is my use of finally proper here? Please note that I come from a Java background, and in Java it is perfectly fine to use try{} catch(){} finally{} blocks to execute closure code; it was the only way I managed to get this module to work. But now that I have incorporated some Express, and I try to execute this module's method from another module, all I am getting is an undefined return.
You could use the Promise API, kind of like Futures in Java. Basically, what you could do is wrap both functions in promises, and then you could wait for the resolve to execute the next function:
var googleRequest = function(zipCode) {
return new Promise(function(resolve, reject) {
var request = https.get(createGoogleGeoMapRequest(zipCode), function(response) {
if (response.statusCode !== 200) {
// return here so we don't also try to parse the failed response
return reject(new Error('Failed to get request, status: ' + response.statusCode));
}
var body = "";
//a- Read the data.
response.on("data", function(chunk) {
body+=chunk;
});
//b- Parse the data.
response.on("end", function(body) {
var coordinates = JSON.parse(body);
resultSet.latitude = coordinates.results[0].geometry.location.lat;
resultSet.longitude = coordinates.results[0].geometry.location.lng;
resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
coordinates.results[0].address_components[1].long_name + ", " +
coordinates.results[0].address_components[2].long_name + ", " +
coordinates.results[0].address_components[3].long_name + ". ";
resolve(resultSet);
})
});
request.on('error', function(err) {
reject(err);
});
});
}
After that you could just do
googleRequest(90210).then(function(result) {
connectToForecastIO(result.latitude, result.longitude);
});
You can find out more about Promise usage in the Promise API docs.
You should also note that there are several libraries available that allow for promise-based HTTP requests, such as fetch.
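To complete the picture, connectToForecastIO can be wrapped the same way, so the whole flow becomes a flat chain. A sketch following the same pattern (reusing the asker's https, createForecastRequest, and printError helpers):
var forecastRequest = function(latitude, longitude) {
    return new Promise(function(resolve, reject) {
        https.get(createForecastRequest(latitude, longitude), function(response) {
            var body = "";
            response.on("data", function(chunk) { body += chunk; });
            response.on("end", function() {
                try {
                    resolve(JSON.parse(body)); // hand the parsed report to the caller
                } catch (err) {
                    reject(err);
                }
            });
        }).on('error', reject);
    });
};

googleRequest(90210)
    .then(function(result) {
        return forecastRequest(result.latitude, result.longitude);
    })
    .then(function(weatherReport) {
        console.log(weatherReport.currently.summary);
    })
    .catch(printError);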
I'm trying to store a few pieces of user data in the roamingFolder of Windows Storage in a JavaScript app. I'm following sample code from the Dev Center, but with no success. My code snippet is as follows (or see the SkyDrive link for the full project: https://skydrive.live.com/redir?resid=F4CAEFCD620982EB!105&authkey=!AE-ziM-BLJuYj7A):
filesReadCounter: function() {
roamingFolder.getFileAsync(filename)
.then(function (filename) {
return Windows.Storage.FileIO.readTextAsync(filename);
}).done(function (data) {
var dataToRead = JSON.parse(data);
var dataNumber = dataToRead.count;
var message = "Your Saved Conversions";
//for (var i = 0; i < dataNumber; i++) {
message += dataToRead.result;
document.getElementById("savedOutput1").innerText = message;
//}
//counter = parseInt(text);
//document.getElementById("savedOutput2").innerText = dataToRead.counter;
}, function () {
// getFileAsync or readTextAsync failed.
//document.getElementById("savedOutput2").innerText = "Counter: <not found>";
});
},
filesDisplayOutput: function () {
this.filesReadCounter();
}
I'm calling the filesDisplayOutput function inside the ready method of the navigator template's item.js file, to retrieve the last session's data. But it always shows blank. I want to save up to 5 items of data that a user may need to keep.
I had some trouble running your code as is, but that's tangential to the question. Bottom line: you're not actually reading the file. Note this code; there's no then or done to execute when the promise is fulfilled:
return Windows.Storage.FileIO.readTextAsync(filename);
I hacked this into your example solution and it's working... with the typical caveat that this is not production code :)
filesReadCounter: function () {
roamingFolder.getFileAsync(filename).then(
function (filename) {
Windows.Storage.FileIO.readTextAsync(filename).done(
function (data) {
var dataToRead = JSON.parse(data);
var dataNumber = dataToRead.count;
var message = "Your Saved Conversions";
//for (var i = 0; i < dataNumber; i++) {
message += dataToRead.result;
document.getElementById("savedOutput1").innerText = message;
//}
//counter = parseInt(text);
//document.getElementById("savedOutput2").innerText = dataToRead.counter;
}, function () {
// readTextAsync failed.
//document.getElementById("savedOutput2").innerText = "Counter: <not found>";
});
},
function () {
// getFileAsync failed
})
},
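For the saving side, the counterpart is createFileAsync plus writeTextAsync. A minimal sketch (my own addition, using the standard Windows.Storage API; the function name filesWriteCounter is made up to match the style above):
filesWriteCounter: function (dataToSave) {
    // replaceExisting overwrites the previous session's file
    roamingFolder.createFileAsync(filename,
        Windows.Storage.CreationCollisionOption.replaceExisting)
        .then(function (file) {
            return Windows.Storage.FileIO.writeTextAsync(file, JSON.stringify(dataToSave));
        })
        .done(function () {
            // saved successfully
        }, function () {
            // createFileAsync or writeTextAsync failed
        });
},
Keep in mind that roaming storage has a quota (see roamingStorageQuota, typically around 100 KB), so five small JSON items are fine, but larger data should go to localFolder.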
I am trying to fill a picker with remote data, but it crashes.
Here is the code:
var countryDataArray = [];
var picker_country = Ti.UI.createPicker
({
bottom:'-251dp'
});
win.add(picker_country);
getCountryList(); //to call web service
//Gets country list from the server
function getCountryList()
{
getCountry.onload = function()
{
var jsonString = JSON.parse(this.responseText);
var msg = jsonString.Message;
var success = jsonString.IsSuccess;
countryDataArray = jsonString.dsetData.CountryList;
Ti.API.log('countryList value:'+countryDataArray);
activity.hide();
if(countryDataArray.length > 0)
{
for (var i=0; i < countryDataArray.length ; i++)
{
data[i] = Ti.UI.createPickerRow(
{
title:countryDataArray[i].Name,
country_id:countryDataArray[i].ID,
fontSize:18
});
};
}
picker_country.add(data);
}
What's wrong with this code? The code works fine with static data!
Static data:
var data = [
{title:'Bananas',custom_item:'b',fontSize:18},
{title:'Strawberries',custom_item:'s',fontSize:20},
{title:'Mangos',custom_item:'m',fontSize:22,selected:true},
{title:'Grapes',custom_item:'g',fontSize:24}
];
Solved! I don't know why, but I just assigned the data to the picker before adding the picker to the view, and it got solved!
picker_country.add(data);
win.add(picker_country);
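For completeness, a sketch of the reordered flow. Note that the snippet in the question never declares the data array, never creates the getCountry client, and never sends the request; those pieces are assumptions filled in here, and theURL stands in for the actual web service endpoint:
var data = [];
var picker_country = Ti.UI.createPicker({
    bottom: '-251dp'
});

var getCountry = Ti.Network.createHTTPClient();
getCountry.onload = function () {
    var jsonString = JSON.parse(this.responseText);
    var countryDataArray = jsonString.dsetData.CountryList;
    for (var i = 0; i < countryDataArray.length; i++) {
        data.push(Ti.UI.createPickerRow({
            title: countryDataArray[i].Name,
            country_id: countryDataArray[i].ID,
            fontSize: 18
        }));
    }
    // Populate first, then attach the picker to the window
    picker_country.add(data);
    win.add(picker_country);
};
getCountry.open('GET', theURL);
getCountry.send();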
When trying to read data in Node.js from an ImageMagick child process, it comes out corrupted.
A simple test case would be the following:
var fs = require('fs');
var exec = require('child_process').exec;
var cmd = 'convert ./test.jpg -';
exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
fs.writeFileSync('test2.jpg', stdout);
});
I would expect that to be the equivalent of the command line convert ./test.jpg - > test2.jpg, which does write the binary file correctly.
Originally there was a problem with the maxBuffer option being too small and resulting in a truncated file. After increasing that, the file now appears slightly larger than expected and still corrupted.
The data from stdout needs to be sent over HTTP.
What would be the correct way to read this data from the ImageMagick stdout?
There were two problems with the initial approach.
The maxBuffer needs to be high enough to handle the whole response from the child process.
Binary encoding needs to be properly set everywhere.
A full working example would be the following:
var fs = require('fs');
var exec = require('child_process').exec;
var cmd = 'convert ./test.jpg -';
exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
fs.writeFileSync('test2.jpg', stdout, 'binary');
});
Another example, sending the data in an HTTP response using the Express web framework, would look like this:
var express = require('express');
var app = express.createServer();
app.get('/myfile', function(req, res) {
var cmd = 'convert ./test.jpg -';
exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
res.send(new Buffer(stdout, 'binary'));
});
});
Ah, problem is:
If timeout is greater than 0, then it will kill the child process if it runs longer than timeout milliseconds. The child process is killed with killSignal (default: 'SIGTERM'). maxBuffer specifies the largest amount of data allowed on stdout or stderr - if this value is exceeded then the child process is killed.
Source: http://nodejs.org/docs/v0.4.8/api/child_processes.html#child_process.exec
So if your image is over the default buffer size of 200*1024 bytes, your image is going to be corrupted as you mentioned. I was able to get it to work with the following code:
var fs = require('fs');
var spawn = require('child_process').spawn;
var util = require('util');
var output_file = fs.createWriteStream('test2.jpg', {encoding: 'binary'});
var convert = spawn('convert', ['test.jpg', '-']);
convert.stdout.on('data', function(data) {
output_file.write(data);
});
convert.on('exit', function(code) {
output_file.end();
});
Here I used spawn to get a streamable stdout, then I used a writable stream to write the data in binary format. Just tested it and was able to open the resulting test2.jpg image.
EDIT: Yes you can use this to send the result over HTTP. Here's an example of me downsizing an image with convert, then posting the result to the glowfoto API:
var fs = require('fs');
var http = require('http');
var util = require('util');
var spawn = require('child_process').spawn;
var url = require('url');
// Technically the only reason I'm using this
// is to get the XML parsed from the first call
// you probably don't need this, but just in case:
//
// npm install xml2js
var xml = require('xml2js');
var post_url;
var input_filename = 'giant_image.jpg';
var output_filename = 'giant_image2.jpg';
// The general format of a multipart/form-data part looks something like:
// --[boundary]\r\n
// Content-Disposition: form-data; name="fieldname"\r\n
// \r\n
// field value
function EncodeFieldPart(boundary,name,value) {
var return_part = "--" + boundary + "\r\n";
return_part += "Content-Disposition: form-data; name=\"" + name + "\"\r\n\r\n";
return_part += value + "\r\n";
return return_part;
}
// Same as EncodeFieldPart except that it adds a filename,
// as well as sets the content type (mime) for the part
function EncodeFilePart(boundary,type,name,filename) {
var return_part = "--" + boundary + "\r\n";
return_part += "Content-Disposition: form-data; name=\"" + name + "\"; filename=\"" + filename + "\"\r\n";
return_part += "Content-Type: " + type + "\r\n\r\n";
return return_part;
}
// We could use Transfer-Encoding: Chunked in the headers
// but not every server supports this. Instead we're going
// to build our post data, then create a buffer from it to
// pass to our MakePost() function. This means you'll have
// 2 copies of the post data sitting around
function PreparePost() {
// Just a random string I copied from a packet sniff of a mozilla post
// This can be anything you want really
var boundary = "---------------------------168072824752491622650073";
var post_data = '';
post_data += EncodeFieldPart(boundary, 'type', 'file');
post_data += EncodeFieldPart(boundary, 'thumbnail', '400');
post_data += EncodeFilePart(boundary, 'image/jpeg', 'image', output_filename);
fs.readFile(output_filename, 'binary', function(err,data){
post_data += data;
// This terminates our multi-part data
post_data += "\r\n--" + boundary + "--";
// We need to have our network transfer in binary
// Buffer is a global object
MakePost(new Buffer(post_data, 'binary'));
});
}
function MakePost(post_data) {
var parsed_url = url.parse(post_url);
var post_options = {
host: parsed_url.hostname,
port: '80',
path: parsed_url.pathname,
method: 'POST',
headers : {
'Content-Type' : 'multipart/form-data; boundary=---------------------------168072824752491622650073',
'Content-Length' : post_data.length
}
};
var post_request = http.request(post_options, function(response){
response.setEncoding('utf8');
response.on('data', function(chunk){
console.log(chunk);
});
});
post_request.write(post_data);
post_request.end();
}
// Glowfoto first makes you get the url of the server
// to upload
function GetServerURL() {
var response = '';
var post_options = {
host: 'www.glowfoto.com',
port: '80',
path: '/getserverxml.php'
};
var post_req = http.request(post_options, function(res) {
res.setEncoding('utf8');
// Here we buildup the xml
res.on('data', function (chunk) {
response += chunk;
});
// When we're done, we parse the xml
// Could probably just do string manipulation instead,
// but just to be safe
res.on('end', function(){
var parser = new xml.Parser();
parser.addListener('end', function(result){
// Grab the uploadform element value and prepare our post
post_url = result.uploadform;
PreparePost();
});
// This parses an XML string into a JS object
var xml_object = parser.parseString(response);
});
});
post_req.end();
}
// We use spawn here to get a streaming stdout
// This will use imagemagick to downsize the full image to 30%
var convert = spawn('convert', ['-resize', '30%', input_filename, '-']);
// Create a binary write stream for the resulting file
var output_file = fs.createWriteStream(output_filename, {encoding: 'binary'});
// This just writes to the file and builds the data
convert.stdout.on('data', function(data){
output_file.write(data);
});
// When the process is done, we close off the file stream
// Then trigger off our POST code
convert.on('exit', function(code){
output_file.end();
GetServerURL();
});
Sample result:
$ node test.js
<?xml version="1.0" encoding="utf-8"?>
<upload>
<thumburl>http://img4.glowfoto.com/images/2011/05/29-0939312591T.jpg</thumburl>
<imageurl>http://www.glowfoto.com/static_image/29-093931L/2591/jpg/05/2011/img4/glowfoto</imageurl>
<codes>http://www.glowfoto.com/getcode.php?srv=img4&img=29-093931L&t=jpg&rand=2591&m=05&y=2011</codes>
</upload>
You can also take advantage of I/O pipes in Node.js:
var file = fs.createWriteStream("path-to-file", {encoding: 'binary'});
var converter = spawn(cmd, ['parameters omitted']);
converter.stdout.pipe(file); // this pipes every stdout chunk straight into the file
converter.on('exit', function() { file.end(); });
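The same idea covers the HTTP requirement from the question: pipe the child's stdout directly into the response, so nothing is buffered in memory at all. A minimal sketch (the port and filename are made up for illustration):
var http = require('http');
var spawn = require('child_process').spawn;

http.createServer(function(req, res) {
    res.setHeader('Content-Type', 'image/jpeg');
    var convert = spawn('convert', ['test.jpg', '-']);
    // Stream the converted image straight to the client;
    // pipe() ends the response when the child's stdout closes
    convert.stdout.pipe(res);
}).listen(3000);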