Worklight call WL.Server.invokeHttp(input) in a loop - ibm-mobilefirst

I am a novice to Worklight.
I am trying to merge the responses from multiple WL.Server.invokeHttp(input) calls.
e.g.
call 1: response1 = WL.Server.invokeHttp(input1)
Let's say response1 gives me a list of students (names). Then, looping over every student:
call n: response(n) = WL.Server.invokeHttp(student)
Let's say response(n) gives me that student's score.
Now I am trying to merge the score of each student into the student list.
Adding my code:
function getStudentsMarks() {
    var path = "/edu/students";
    WL.Logger.info("path: " + path);
    var input = {
        method : 'get',
        returnedContentType : 'json',
        path : path
    };
    var response = WL.Server.invokeHttp(input);
    var students = response.students;
    for (var i = 0; i < students.length; i++) {
        var student = students[i];
        WL.Logger.info("student id: " + student.id);
        var resp = getStudentMarks("students/" + student.id);
        students[i].marks = resp;
    }
    return response;
}
function getStudentMarks(path) {
    path = "/edu/" + path;
    var input = {
        method : 'get',
        returnedContentType : 'json',
        path : path
    };
    var response = WL.Server.invokeHttp(input);
    return response;
}
Thanks in advance.

Your question is a bit too broad and does not contain any code.
Did you try anything yet?
It is important to remember that procedure code is written in JavaScript. So if you know how to do it in JavaScript, you should be able to do it in procedure code as well.
From what I understand, you should create one adapter procedure. This procedure makes the different calls to the different HTTP backends; in JavaScript, write whatever merging logic you need and, at the end of the loop, return the processed data you want.
Before you go deep into your example, maybe try with just one invocation, then try to merge two. Once you are comfortable writing the code, try your full solution.
Note however that one HTTP adapter can only connect to 1 backend domain name. So if your example requires multiple domain names, your "mashup" adapter needs to call other adapters.
If all your HTTP requests point to the same domain name, then 1 adapter is enough.
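For illustration only, here is a minimal sketch of such a mashup procedure. It assumes the scores live behind a second adapter named ScoresAdapter with a procedure getScore (both names are hypothetical, not from the question) and checks isSuccessful before merging:
function getStudentsWithScores() {
    // First backend call: fetch the list of students.
    var response = WL.Server.invokeHttp({
        method : 'get',
        returnedContentType : 'json',
        path : '/edu/students'
    });
    if (!response.isSuccessful) {
        return response; // propagate the backend error as-is
    }
    var students = response.students;
    for (var i = 0; i < students.length; i++) {
        // If the scores sit on a different backend domain, call a procedure in
        // another adapter (adapter and procedure names here are hypothetical).
        var scoreResponse = WL.Server.invokeProcedure({
            adapter : 'ScoresAdapter',
            procedure : 'getScore',
            parameters : [students[i].id]
        });
        students[i].marks = scoreResponse.isSuccessful ? scoreResponse : null;
    }
    return response;
}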
I recommend reading this as well: https://www.ibm.com/developerworks/community/blogs/worklight/entry/handling_backend_responses_in_adapters

Related

Community Connector getData() Request only uses the first two schema fields, not all four

I am building a Community Connector between Google Data Studio and SpyFu.com, in order to funnel SEO information for a specific URL into the GDS dashboard.
However, my getData() request only contains the first two fields from my schema. As you can see, I have four listed in the code, yet only the first two fields in the schema are printed to GDS.
I've been through tutorials, official documentation, and YouTube videos, looked this issue up on Google, and checked out the community resources on GitHub.
//Step Two: Define getConfig()
function getConfig(request) {
  var cc = DataStudioApp.createCommunityConnector();
  var config = cc.getConfig();
  config.newInfo()
    .setId('instructions')
    .setText('Give me SpyFu information on the following domain:');
  config.newTextInput()
    .setId('domain')
    .setName('Enter the domain to search')
    .setHelpText('e.g. ebay.com')
    .setPlaceholder('ebay.com');
  config.newTextInput()
    .setId('SECRET_KEY')
    .setName('Enter your API Secret Key')
    .setHelpText('e.g. A1B2C3D4')
    .setPlaceholder('A1B2C3D4');
  config.setDateRangeRequired(false);
  return config.build();
}
//Step Three: Define getSchema()
function getFields(request) {
  var cc = DataStudioApp.createCommunityConnector();
  var fields = cc.getFields();
  var types = cc.FieldType;
  var aggregations = cc.AggregationType;
  fields.newDimension()
    .setId('Keyword')
    .setName('Keywords')
    .setDescription('The keywords most often attributed to this domain.')
    .setType(types.TEXT);
  fields.newMetric()
    .setId('Rank')
    .setName('Rankings')
    .setDescription('The ranking of the target site keyword on the Google Search Page.')
    .setType(types.NUMBER);
  fields.newMetric()
    .setId('Local_Monthly_Searches')
    .setName('Local Searches per Month')
    .setDescription('Number of times, locally, that people have searched for this term within the last month.')
    .setType(types.NUMBER);
  fields.newMetric()
    .setId('Global_Monthly_Searches')
    .setName('Global Searches per Month')
    .setDescription('Number of times, globally, that people have searched for this term within the last month.')
    .setType(types.NUMBER);
  return fields;
}
function getSchema(request) {
  var fields = getFields(request).build();
  return { schema: fields };
}
//Step Four: Define getData()
function responseToRows(requestedFields, response, domain) {
  // Transform parsed data and filter for requested fields
  return response.map(function(Array) {
    var row = [];
    requestedFields.asArray().forEach(function (field) {
      switch (field.getId()) {
        case 'Keyword':
          return row.push(Array.term);
        case 'Rank':
          return row.push(Array.position);
        case 'Local_Monthly_Searches':
          return row.push(Array.exact_local_monthly_search_volume);
        case 'Global_Monthly_Searches':
          return row.push(Array.exact_global_monthly_search_volume);
        case 'domain':
          return row.push(domain);
        default:
          return row.push('');
      }
    });
    return { values: row };
  });
}
function getData(request) {
  console.log("Request from Data Studio");
  console.log(request);
  var requestedFieldIds = request.fields.map(function(field) {
    return field.name;
  });
  var requestedFields = getFields().forIds(requestedFieldIds);
  // Fetch data from API
  var url = [
    'https://www.spyfu.com/apis/url_api/organic_kws?q='
    + request.configParams.domain
    + '&r=20'
    + '&p=[1 TO 10]'
    + '&api_key='
    + request.configParams.SECRET_KEY,
  ];
  try {
    var response = UrlFetchApp.fetch(url.join(''));
  } catch (e) {
    DataStudioApp.createCommunityConnector()
      .newUserError()
      .setDebugText('Failed URL Fetch Attempt. Exception details: ' + e)
      .setText('There was an error accessing this domain. Try again later, or file an issue if this error persists.')
      .throwException();
  }
  console.log("Response from API");
  console.log(response);
  //Parse data from the API
  try {
    var parsedResponse = JSON.parse(response);
  } catch (e) {
    DataStudioApp.createCommunityConnector()
      .newUserError()
      .setDebugText('Error parsing the JSON data. Exception details: ' + e)
      .setText('There was an error parsing the JSON data. Try again later, or file an issue if this error persists.')
      .throwException();
  }
  var rows = responseToRows(requestedFields, parsedResponse);
  return {
    schema: requestedFields.build(),
    rows: rows
  };
}
I need GDS to display four columns of data: "Keyword", "Rank", "Local Monthly Searches" and "Global Monthly Searches".
I cannot figure out how to create a "fixed schema" so that the system always prints these four columns of data at every request. The tutorials and various documentation say it's possible, but not how to do it. Please help!
The number of metrics initially shown by a Community Connector is handled on the front end, in Google Data Studio itself.
The back end (the connector) initially exposes only the default dimension and default metric. Getting the remaining fields to appear is done while you are building a report in Google Data Studio: click on the data set, select "Data" in the right-hand menu, scroll down to either Metrics or Dimensions, and pick the ones you wish to add to the current chart.
Note that these are the fields you established earlier in the coding process, when you were setting up your schema.
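If you want particular fields pre-selected whenever a new chart is created, the Fields object also lets you mark a default dimension and default metric. A minimal sketch (untested), reusing the field IDs from your getFields():
// Inside getFields(), after the four field definitions above:
// mark which fields Data Studio should pre-select for a new chart.
fields.setDefaultDimension('Keyword');
fields.setDefaultMetric('Rank');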
Here, you're filtering your defined schema for fields that are present on the request object received by getData().
var requestedFieldIds = request.fields.map(function(field) {
  return field.name;
});
var requestedFields = getFields().forIds(requestedFieldIds);
The visualization in Google Data Studio that is the catalyst for the request will determine which fields are requested.
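For example, if a chart uses only two of the four fields, the request object handed to getData() will look roughly like this (illustrative values only):
// Illustrative getData() request; the values are made up for this example.
{
  configParams: { domain: 'ebay.com', SECRET_KEY: 'A1B2C3D4' },
  fields: [
    { name: 'Keyword' },
    { name: 'Rank' }
  ]
}
So requestedFields, and therefore the schema and rows you return, will only ever contain the fields that the requesting chart asked for.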

Pentaho - upload file using API

I need to upload a file using an API.
I tried the REST Client step and didn't find any options.
I tried the HTTP Post step and it responded with 415.
Please suggest how to accomplish this.
Error 415 is “Unsupported media type”.
You may need to change the media type of the request, or check whether that type of file is accepted by the remote server.
https://en.m.wikipedia.org/wiki/List_of_HTTP_status_codes
This solution uses only standard classes of JRE 7. Add a Modified Java Script Value step to your transformation. You will have to add two columns to the flow, URL_FORM_POST_MULTIPART_COLUMN and FILE_URL_COLUMN. You can send as many files as you want; you just have to call outputStreamToRequestBody.write more times.
try {
    // In this step you will need two columns from the previous flow:
    // URL_FORM_POST_MULTIPART_COLUMN and FILE_URL_COLUMN.
    var serverUrl = new java.net.URL(URL_FORM_POST_MULTIPART_COLUMN);
    var boundaryString = "999aaa000zzz09za";
    var openBoundary = java.lang.String.format("\n\n--%s\nContent-Disposition: form-data\nContent-Type: text/xml\n\n", boundaryString);
    var closeBoundary = java.lang.String.format("\n\n--%s--\n", boundaryString);

    // var netIPSocketAddress = java.net.InetSocketAddress("127.0.0.1", 8888);
    // var proxy = java.net.Proxy(java.net.Proxy.Type.HTTP, netIPSocketAddress);
    // var urlConnection = serverUrl.openConnection(proxy);
    var urlConnection = serverUrl.openConnection();
    urlConnection.setDoOutput(true); // Indicate that we want to write to the HTTP request body
    urlConnection.setRequestMethod("POST");
    //urlConnection.addRequestProperty("Authorization", "Basic " + Authorization);
    urlConnection.addRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundaryString);

    var outputStreamToRequestBody = urlConnection.getOutputStream();
    outputStreamToRequestBody.write(openBoundary.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    outputStreamToRequestBody.write(java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(FILE_URL_COLUMN)));
    outputStreamToRequestBody.write(closeBoundary.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    outputStreamToRequestBody.flush();

    var httpResponseReader = new java.io.BufferedReader(new java.io.InputStreamReader(urlConnection.getInputStream()));
    var lineRead = "";
    var finalText = "";
    while ((lineRead = httpResponseReader.readLine()) != null) {
        finalText += lineRead;
    }
    var status = urlConnection.getResponseCode();
    var result = finalText;
    var time = new Date();
}
catch (e) {
    Alert(e);
}
I solved this by using the solution from http://www.dietz-solutions.com/2017/06/pentaho-data-integration-multi-part.html
Thanks, Ben.
He's written a Java class for multi-part form submission. I extended it by adding a header for Authorization...
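In the JavaScript-step approach shown above, the same extension would amount to one extra request property set before the body is written; a minimal sketch, assuming a hypothetical AUTH_BASE64_COLUMN field that carries base64("user:password") from an earlier step:
// Hypothetical column: AUTH_BASE64_COLUMN holds base64("user:password").
urlConnection.addRequestProperty("Authorization", "Basic " + AUTH_BASE64_COLUMN);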

Crunchbase Data API v3.1 to Google Sheets

I'm trying to pull data from the Crunchbase Open Data Map to a Google Spreadsheet. I'm following Ben Collins's script but it no longer works since the upgrade from v3 to v3.1. Anyone had any luck modifying the script for success?
var USER_KEY = 'insert your API key in here';

// function to retrieve organizations data
function getCrunchbaseOrgs() {
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('Organizations');
  var query = sheet.getRange(3,2).getValue();

  // URL and params for the Crunchbase API
  var url = 'https://api.crunchbase.com/v/3/odm-organizations?query=' + encodeURI(query) + '&user_key=' + USER_KEY;
  var json = getCrunchbaseData(url,query);

  if (json[0] === "Error:") {
    // deal with error with fetch operation
    sheet.getRange(5,1,sheet.getLastRow(),2).clearContent();
    sheet.getRange(6,1,1,2).setValues([json]);
  }
  else {
    if (json[0] !== 200) {
      // deal with error from api
      sheet.getRange(5,1,sheet.getLastRow(),2).clearContent();
      sheet.getRange(6,1,1,2).setValues([["Error, server returned code:",json[0]]]);
    }
    else {
      // correct data comes back, filter down to match the name of the entity
      var data = json[1].data.items.filter(function(item) {
        return item.properties.name == query;
      })[0].properties;

      // parse into array for Google Sheet
      var outputData = [
        ["Name",data.name],
        ["Homepage",data.homepage_url],
        ["Type",data.primary_role],
        ["Short description",data.short_description],
        ["Country",data.country_code],
        ["Region",data.region_name],
        ["City name",data.city_name],
        ["Blog url",data.blog_url],
        ["Facebook",data.facebook_url],
        ["Linkedin",data.linkedin_url],
        ["Twitter",data.twitter_url],
        ["Crunchbase URL","https://www.crunchbase.com/" + data.web_path]
      ];

      // clear any old data
      sheet.getRange(5,1,sheet.getLastRow(),2).clearContent();
      // insert new data
      sheet.getRange(6,1,12,2).setValues(outputData);
      // add image with formula and format that row
      sheet.getRange(5,2).setFormula('=image("' + data.profile_image_url + '",4,50,50)').setHorizontalAlignment("center");
      sheet.setRowHeight(5,60);
    }
  }
}
This code no longer pulls data as expected.
I couldn't confirm the error messages you got when you ran the script, so I would like to point out one clear difference: it seems that the endpoint was changed from https://api.crunchbase.com/v/3/ to https://api.crunchbase.com/v3.1/. So how about this modification?
From :
var url = 'https://api.crunchbase.com/v/3/odm-organizations?query=' + encodeURI(query) + '&user_key=' + USER_KEY;
To :
var url = 'https://api.crunchbase.com/v3.1/odm-organizations?query=' + encodeURI(query) + '&user_key=' + USER_KEY;
Note :
From your script, I also couldn't confirm query. So if the script doesn't work even after you modify the endpoint, please check that as well. You can see the details in "API v3 Compared to API v3.1", referenced below.
References :
API v3 Compared to API v3.1
Using the API
If this was not useful for you, I'm sorry.

How to pass same parameter with different value

I am trying to call the following API using Alamofire, but this API takes multiple "to" fields. I tried to pass an array of "to" emails as parameters; it shows no error but does not send to all of the emails. The API itself is correct, I tested it from the terminal. Any suggestions will be cordially welcomed.
http -a email:pass -f POST 'sampleUrl' from="email#email.com" to="ongkur.cse#gmail.com" to="emailgmail#email.com" subject="test_sub" bodyText="testing hello"
I am giving my code:
class func sendMessage(message:MessageModel, delegate:RestAPIManagerDelegate?) {
    let urlString = "http://localhost:8080/app/user/messages"
    var parameters = [String:AnyObject]()
    parameters = [
        "from": message.messageFrom.emailAddress
    ]
    var array = [String]()
    for to in message.messageTO {
        array.append(to)
    }
    parameters["to"] = array
    for cc in message.messageCC {
        parameters["cc"] = cc.emailAddress;
    }
    for bcc in message.messageBCC {
        parameters["bcc"] = bcc.emailAddress;
    }
    parameters["subject"] = message.messageSubject;
    parameters["bodyText"] = message.bodyText;

    Alamofire.request(.POST, urlString, parameters: parameters)
        .authenticate(user: MessageManager.sharedInstance().primaryUserName, password: MessageManager.sharedInstance().primaryPassword)
        .validate(statusCode: 200..<201)
        .validate(contentType: ["application/json"])
        .responseJSON {
            (_, _, jsonData, error) in
            if (error != nil) {
                println("\n sendMessage attempt json response:")
                println(error!)
                delegate?.messageSent?(false)
                return
            }
            println("Server response during message sending:\n")
            let swiftyJSONData = JSON(jsonData!)
            println(swiftyJSONData)
            delegate?.messageSent?(true)
    }
}
First of all, if you created the API yourself, you should consider changing it to expect an array of 'to' receivers instead of repeating the same parameter name multiple times.
As back2dos puts it in this answer: https://stackoverflow.com/a/1898078/672989
Although POST may have multiple values for the same key, I'd be cautious using it, since some servers can't even properly handle that, which is probably why this isn't supported ... if you convert "duplicate" parameters to a list, the whole thing might start to choke if a parameter comes in only once and you suddenly wind up having a string or something ...
And I think he's right.
In this case I guess this is not possible with Alamofire, just as it is not possible with AFNetworking: https://github.com/AFNetworking/AFNetworking/issues/21
Alamofire probably stores its POST parameters in a Dictionary, which doesn't allow duplicate keys.

Issue with BTC-e API in App Script, method parameter

I am trying to incorporate the BTC-e.com API into a Google Docs spreadsheet.
The API documentation is here: https://btc-e.com/api/documentation
The API method name is sent via a POST parameter called method. UrlFetchApp also requires me to set the type of request to POST via a parameter called method, and I then need another parameter, also called method, set to getInfo.
How can I go about setting the fetch method to POST and still have the API parameter method set to getInfo?
Below is the function this relates to. I am also sure there are more issues in my work I am yet to find.
function inventory() {
  var nonce = Number(SpreadsheetApp.getActiveSheet().getRange('K2').getValue());
  var token = SpreadsheetApp.getActiveSheet().getRange('K1').getValue();
  var tokenEndpoint = "https://btc-e.com/tapi";
  var sign = 'TEMP'
  var head = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Key': token,
    'Sign': sign
  }
  var params = {
    method : "POST",
    method : "getInfo",
    headers: head,
    contentType: 'application/x-www-form-urlencoded',
    method : "getInfo",
    nonce: nonce
  }
  var request = UrlFetchApp.getRequest(tokenEndpoint, params);
  var response = UrlFetchApp.fetch(tokenEndpoint, params);
  var response2 = String(response);
  SpreadsheetApp.getActiveSheet().getRange('K2').setValue(nonce + 1);
  SpreadsheetApp.getActiveSheet().getRange('I16').setValue(response2);
  SpreadsheetApp.getActiveSheet().getRange('I17').setValue(nonce);
}
This just yields the error
Attribute provided with invalid value: method
Thanks,
Steve
PS: First time posting, I tried to get the format correct.
I made the following Google JavaScript function to do POST access to BTC-e. You can find this function in action in the example spreadsheet I made to demonstrate the BTC-e API functions.
function btceHttpPost(keyPair, method, params, nonce) {
  if (keyPair === undefined) {
    return "{'error':'missing key pair'}";
  }
  if (params === undefined) {
    params = '';
  }
  // Cleanup keypair, remove all \s (any whitespace)
  var keyPair = keyPair.replace(/[\s]/g, '');
  // Keypair example: "AFE730YV-S9A4FXBJ-NQ12HXS9-CA3S3MPM-CKQLU0PG,96a00f086824ddfddd9085a5c32b8a7b225657ae2fe9c4483b4c109fab6bf1a7"
  keyPair = keyPair.split(',');
  var pubKey = keyPair[0];
  var privKey = keyPair[1];
  // As specified in the BTC-e API (https://btc-e.com/api/documentation), the
  // nonce POST parameter must be an incrementing integer (>0). The easiest
  // implementation is the use of a timestamp (TS), so there is no need
  // for persistent storage. Preferably, the resolution of the TS should be
  // small enough to handle the desired call frequency (a sleep of the TS
  // resolution can fix this, but I don't like such a waste). Another
  // consideration is the size of the nonce supported by BTC-e. Experiments
  // revealed this is a 32-bit unsigned number. The native JavaScript TS,
  // stored in a float, can be 53 bits and has a resolution of 1 ms.
  if (nonce === undefined) {
    // This time stamp counts the number of 200 ms ticks since Jan 1st, 2014 UTC.
    // On 22 Mar 2041 01:17:39 UTC it will overflow the 32 bits and will fail
    // as the nonce key for BTC-e.
    var nonce = Math.floor((Date.now() - Date.UTC(2014, 0)) / 200);
  }
  // Construct payload message
  var msg = 'nonce=' + nonce + '&method=' + method + params;
  var msgSign = Utilities.computeHmacSignature(Utilities.MacAlgorithm.HMAC_SHA_512, msg, privKey);
  // Convert encoded message from byte[] to hex string
  for (var msgSignHex = [], i = 0; i < msgSign.length; i++) {
    // Doing it nibble by nibble makes sure we keep leading zeros
    msgSignHex.push(((msgSign[i] >>> 4) & 0xF).toString(16));
    msgSignHex.push((msgSign[i] & 0xF).toString(16));
  }
  msgSignHex = msgSignHex.join('');
  var httpHeaders = {'Key': pubKey, 'Sign': msgSignHex};
  var fetchOptions = {'method': 'post', 'headers': httpHeaders, 'payload': msg};
  var response = UrlFetchApp.fetch('https://btc-e.com/tapi', fetchOptions);
  return response.getContentText();
};
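A possible way to call it from another Apps Script function (a sketch; the key pair string below is a dummy placeholder):
// Sketch: call the helper and parse the JSON reply. The key pair is a placeholder.
var reply = btceHttpPost('PUBLICKEY,PRIVATEKEY', 'getInfo');
var info = JSON.parse(reply);
Logger.log(info);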
The problem looks to be with your params object. You have method set three times in the same object, which is a source of confusion.
Next, take a look at the documentation for UrlFetchApp.fetch() (https://developers.google.com/apps-script/reference/url-fetch/url-fetch-app#fetch(String,Object)). The method option can take a value of post, get, delete or put.
The getInfo should probably be appended to your URL to make it
var tokenEndpoint = "https://btc-e.com/tapi/getInfo"
Per the docs, you also have to put more parameters into the request: nonce, API key, etc. Use this as a starting point, revisit the documentation, and get back to SO if you still have trouble.
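As an alternative to changing the URL, the btceHttpPost answer above keeps the two meanings of method apart by putting the HTTP verb in the fetch options and the BTC-e method name in the payload; a minimal sketch of that separation (untested; the Key and Sign values are the placeholders from your question):
// Sketch only: the HTTP verb lives in options.method, the API method lives in the payload.
var payload = {
  method: 'getInfo',   // BTC-e API method
  nonce: nonce
};
var options = {
  method: 'post',      // HTTP verb for UrlFetchApp
  headers: { 'Key': token, 'Sign': sign },  // placeholders from the question
  payload: payload
};
var response = UrlFetchApp.fetch('https://btc-e.com/tapi', options);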