I'm trying to run a script in Bing Ads that gets performance data and writes it to Google Sheets. I based my code on Microsoft's example: https://learn.microsoft.com/en-us/advertising/scripts/examples/calling-google-services
However, my code won't work.
function main() {
// Set these fields based on the option you chose for getting an access token.
const credentials = {
accessToken: '',
clientId: '',
clientSecret: '',
refreshToken: ''
};
// To get a fileId or spreadsheetId, please see
// https://developers.google.com/sheets/api/guides/concepts#spreadsheet_id.
// The file must contain a single bid multiplier value (for example, 1.1),
// which is used to update keyword bids.
const fileId = 'INSERT FILE ID HERE';
// The spreadsheet must contain 3 valid keyword IDs in cells A2, A3, and A4.
const spreadsheetId = 'INSERT SPREADSHEET ID HERE';
// The email address to send a notification email to at the end of the script.
const notificationEmail = 'INSERT EMAIL HERE';
var driveApi = GoogleApis.createDriveService(credentials);
// Read bid multiplier from the file.
// Reference: https://developers.google.com/drive/api/v3/reference/files/export
var bidMultiplier = driveApi.files.export({ fileId: fileId, mimeType: 'text/csv' }).body;
Logger.log(`read bid multiplier ${bidMultiplier}`);
var sheetsApi = GoogleApis.createSheetsService(credentials);
// Write the old and new bid values back to the spreadsheet.
// Reference: https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchUpdate
var campaignData = [];
var campaigns = AdsApp.campaigns().withCondition('Cost > 0').forDateRange('THIS_MONTH');
var campaignIterator = campaigns.get();
var iterator = 0;
while(campaignIterator.hasNext()) {
var campaign = campaignIterator.next();
var campaignName = campaign.getName();
var stats = campaign.getStats();
var cost = stats.getCost();
var clicks = stats.getClicks();
var conversions = stats.getConversions();
var impressions = stats.getImpressions();
campaignData[iterator] = {"CampaignName":campaignName,"cost":cost,"clicks":clicks,"conversions":conversions,"impressions":impressions};
iterator++;
}
var updateResponse = sheetsApi.spreadsheets.values.batchUpdate({ spreadsheetId: spreadsheetId }, {
data: [
{ range: 'A2:G2', values: campaignData.map(x => [x]) },
],
valueInputOption: 'USER_ENTERED'
});
Logger.log(`updated ${updateResponse.result.totalUpdatedCells} cells`);
var gmailApi = GoogleApis.createGmailService(credentials);
var email = [
`To: ${notificationEmail}`,
'Subject: Google Services script',
'',
`Your script ran successfully ✓ and updated ${updateResponse.result.totalUpdatedCells} cells.`
].join('\n');
// Send the notification email.
// Reference: https://developers.google.com/gmail/api/v1/reference/users/messages/send
var sendResponse = gmailApi.users.messages.send({ userId: 'me' }, { raw: Base64.encode(email) });
Logger.log(`sent email thread ${sendResponse.result.threadId}`);
}
var GoogleApis;
(function (GoogleApis) {
function createSheetsService(credentials) {
return createService("https://sheets.googleapis.com/$discovery/rest?version=v4", credentials);
}
GoogleApis.createSheetsService = createSheetsService;
function createDriveService(credentials) {
return createService("https://www.googleapis.com/discovery/v1/apis/drive/v3/rest", credentials);
}
GoogleApis.createDriveService = createDriveService;
function createGmailService(credentials) {
return createService("https://www.googleapis.com/discovery/v1/apis/gmail/v1/rest", credentials);
}
GoogleApis.createGmailService = createGmailService;
// Creation logic based on https://developers.google.com/discovery/v1/using#usage-simple
function createService(url, credentials) {
var content = UrlFetchApp.fetch(url).getContentText();
var discovery = JSON.parse(content);
var baseUrl = discovery['rootUrl'] + discovery['servicePath'];
var accessToken = getAccessToken(credentials);
var service = build(discovery, {}, baseUrl, accessToken);
return service;
}
function createNewMethod(method, baseUrl, accessToken) {
return (urlParams, body) => {
var urlPath = method.path;
var queryArguments = [];
for (var name in urlParams) {
var paramConfg = method.parameters[name];
if (!paramConfg) {
throw `Unexpected url parameter ${name}`;
}
switch (paramConfg.location) {
case 'path':
urlPath = urlPath.replace('{' + name + '}', urlParams[name]);
break;
case 'query':
queryArguments.push(`${name}=${urlParams[name]}`);
break;
default:
throw `Unknown location ${paramConfg.location} for url parameter ${name}`;
}
}
var url = baseUrl + urlPath;
if (queryArguments.length > 0) {
url += '?' + queryArguments.join('&');
}
var httpResponse = UrlFetchApp.fetch(url, { contentType: 'application/json', method: method.httpMethod, payload: JSON.stringify(body), headers: { Authorization: `Bearer ${accessToken}` }, muteHttpExceptions: true });
var responseContent = httpResponse.getContentText();
var responseCode = httpResponse.getResponseCode();
var parsedResult;
try {
parsedResult = JSON.parse(responseContent);
} catch (e) {
parsedResult = false;
}
var response = new Response(parsedResult, responseContent, responseCode);
if (responseCode >= 200 && responseCode <= 299) {
return response;
}
throw response;
}
}
function Response(result, body, status) {
this.result = result;
this.body = body;
this.status = status;
}
Response.prototype.toString = function () {
return this.body;
}
function build(discovery, collection, baseUrl, accessToken) {
for (var name in discovery.resources) {
var resource = discovery.resources[name];
collection[name] = build(resource, {}, baseUrl, accessToken);
}
for (var name in discovery.methods) {
var method = discovery.methods[name];
collection[name] = createNewMethod(method, baseUrl, accessToken);
}
return collection;
}
function getAccessToken(credentials) {
if (credentials.accessToken) {
return credentials.accessToken;
}
var tokenResponse = UrlFetchApp.fetch('https://www.googleapis.com/oauth2/v4/token', { method: 'post', contentType: 'application/x-www-form-urlencoded', muteHttpExceptions: true, payload: { client_id: credentials.clientId, client_secret: credentials.clientSecret, refresh_token: credentials.refreshToken, grant_type: 'refresh_token' } });
var responseCode = tokenResponse.getResponseCode();
var responseText = tokenResponse.getContentText();
if (responseCode >= 200 && responseCode <= 299) {
var accessToken = JSON.parse(responseText)['access_token'];
return accessToken;
}
throw responseText;
}
})(GoogleApis || (GoogleApis = {}));
// Base64 implementation from https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/master/lib/msal-core/src/Utils.ts
class Base64 {
static encode(input) {
const keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
let output = "";
let chr1, chr2, chr3, enc1, enc2, enc3, enc4;
var i = 0;
input = this.utf8Encode(input);
while (i < input.length) {
chr1 = input.charCodeAt(i++);
chr2 = input.charCodeAt(i++);
chr3 = input.charCodeAt(i++);
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
}
else if (isNaN(chr3)) {
enc4 = 64;
}
output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) + keyStr.charAt(enc3) + keyStr.charAt(enc4);
}
return output.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}
static utf8Encode(input) {
input = input.replace(/\r\n/g, "\n");
var utftext = "";
for (var n = 0; n < input.length; n++) {
var c = input.charCodeAt(n);
if (c < 128) {
utftext += String.fromCharCode(c);
}
else if ((c > 127) && (c < 2048)) {
utftext += String.fromCharCode((c >> 6) | 192);
utftext += String.fromCharCode((c & 63) | 128);
}
else {
utftext += String.fromCharCode((c >> 12) | 224);
utftext += String.fromCharCode(((c >> 6) & 63) | 128);
utftext += String.fromCharCode((c & 63) | 128);
}
}
return utftext;
}
}
The first part of the code, which reads values from the spreadsheet, works, but when it gets to writing the data to Google Sheets, I get an error and I don't know what it means.
{
"error": {
"code": 404,
"message": "Requested entity was not found.",
"status": "NOT_FOUND"
}
}
at (script code:142:7)
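As an aside, a 404 "Requested entity was not found" from the Sheets API usually means the spreadsheetId is wrong or not accessible to the account that authorized the token. Separately, values.batchUpdate expects each range's values to be a 2-D array (one inner array per row of cells), whereas campaignData.map(x => [x]) produces rows containing a whole object. A sketch of the expected shape, reusing the fields built above (the range is illustrative):
// Sketch: one inner array per row, with primitive cell values.
var values = campaignData.map(function (x) {
  return [x.CampaignName, x.cost, x.clicks, x.conversions, x.impressions];
});
var updateResponse = sheetsApi.spreadsheets.values.batchUpdate(
  { spreadsheetId: spreadsheetId },
  {
    // five columns to match the five fields; extend the range to cover all rows
    data: [{ range: 'A2:E' + (values.length + 1), values: values }],
    valueInputOption: 'USER_ENTERED'
  }
);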
I created two custom content types, ProjectContract and PaymentRequest. Under PaymentRequest, I have a reference field Contract which I would like to use to reference ProjectContract. When I am creating/changing a PaymentRequest, I need the following:
how can I initialize the Content Picker to display the ContractNumber field of the available ProjectContracts?
how can I display the selected ProjectContract's ContractNumber under the ReferenceField Grid control?
The SN JavaScript code and the MVC code contain/return fixed field values. I did not find any setting where I can add custom fields to show.
First of all, what is the version of your SN package? The OData.svc request will not work on older versions; it is available from 6.2.
About OData, here is a link: http://wiki.sensenet.com/OData_REST_API
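For example, reading one field of a single content item over OData would look something like this (the content path and the "contract-001" name below are placeholders, matching the fields from this question):
// Hypothetical OData request: read the ContractNumber field of one ProjectContract.
$.ajax({
  url: "/OData.svc/Root/MyContracts('contract-001')?metadata=no&$select=Path,ContractNumber",
  dataType: "json",
  success: function (d) {
    console.log(d.d.ContractNumber);
  }
});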
There is another way to solve it, but for this you need to modify the existing SN code.
You need to copy the /Root/Global/scripts/sn/SN.Picker.js file into your skin folder with the same structure (/Root/Skins/[yourskinfolder]/scripts/sn/SN.Picker.js).
You need to copy the /Root/Global/scripts/sn/SN.ReferenceGrid.js file into your skin folder as well.
Do not modify the original SN files, because they will be overwritten by an SN update.
Next step: copy the following code to line 1068, before the $grid.jqGrid({ line, inside the InitGrid function.
...
var neededTypeName = "ProjectContract";
var neededFieldName = "ContractNumber";
var findField = false;
o2 = (function () {
var result = [];
var itemArray = [];
$.each(o2, function (index, el) {
el.ContentField = "";
result.push(el);
if (el.ContentTypeName == neededTypeName) {
itemArray.push([index, el.Path]);
findField = true;
}
});
if (findField) {
$.each(itemArray, function (itemIndex, itemElArray) {
var itemId = itemElArray[0];
var itemEl = itemElArray[1];
var thisLength = itemEl.length;
var thislastSplash = itemEl.lastIndexOf("/");
var thisPath = itemEl.substring(0, thislastSplash) + "('" + itemEl.substring(thislastSplash + 1, thisLength) + "')";
$.ajax({
url: "/oData.svc" + thisPath + "?metadata=no$select=Path," + neededFieldName,
dataType: "json",
async: false,
success: function (d) {
result[itemId].ContentField = d.d[neededFieldName];
}
});
});
colNames.splice(6, 0, "ContentField");
colModel.splice(6, 0, { index: "ContentField", name: "ContentField", width: 100 });
return result;
}
return o2;
})();
...
$grid.jqGrid({
...
The "neededTypeName" may contains your content type value, and the "neededFieldName" may contains the field name you want to render.
The other will build up the grid.
This will modify the Content picker table.
You need to add this code into the GetResultDataFromRow function, at line 660 before the return of the function.
...
if (rowdata.ContentField != undefined) {
result.ContentField = rowdata.ContentField;
}
...
This will add the selected item's properties from the Content Picker to the reference field table.
Then you need to open SN.ReferenceGrid.js and add the following code to the init function, before the var $grid = $("#" + displayAreaId); line:
var neededTypeName = "CustomItem2";
var neededFieldName = "Custom2Num";
var findField = false;
var alreadyAdded = false;
var btnAttr = $("#"+addButtonId).attr("onClick");
if (btnAttr.indexOf(neededTypeName) > -1) {
alreadyAdded = true;
colNames[4].width = 150;
colModel[4].width = 150;
colNames.splice(3, 0, "ContentField");
colModel.splice(3, 0, { index: "ContentField", name: "ContentField", width: 60 });
}
initialSelection = (function () {
var result = [];
var itemArray = [];
$.each(initialSelection, function (index, el) {
el.ContentField = "";
result.push(el);
if (el.ContentTypeName == neededTypeName) {
itemArray.push([index, el.Path]);
findField = true;
}
});
if (findField) {
$.each(itemArray, function (itemIndex, itemElArray) {
var itemId = itemElArray[0];
var itemEl = itemElArray[1];
var thisLength = itemEl.length;
var thislastSplash = itemEl.lastIndexOf("/");
var thisPath = itemEl.substring(0, thislastSplash) + "('" + itemEl.substring(thislastSplash + 1, thisLength) + "')";
$.ajax({
url: "/oData.svc" + thisPath + "?metadata=no$select=Path," + neededFieldName,
dataType: "json",
async: false,
success: function (d) {
result[itemId].ContentField = d.d[neededFieldName];
}
});
});
if (!alreadyAdded) {
colNames.splice(3, 0, "ContentField");
colModel.splice(3, 0, { index: "ContentField", name: "ContentField", width: 100 });
}
return result;
}
return initialSelection;
})();
I hope this helps, but knowing your SN version would be useful.
I'd like to send base64 image as an attachment to a trello card through the API
POST /1/cards/[card id or shortlink]/attachments
There's a file field, but the docs don't specify what the data there should look like.
Refs: https://developers.trello.com/advanced-reference/card#post-1-cards-card-id-or-shortlink-attachments
Any idea?
Short answer: Trello's API only accepts binary attachments via multipart/form-data. Examples below.
Long answer:
The Trello API to add attachments and images is frustratingly under-documented. They do have a coffeescript Client.js for those of us using Javascript to simplify all the basic operations: https://trello.com/1/client.coffee
Using the vanilla Client.js file I have been able to attach links and text files. While the cURL example shows pulling a binary file in from a hard drive, that doesn't work for those of us running entirely on a server or client where we don't have permission to create a file.
From a LOT of trial and error, I've determined that all binary data (images, documents, etc.) must be attached using multipart/form-data. Here is a jQuery snippet that will take a URL to an item, load it into memory, and then send it to Trello.
var opts = {'key': 'YOUR TRELLO KEY', 'token': 'YOUR TRELLO TOKEN'};
var xhr = new XMLHttpRequest();
xhr.open('get', params); // params is a URL to a file to grab
xhr.responseType = 'blob'; // Use blob to get the file's mimetype
xhr.onload = function() {
var fileReader = new FileReader();
fileReader.onload = function() {
var filename = (params.split('/').pop().split('#')[0].split('?')[0]) || params || '?'; // Removes # or ? after filename
var file = new File([this.result], filename);
var form = new FormData(); // this is the formdata Trello needs
form.append("file", file);
$.each(['key', 'token'], function(iter, item) {
form.append(item, opts[item] || 'ERROR! Missing "' + item + '"'); // key/token live directly on opts here
});
$.extend(opts, {
// the POST must target the attachments endpoint for your card,
// e.g. https://api.trello.com/1/cards/[card id or shortlink]/attachments
url: 'https://api.trello.com/1/cards/yourCardId/attachments',
method: "POST",
data: form,
cache: false,
contentType: false,
processData: false
});
return $.ajax(opts);
};
fileReader.readAsArrayBuffer(xhr.response); // Use filereader on blob to get content
};
xhr.send();
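Since the question starts from a base64 string rather than a URL, the same idea without the intermediate download would look something like this (a sketch; the card id, key, token, and mime type are placeholders):
// Sketch: attach a base64-encoded image to a card via multipart/form-data.
// base64Data is assumed to be the raw payload, without a "data:image/png;base64," prefix.
function attachBase64ToCard(cardId, base64Data, filename, key, token) {
  var byteString = atob(base64Data); // decode base64 into a binary string
  var bytes = new Uint8Array(byteString.length);
  for (var i = 0; i < byteString.length; i++) {
    bytes[i] = byteString.charCodeAt(i);
  }
  var form = new FormData();
  form.append('file', new Blob([bytes], { type: 'image/png' }), filename);
  form.append('key', key);
  form.append('token', token);
  return $.ajax({
    url: 'https://api.trello.com/1/cards/' + cardId + '/attachments',
    method: 'POST',
    data: form,
    cache: false,
    contentType: false, // let the browser set the multipart boundary
    processData: false
  });
}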
I have submitted a new coffeescript for Trello developer support to consider uploading to replace Client.js. It adds a "Trello.upload(url)" that does this work.
I've also attached here for convenience in JS form.
// Generated by CoffeeScript 1.12.4
(function() {
var opts={"version":1,"apiEndpoint":"https://api.trello.com","authEndpoint":"https://trello.com"};
var deferred, isFunction, isReady, ready, waitUntil, wrapper,
slice = [].slice;
wrapper = function(window, jQuery, opts) {
var $, Trello, apiEndpoint, authEndpoint, authorizeURL, baseURL, collection, fn, fn1, i, intentEndpoint, j, key, len, len1, localStorage, location, parseRestArgs, readStorage, ref, ref1, storagePrefix, token, type, version, writeStorage;
$ = jQuery;
key = opts.key, token = opts.token, apiEndpoint = opts.apiEndpoint, authEndpoint = opts.authEndpoint, intentEndpoint = opts.intentEndpoint, version = opts.version;
baseURL = apiEndpoint + "/" + version + "/";
location = window.location;
Trello = {
version: function() {
return version;
},
key: function() {
return key;
},
setKey: function(newKey) {
key = newKey;
},
token: function() {
return token;
},
setToken: function(newToken) {
token = newToken;
},
rest: function() {
var args, error, method, params, path, ref, success;
method = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : [];
ref = parseRestArgs(args), path = ref[0], params = ref[1], success = ref[2], error = ref[3];
opts = {
url: "" + baseURL + path,
type: method,
data: {},
dataType: "json",
success: success,
error: error
};
if (!$.support.cors) {
opts.dataType = "jsonp";
if (method !== "GET") {
opts.type = "GET";
$.extend(opts.data, {
_method: method
});
}
}
if (key) {
opts.data.key = key;
}
if (token) {
opts.data.token = token;
}
if (params != null) {
$.extend(opts.data, params);
}
if (method === 'UPLOAD' && typeof (params) === "string" && params.length > 5) {
var xhr = new XMLHttpRequest();
xhr.open('get', params);
xhr.responseType = 'blob'; // Use blob to get the mimetype
xhr.onload = function() {
var fileReader = new FileReader();
fileReader.onload = function() {
var filename = (params.split('/').pop().split('#')[0].split('?')[0]) || params || '?'; // Removes # or ? after filename
var file = new File([this.result], filename);
var form = new FormData();
form.append("file", file);
$.each(['key', 'token'], function(iter, item) {
form.append(item, opts.data[item] || 'ERROR! Missing "' + item + '"');
});
$.extend(opts, {
method: "POST",
data: form,
cache: false,
contentType: false,
processData: false
});
return $.ajax(opts);
};
fileReader.readAsArrayBuffer(xhr.response); // Use filereader on blob to get content
};
xhr.send();
} else {
return $.ajax(opts);
}
},
authorized: function() {
return token != null;
},
deauthorize: function() {
token = null;
writeStorage("token", token);
},
authorize: function(userOpts) {
var k, persistToken, ref, regexToken, scope, v;
opts = $.extend(true, {
type: "redirect",
persist: true,
interactive: true,
scope: {
read: true,
write: false,
account: false
},
expiration: "30days"
}, userOpts);
regexToken = /[&#]?token=([0-9a-f]{64})/;
persistToken = function() {
if (opts.persist && (token != null)) {
return writeStorage("token", token);
}
};
if (opts.persist) {
if (token == null) {
token = readStorage("token");
}
}
if (token == null) {
token = (ref = regexToken.exec(location.hash)) != null ? ref[1] : void 0;
}
if (this.authorized()) {
persistToken();
location.hash = location.hash.replace(regexToken, "");
return typeof opts.success === "function" ? opts.success() : void 0;
}
if (!opts.interactive) {
return typeof opts.error === "function" ? opts.error() : void 0;
}
scope = ((function() {
var ref1, results;
ref1 = opts.scope;
results = [];
for (k in ref1) {
v = ref1[k];
if (v) {
results.push(k);
}
}
return results;
})()).join(",");
switch (opts.type) {
case "popup":
(function() {
var authWindow, height, left, origin, receiveMessage, ref1, top, width;
waitUntil("authorized", (function(_this) {
return function(isAuthorized) {
if (isAuthorized) {
persistToken();
return typeof opts.success === "function" ? opts.success() : void 0;
} else {
return typeof opts.error === "function" ? opts.error() : void 0;
}
};
})(this));
width = 420;
height = 470;
left = window.screenX + (window.innerWidth - width) / 2;
top = window.screenY + (window.innerHeight - height) / 2;
origin = (ref1 = /^[a-z]+:\/\/[^\/]*/.exec(location)) != null ? ref1[0] : void 0;
authWindow = window.open(authorizeURL({
return_url: origin,
callback_method: "postMessage",
scope: scope,
expiration: opts.expiration,
name: opts.name
}), "trello", "width=" + width + ",height=" + height + ",left=" + left + ",top=" + top);
receiveMessage = function(event) {
var ref2;
if (event.origin !== authEndpoint || event.source !== authWindow) {
return;
}
if ((ref2 = event.source) != null) {
ref2.close();
}
if ((event.data != null) && /[0-9a-f]{64}/.test(event.data)) {
token = event.data;
} else {
token = null;
}
if (typeof window.removeEventListener === "function") {
window.removeEventListener("message", receiveMessage, false);
}
isReady("authorized", Trello.authorized());
};
return typeof window.addEventListener === "function" ? window.addEventListener("message", receiveMessage, false) : void 0;
})();
break;
default:
window.location = authorizeURL({
redirect_uri: location.href,
callback_method: "fragment",
scope: scope,
expiration: opts.expiration,
name: opts.name
});
}
},
addCard: function(options, next) {
var baseArgs, getCard;
baseArgs = {
mode: 'popup',
source: key || window.location.host
};
getCard = function(callback) {
var height, left, returnUrl, top, width;
returnUrl = function(e) {
var data;
window.removeEventListener('message', returnUrl);
try {
data = JSON.parse(e.data);
if (data.success) {
return callback(null, data.card);
} else {
return callback(new Error(data.error));
}
} catch (error1) {}
};
if (typeof window.addEventListener === "function") {
window.addEventListener('message', returnUrl, false);
}
width = 500;
height = 600;
left = window.screenX + (window.outerWidth - width) / 2;
top = window.screenY + (window.outerHeight - height) / 2;
return window.open(intentEndpoint + "/add-card?" + $.param($.extend(baseArgs, options)), "trello", "width=" + width + ",height=" + height + ",left=" + left + ",top=" + top);
};
if (next != null) {
return getCard(next);
} else if (window.Promise) {
return new Promise(function(resolve, reject) {
return getCard(function(err, card) {
if (err) {
return reject(err);
} else {
return resolve(card);
}
});
});
} else {
return getCard(function() {});
}
}
};
ref = ["GET", "PUT", "POST", "DELETE", "UPLOAD"];
fn = function(type) {
return Trello[type.toLowerCase()] = function() {
return this.rest.apply(this, [type].concat(slice.call(arguments)));
};
};
for (i = 0, len = ref.length; i < len; i++) {
type = ref[i];
fn(type);
}
Trello.del = Trello["delete"];
ref1 = ["actions", "cards", "checklists", "boards", "lists", "members", "organizations", "lists"];
fn1 = function(collection) {
return Trello[collection] = {
get: function(id, params, success, error) {
return Trello.get(collection + "/" + id, params, success, error);
}
};
};
for (j = 0, len1 = ref1.length; j < len1; j++) {
collection = ref1[j];
fn1(collection);
}
window.Trello = Trello;
authorizeURL = function(args) {
var baseArgs;
baseArgs = {
response_type: "token",
key: key
};
return authEndpoint + "/" + version + "/authorize?" + $.param($.extend(baseArgs, args));
};
parseRestArgs = function(arg) {
var error, params, path, success;
path = arg[0], params = arg[1], success = arg[2], error = arg[3];
if (isFunction(params)) {
error = success;
success = params;
params = {};
}
path = path.replace(/^\/*/, "");
return [path, params, success, error];
};
localStorage = window.localStorage;
if (localStorage != null) {
storagePrefix = "trello_";
readStorage = function(key) {
return localStorage[storagePrefix + key];
};
writeStorage = function(key, value) {
if (value === null) {
return delete localStorage[storagePrefix + key];
} else {
return localStorage[storagePrefix + key] = value;
}
};
} else {
readStorage = writeStorage = function() {};
}
};
deferred = {};
ready = {};
waitUntil = function(name, fx) {
if (ready[name] != null) {
return fx(ready[name]);
} else {
return (deferred[name] != null ? deferred[name] : deferred[name] = []).push(fx);
}
};
isReady = function(name, value) {
var fx, fxs, i, len;
ready[name] = value;
if (deferred[name]) {
fxs = deferred[name];
delete deferred[name];
for (i = 0, len = fxs.length; i < len; i++) {
fx = fxs[i];
fx(value);
}
}
};
isFunction = function(val) {
return typeof val === "function";
};
wrapper(window, jQuery, opts);
}).call(this);
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.2.3/jquery.min.js"></script>
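Based on the code above, usage would look something like this (the card id, key, and token are placeholders):
// Hypothetical usage of the added UPLOAD verb: Trello.upload(path, fileUrl).
Trello.setKey('YOUR TRELLO KEY');
Trello.setToken('YOUR TRELLO TOKEN');
Trello.upload('cards/yourCardId/attachments', 'https://example.com/image.png');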
I'm trying to get the exchanges in Ripple, and I found this Data API, which is working. But I want to use the Ripple WebSocket tool for some reasons. Is there any WebSocket equivalent for this Data API?
I think the closest equivalent is the "tx_history" command on the WebSocket, but I'm sorry to tell you that its JSON result is not equal to your specific Data API result.
The Ripple Data API v2 is served over plain HTTP (AJAX). See their result JSON formatter for exchanges:
} else if (resp.rows.length) {
resp.rows[0] = {
base_currency: resp.rows[0].base_currency,
base_issuer: resp.rows[0].base_issuer,
base_amount: resp.rows[0].base_amount,
counter_amount: resp.rows[0].counter_amount,
counter_currency: resp.rows[0].counter_currency,
counter_issuer: resp.rows[0].counter_issuer,
rate: resp.rows[0].rate,
executed_time: resp.rows[0].executed_time,
ledger_index: resp.rows[0].ledger_index,
buyer: resp.rows[0].buyer,
seller: resp.rows[0].seller,
taker: resp.rows[0].taker,
provider: resp.rows[0].provider,
autobridged_currency: resp.rows[0].autobridged_currency,
autobridged_issuer: resp.rows[0].autobridged_issuer,
offer_sequence: resp.rows[0].offer_sequence,
tx_type: resp.rows[0].tx_type,
tx_index: resp.rows[0].tx_index,
node_index: resp.rows[0].node_index,
tx_hash: resp.rows[0].tx_hash
};
}
res.csv(resp.rows, filename);
} else {
res.json({
result: 'success',
count: resp.rows.length,
marker: resp.marker,
exchanges: resp.rows
});
}
}
and it can only be accessed via a GET URL:
route: '/v2/exchanges/{:base}/{:counter}'
which is bound in their server.js:
app.get('/v2/exchanges/:base/:counter', routes.getExchanges);
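So a request against their public Data API instance would look something like this (an illustrative pair; the data.ripple.com host and the issuer placeholder are assumptions, while descending and limit are options the handler reads):
GET https://data.ripple.com/v2/exchanges/XRP/USD+{issuer-address}?descending=true&limit=5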
And as a last hint, this is their database query using HBase:
HbaseClient.getExchanges = function (options, callback) {
var base = options.base.currency + '|' + (options.base.issuer || '');
var counter = options.counter.currency + '|' + (options.counter.issuer || '');
var table;
var keyBase;
var startRow;
var endRow;
var descending;
var columns;
if (counter.toLowerCase() > base.toLowerCase()) {
keyBase = base + '|' + counter;
} else {
keyBase = counter + '|' + base;
options.invert = true;
}
if (!options.interval) {
table = 'exchanges';
descending = options.descending ? true : false;
options.unreduced = true;
//only need certain columns
if (options.reduce) {
columns = [
'd:base_amount',
'd:counter_amount',
'd:rate',
'f:executed_time',
'f:buyer',
'f:seller',
'f:taker'
];
}
} else if (exchangeIntervals.indexOf(options.interval) !== -1) {
keyBase = options.interval + '|' + keyBase;
descending = options.descending ? true : false;
table = 'agg_exchanges';
} else {
callback('invalid interval: ' + options.interval);
return;
}
startRow = keyBase + '|' + options.start.hbaseFormatStartRow();
endRow = keyBase + '|' + options.end.hbaseFormatStopRow();
if (options.autobridged) {
options.filterstring = "DependentColumnFilter('f', 'autobridged_currency')";
if (columns) {
columns.push('f:autobridged_currency');
}
}
this.getScanWithMarker(this, {
table: table,
startRow: startRow,
stopRow: endRow,
marker: options.marker,
limit: options.limit,
descending: descending,
columns: columns,
filterString: options.filterstring
}, function (err, resp) {
if (!resp) {
resp = {rows: []};
}
if (!resp.rows) {
resp.rows = [];
}
if (options.reduce && options.unreduced) {
if (descending) {
resp.rows.reverse();
}
resp.reduced = reduce(resp.rows);
} else if (table === 'exchanges') {
resp.rows = formatExchanges(resp.rows);
} else {
resp.rows = formatAggregates(resp.rows);
}
callback(err, resp);
});
/** formatExchanges */
function formatExchanges (rows) {
rows.forEach(function(row) {
var key = row.rowkey.split('|');
delete row.base_issuer;
delete row.base_currency;
delete row.counter_issuer;
delete row.counter_currency;
row.base_amount = parseFloat(row.base_amount);
row.counter_amount = parseFloat(row.counter_amount);
row.rate = parseFloat(row.rate);
row.offer_sequence = Number(row.offer_sequence || 0);
row.ledger_index = Number(row.ledger_index);
row.tx_index = Number(key[6]);
row.node_index = Number(key[7]);
row.time = utils.unformatTime(key[4]).unix();
});
if (options.invert) {
rows = invertPair(rows);
}
return rows;
}
/** formatAggregates */
function formatAggregates (rows) {
rows.forEach(function(row) {
var key = row.rowkey.split('|');
row.base_volume = parseFloat(row.base_volume);
row.counter_volume = parseFloat(row.counter_volume);
row.buy_volume = parseFloat(row.buy_volume);
row.count = Number(row.count);
row.open = parseFloat(row.open);
row.high = parseFloat(row.high);
row.low = parseFloat(row.low);
row.close = parseFloat(row.close);
row.vwap = parseFloat(row.vwap);
row.close_time = Number(row.close_time);
row.open_time = Number(row.open_time);
});
if (options.invert) {
rows = invertPair(rows);
}
return rows;
}
/** if the base/counter key was inverted, we need to swap some of the values in the results */
function invertPair (rows) {
var swap;
var i;
if (options.unreduced) {
for (i=0; i<rows.length; i++) {
rows[i].rate = 1/rows[i].rate;
//swap base and counter vol
swap = rows[i].base_amount;
rows[i].base_amount = rows[i].counter_amount;
rows[i].counter_amount = swap;
//swap buyer and seller
swap = rows[i].buyer;
rows[i].buyer = rows[i].seller;
rows[i].seller = swap;
}
} else {
for (i=0; i<rows.length; i++) {
//swap base and counter vol
swap = rows[i].base_volume;
rows[i].base_volume = rows[i].counter_volume;
rows[i].counter_volume = swap;
//swap high and low
swap = 1/rows[i].high;
rows[i].high = 1/rows[i].low;
rows[i].low = swap;
//invert open, close, vwap
rows[i].open = 1/rows[i].open;
rows[i].close = 1/rows[i].close;
rows[i].vwap = 1/rows[i].vwap;
//invert buy_volume
rows[i].buy_volume /= rows[i].vwap;
}
}
return rows;
}
/** reduce: reduce all rows */
function reduce (rows) {
var buyVolume = 0;
var reduced = {
open: 0,
high: 0,
low: Infinity,
close: 0,
base_volume: 0,
counter_volume: 0,
buy_volume: 0,
count: 0,
open_time: 0,
close_time: 0
};
rows = formatExchanges(rows);
// filter out small XRP amounts
rows = rows.filter(function(row) {
if (options.base.currency === 'XRP' && row.base_amount < 0.0005) {
return false;
} else if (options.counter.currency === 'XRP' && row.counter_amount < 0.0005) {
return false;
} else {
return true;
}
});
if (rows.length) {
reduced.open_time = moment.unix(rows[0].time).utc().format();
reduced.close_time = moment.unix(rows[rows.length-1].time).utc().format();
reduced.open = rows[0].rate;
reduced.close = rows[rows.length -1].rate;
reduced.count = rows.length;
} else {
reduced.low = 0;
return reduced;
}
rows.forEach(function(row) {
reduced.base_volume += row.base_amount;
reduced.counter_volume += row.counter_amount;
if (row.rate < reduced.low) reduced.low = row.rate;
if (row.rate > reduced.high) reduced.high = row.rate;
if (row.buyer === row.taker) {
reduced.buy_volume += row.base_amount;
}
});
reduced.vwap = reduced.counter_volume / reduced.base_volume;
return reduced;
}
};
Maybe you should build a custom WebSocket bridge that makes your RPC call over plain HTTP and then serves the result over the WebSocket (ws) protocol.
In Node.js you can do it roughly like this:
// for http
var http = require('http');
// for websocket
var ws = require("nodejs-websocket")
// the RPC request body; it must be defined before its length is measured below
// (the shape here is an assumption; adjust it to the call you need)
var post_data = JSON.stringify({ method: 'tx_history', params: [{ start: 0 }] });
var options = {
host: 'URL-RPC-HERE',
port: '80',
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': post_data.length
}
};
var req = http.request(options, function(res) {
// after getting the response, which is the <res>,
// we can hand it to the websocket bridge
upToWebsocket(res);
});
// write the RPC body and finish the request
req.write(post_data);
req.end();
//Upgrade to websocket
var upToWebsocket = function(res) {
var server = ws.createServer(function (conn) {
// nodejs-websocket emits "text" for string frames (there is no "json" event)
conn.on("text", function (str) {
conn.sendText(str.toUpperCase()+"!!!")
})
conn.on("close", function (code, reason) {
console.log("Connection closed")
})
}).listen(8001)
}
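A client could then connect to that bridge like this (a sketch using the same nodejs-websocket package; the message shape is a placeholder):
// Hypothetical client for the bridge above, which just echoes text back.
var ws = require("nodejs-websocket");
var conn = ws.connect("ws://localhost:8001", function () {
  conn.sendText(JSON.stringify({ command: "exchanges", base: "XRP", counter: "USD" }));
});
conn.on("text", function (str) {
  console.log("received:", str);
});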
Also, if you have rippled running on a server, this does not help, because there is no RPC or WebSocket command that supports the exchanges API.
Utilizing the following AdWords Script to export to BigQuery, the BigQuery.Jobs.insert call is causing the script to terminate due to "Empty response". Any reason the call is not getting a response?
var ACCOUNTS = ['xxx','xxx'];
var CONFIG = {
BIGQUERY_PROJECT_ID: 'xxx',
BIGQUERY_DATASET_ID: 'xxx',
// Truncate existing data, otherwise will append.
TRUNCATE_EXISTING_DATASET: true,
TRUNCATE_EXISTING_TABLES: true,
// Back up reports to Google Drive.
WRITE_DATA_TO_DRIVE: false,
// Folder to put all the intermediate files.
DRIVE_FOLDER: 'Adwords Big Query Test',
// Default date range over which statistics fields are retrieved.
DEFAULT_DATE_RANGE: '20140101,20140105',
// Lists of reports and fields to retrieve from AdWords.
REPORTS: [{NAME: 'KEYWORDS_PERFORMANCE_REPORT',
CONDITIONS: 'WHERE Impressions>0',
FIELDS: {'AccountDescriptiveName' : 'STRING',
'Date' : 'STRING',
'CampaignId' : 'STRING',
'CampaignName' : 'STRING',
'AdGroupId' : 'STRING',
'AdGroupName' : 'STRING',
'Id' : 'STRING',
'Criteria' : 'STRING',
'KeywordMatchType' : 'STRING',
'AdNetworkType1' : 'STRING',
'AdNetworkType2' : 'STRING',
'Device' : 'STRING',
'AveragePosition' : 'STRING',
'QualityScore' : 'STRING',
'CpcBid' : 'STRING',
'TopOfPageCpc' : 'STRING',
'Impressions' : 'STRING',
'Clicks' : 'STRING',
'ConvertedClicks' : 'STRING',
'Cost' : 'STRING',
'Conversions' : 'STRING'
}
}],
RECIPIENT_EMAILS: [
'xxx',
]
};
function main() {
createDataset();
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
createTable(reportConfig);
}
var folder = getDriveFolder();
// Get an account iterator.
var accountIterator = MccApp.accounts().withIds(ACCOUNTS).withLimit(10).get();
var jobIdMap = {};
while (accountIterator.hasNext()) {
// Get the current account.
var account = accountIterator.next();
// Select the child account.
MccApp.select(account);
// Run reports against child account.
var accountJobIds = processReports(folder, account.getCustomerId());
jobIdMap[account.getCustomerId()] = accountJobIds;
}
waitTillJobsComplete(jobIdMap);
sendEmail(jobIdMap);
}
function createDataset() {
if (datasetExists()) {
if (CONFIG.TRUNCATE_EXISTING_DATASET) {
BigQuery.Datasets.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, {'deleteContents' : true});
Logger.log('Truncated dataset.');
} else {
Logger.log('Dataset %s already exists. Will not recreate.',
CONFIG.BIGQUERY_DATASET_ID);
return;
}
}
// Create new dataset.
var dataSet = BigQuery.newDataset();
dataSet.friendlyName = CONFIG.BIGQUERY_DATASET_ID;
dataSet.datasetReference = BigQuery.newDatasetReference();
dataSet.datasetReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
dataSet.datasetReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
dataSet = BigQuery.Datasets.insert(dataSet, CONFIG.BIGQUERY_PROJECT_ID);
Logger.log('Created dataset with id %s.', dataSet.id);
}
/**
* Checks if dataset already exists in project.
*
* @return {boolean} Returns true if dataset already exists.
*/
function datasetExists() {
// Get a list of all datasets in project.
var datasets = BigQuery.Datasets.list(CONFIG.BIGQUERY_PROJECT_ID);
var datasetExists = false;
// Iterate through each dataset and check for an id match.
if (datasets.datasets != null) {
for (var i = 0; i < datasets.datasets.length; i++) {
var dataset = datasets.datasets[i];
if (dataset.datasetReference.datasetId == CONFIG.BIGQUERY_DATASET_ID) {
datasetExists = true;
break;
}
}
}
return datasetExists;
}
function createTable(reportConfig) {
if (tableExists(reportConfig.NAME)) {
if (CONFIG.TRUNCATE_EXISTING_TABLES) {
BigQuery.Tables.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, reportConfig.NAME);
Logger.log('Truncated dataset %s.', reportConfig.NAME);
} else {
Logger.log('Table %s already exists. Will not recreate.',
reportConfig.NAME);
return;
}
}
// Create new table.
var table = BigQuery.newTable();
var schema = BigQuery.newTableSchema();
var bigQueryFields = [];
// Add account column to table.
var accountFieldSchema = BigQuery.newTableFieldSchema();
accountFieldSchema.description = 'AccountId';
accountFieldSchema.name = 'AccountId';
accountFieldSchema.type = 'STRING';
bigQueryFields.push(accountFieldSchema);
// Add each field to table schema.
var fieldNames = Object.keys(reportConfig.FIELDS);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var bigQueryFieldSchema = BigQuery.newTableFieldSchema();
bigQueryFieldSchema.description = fieldName;
bigQueryFieldSchema.name = fieldName;
bigQueryFieldSchema.type = reportConfig.FIELDS[fieldName];
bigQueryFields.push(bigQueryFieldSchema);
}
schema.fields = bigQueryFields;
table.schema = schema;
table.friendlyName = reportConfig.NAME;
table.tableReference = BigQuery.newTableReference();
table.tableReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
table.tableReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
table.tableReference.tableId = reportConfig.NAME;
table = BigQuery.Tables.insert(table, CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
Logger.log('Created table with id %s.', table.id);
}
function tableExists(tableId) {
// Get a list of all tables in the dataset.
var tables = BigQuery.Tables.list(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
var tableExists = false;
// Iterate through each table and check for an id match.
if (tables.tables != null) {
for (var i = 0; i < tables.tables.length; i++) {
var table = tables.tables[i];
if (table.tableReference.tableId == tableId) {
tableExists = true;
break;
}
}
}
return tableExists;
}
function processReports(folder, accountId) {
var jobIds = [];
// Iterate over each report type.
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
Logger.log('Running report %s for account %s', reportConfig.NAME,
accountId);
// Get data as csv
var csvData = retrieveAdwordsReport(reportConfig, accountId);
// If configured, back up data.
if (CONFIG.WRITE_DATA_TO_DRIVE) {
var fileName = reportConfig.NAME + '_' + accountId;
folder.createFile(fileName, csvData, MimeType.CSV);
Logger.log('Exported data to Drive folder ' +
CONFIG.DRIVE_FOLDER + ' for report ' + fileName);
}
// Convert to Blob format.
var blobData = Utilities.newBlob(csvData, 'application/octet-stream');
// Load data
var jobId = loadDataToBigquery(reportConfig, blobData);
jobIds.push(jobId);
}
return jobIds;
}
function retrieveAdwordsReport(reportConfig, accountId) {
var fieldNames = Object.keys(reportConfig.FIELDS);
var report = AdWordsApp.report(
'SELECT ' + fieldNames.join(',') +
' FROM ' + reportConfig.NAME + ' ' + reportConfig.CONDITIONS +
' DURING ' + CONFIG.DEFAULT_DATE_RANGE);
var rows = report.rows();
var csvRows = [];
// Header row
csvRows.push('AccountId,'+fieldNames.join(','));
// Iterate over each row.
while (rows.hasNext()) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var fieldValue = row[fieldName].toString();
var fieldType = reportConfig.FIELDS[fieldName];
/* Strip off % and perform any other formatting here.
if ((fieldType == 'FLOAT' || fieldType == 'INTEGER') &&
fieldValue.charAt(fieldValue.length - 1) == '%') {
fieldValue = fieldValue.substring(0, fieldValue.length - 1);
}*/
// Add double quotes to any string values.
if (fieldType == 'STRING') {
fieldValue = fieldValue.replace(',', ''); //Handle fields with comma in value returned
fieldValue = fieldValue.replace('"', ''); //Handle fields with double quotes in value returned
fieldValue = fieldValue.replace('+', ''); //Handle fields with "+" in value returned
fieldValue = '"' + fieldValue + '"';
}
csvRow.push(fieldValue);
}
csvRows.push(csvRow.join(','));
}
Logger.log('Downloaded ' + reportConfig.NAME + ' for account ' + accountId +
' with ' + csvRows.length + ' rows.');
return csvRows.join('\n');
}
function getDriveFolder() {
var folders = DriveApp.getFoldersByName(CONFIG.DRIVE_FOLDER);
// Assume first folder is the correct one.
if (folders.hasNext()) {
Logger.log('Folder name found. Using existing folder.');
return folders.next();
}
return DriveApp.createFolder(CONFIG.DRIVE_FOLDER);
}
function loadDataToBigquery(reportConfig, data) {
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() + s4() + s4() + s4();
}
var makeId = guid();
var job = {
jobReference: {
jobId: makeId
},
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return job.jobReference.jobId;
}
function waitTillJobsComplete(jobIdMap) {
var complete = false;
var remainingJobs = [];
var accountIds = Object.keys(jobIdMap);
for (var i = 0; i < accountIds.length; i++){
var accountJobIds = jobIdMap[accountIds[i]];
remainingJobs.push.apply(remainingJobs, accountJobIds);
}
while (!complete) {
if (AdWordsApp.getExecutionInfo().getRemainingTime() < 5){
Logger.log('Script is about to timeout, jobs ' + remainingJobs.join(',') +
' are still incomplete.');
}
remainingJobs = getIncompleteJobs(remainingJobs);
if (remainingJobs.length == 0) {
complete = true;
}
if (!complete) {
Logger.log(remainingJobs.length + ' jobs still being processed.');
// Wait 5 seconds before checking status again.
Utilities.sleep(5000);
}
}
Logger.log('All jobs processed.');
}
function getIncompleteJobs(jobIds) {
var remainingJobIds = [];
for (var i = 0; i < jobIds.length; i++) {
var jobId = jobIds[i];
var getJob = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, jobId);
if (getJob.status.state != 'DONE') {
remainingJobIds.push(jobId);
}
}
return remainingJobIds;
}
It appears the "Empty Response" error is being thrown on:
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
I have tried quite a few tweaks, but the answer doesn't appear obvious to me. Thanks for any help!
I could be wrong, but I think the problem was with the jobId, because of an issue in the guid() function: a missing "+" sign.
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() s4() + s4() + s4();
}
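The corrected return statement would be:
return s4() + s4() + s4() + s4() + s4() + s4() + s4() + s4();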
Why not use the jobId from the response, like below?
var job = {
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return insertJob.jobReference.jobId;
Added
In this case I would suggest logging the jobId (makeId = guid()) and checking the job status via the link below: https://cloud.google.com/bigquery/docs/reference/v2/jobs/get#try-it
Enter the ProjectId and JobId, and you will at least see what is going on with your job.
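You can also check the status from within the script itself, using the same BigQuery advanced service the script already calls:
// Log the state and any error of a load job right after inserting it.
var jobStatus = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, makeId);
Logger.log('Job %s state: %s', makeId, jobStatus.status.state);
if (jobStatus.status.errorResult) {
  Logger.log('Job error: %s', jobStatus.status.errorResult.message);
}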
AdWords places a "--" in for null values. If you define your report fields as anything but STRING (e.g., FLOAT, INTEGER, etc.), the insert will fail because BigQuery can't convert the dash-dash to a float or integer.
Try setting all of your fields to STRING and see if that solves the problem.
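Alternatively, you could keep the non-string types and sanitize the null marker while building the CSV, e.g. inside the field loop of retrieveAdwordsReport (a sketch):
// Map AdWords' "--" null marker to an empty cell so FLOAT/INTEGER
// columns can still be parsed by the BigQuery load job.
if (fieldValue === '--') {
  fieldValue = '';
}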
Have you tried setting the WRITE_DATA_TO_DRIVE parameter to true to confirm that the report export is successful? How large is the result? I get the same error when attempting an insert greater than 10MB (~25k rows, depending on columns). If the file exported to Google Drive looks good, you can add a condition to the while loop in retrieveAdwordsReport to limit the file size. There was also a post on https://groups.google.com/forum/#!forum/adwords-scripts mentioning an issue when including AdNetworkType columns: https://groups.google.com/forum/#!searchin/adwords-scripts/adnetworktype2%7Csort:relevance/adwords-scripts/yK57JHCt3Cw/Cl1SjFaQBQAJ.
Limit result size:
var processedRows = 0;
// Iterate over each row.
while (rows.hasNext() && ++processedRows < 5000) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
if (processedRows % 1000 == 0)
{
Logger.log('Processed %s rows.',processedRows);
}
...