How to write a SuiteScript 2.0 Map/Reduce script that takes a CSV file or JSON data as input, processes it, and creates a customer record - suitescript2.0

Map/Reduce script that takes a CSV file or JSON data as input, processes it, and creates a customer record

SuiteAnswers article 43795 has an excellent "Processing Invoices Example" sample script that you can work from; it is pasted below. The same article also has additional information on each map/reduce stage.
/**
* @NApiVersion 2.x
* @NScriptType MapReduceScript
*/
define(['N/search', 'N/record', 'N/email', 'N/runtime', 'N/error'],
function(search, record, email, runtime, error){
function handleErrorAndSendNotification(e, stage){
log.error('Stage: ' + stage + ' failed', e);
var author = -5;
var recipients = 'notify@xxxxxx.com';
var subject = 'Map/Reduce script ' + runtime.getCurrentScript().id + ' failed for stage: ' + stage;
var body = 'An error occurred with the following information:\n' + 'Error code: ' + e.name + '\n' + 'Error msg: ' + e.message;
email.send({
author: author,
recipients: recipients,
subject: subject,
body: body
});
}
function handleErrorIfAny(summary){
var inputSummary = summary.inputSummary;
var mapSummary = summary.mapSummary;
var reduceSummary = summary.reduceSummary;
if (inputSummary.error)
{
var e = error.create({
name: 'INPUT_STAGE_FAILED',
message: inputSummary.error
});
handleErrorAndSendNotification(e, 'getInputData');
}
handleErrorInStage('map', mapSummary);
handleErrorInStage('reduce', reduceSummary);
}
function handleErrorInStage(stage, summary){
var errorMsg = [];
summary.errors.iterator().each(function(key, value){
var msg = 'Failure to accept payment from customer id: ' + key + '. Error was: ' + JSON.parse(value).message + '\n';
errorMsg.push(msg);
return true;
});
if (errorMsg.length > 0)
{
var e = error.create({
name: 'RECORD_TRANSFORM_FAILED',
message: JSON.stringify(errorMsg)
});
handleErrorAndSendNotification(e, stage);
}
}
function createSummaryRecord(summary){
try{
var seconds = summary.seconds;
var usage = summary.usage;
var yields = summary.yields;
var rec = record.create({
type: 'customrecord_summary',
});
rec.setValue({
fieldId : 'name',
value: 'Summary for M/R script: ' + runtime.getCurrentScript().id
});
rec.setValue({
fieldId: 'custrecord_time',
value: seconds
});
rec.setValue({
fieldId: 'custrecord_usage',
value: usage
});
rec.setValue({
fieldId: 'custrecord_yields',
value: yields
});
rec.save();
} catch(e){
handleErrorAndSendNotification(e, 'summarize');
}
}
function applyLocationDiscountToInvoice(recordId){
var invoice = record.load({
type: record.Type.INVOICE,
id: recordId,
isDynamic: true
});
var location = invoice.getText({
fieldId: 'location'
});
var discount;
if (location === 'East Coast')
discount = 'Eight Percent';
else if (location === 'West Coast')
discount = 'Five Percent';
else if (location === 'United Kingdom')
discount = 'Nine Percent';
else
discount = '';
invoice.setText({
fieldId: 'discountitem',
text: discount,
ignoreFieldChange : false
});
log.debug(recordId + ' has been updated with location-based discount.');
invoice.save();
}
function getInputData(){
return search.create({
type: record.Type.INVOICE,
filters: [['status', search.Operator.IS, 'open']],
columns: ['entity'],
title: 'Open Invoice Search'
});
}
function map(context){
var searchResult = JSON.parse(context.value);
var invoiceId = searchResult.id;
var entityId = searchResult.values.entity.value;
applyLocationDiscountToInvoice(invoiceId);
context.write({
key: entityId,
value: invoiceId
});
}
function reduce(context){
var customerId = context.key;
var custPayment = record.transform({
fromType: record.Type.CUSTOMER,
fromId: customerId,
toType: record.Type.CUSTOMER_PAYMENT,
isDynamic: true
});
var lineCount = custPayment.getLineCount('apply');
for (var j = 0; j < lineCount; j++){
custPayment.selectLine({
sublistId: 'apply',
line: j
});
custPayment.setCurrentSublistValue({
sublistId: 'apply',
fieldId: 'apply',
value: true
});
}
var custPaymentId = custPayment.save();
context.write({
key: custPaymentId
});
}
function summarize(summary){
handleErrorIfAny(summary);
createSummaryRecord(summary);
}
return {
getInputData: getInputData,
map: map,
reduce: reduce,
summarize: summarize
};
});
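The sample above works from a saved search, but the same structure applies when the input is a CSV file in the File Cabinet or a JSON string: getInputData returns the parsed rows (each element becomes one map invocation) and map creates the customer record. Below is a minimal sketch under those assumptions; the file path, the companyname/email columns, and the subsidiary value are all hypothetical and need to be adjusted for your account.
/**
* @NApiVersion 2.x
* @NScriptType MapReduceScript
*/
define(['N/file', 'N/record'], function(file, record) {
    function getInputData() {
        // Hypothetical path; the CSV is assumed to have a header row: companyname,email
        var csvFile = file.load({ id: 'SuiteScripts/customers.csv' });
        var lines = csvFile.getContents().split(/\r?\n/);
        var headers = lines[0].split(',');
        var rows = [];
        for (var i = 1; i < lines.length; i++) {
            if (!lines[i]) continue;
            var values = lines[i].split(',');
            var row = {};
            for (var j = 0; j < headers.length; j++) {
                row[headers[j]] = values[j];
            }
            rows.push(row); // each element is handed to map() as a JSON string
        }
        return rows; // for JSON input, simply return the parsed array instead
    }
    function map(context) {
        var data = JSON.parse(context.value);
        var customer = record.create({
            type: record.Type.CUSTOMER,
            isDynamic: true
        });
        customer.setValue({ fieldId: 'companyname', value: data.companyname });
        customer.setValue({ fieldId: 'email', value: data.email });
        customer.setValue({ fieldId: 'subsidiary', value: '1' }); // account-specific assumption
        var customerId = customer.save();
        context.write({
            key: data.companyname,
            value: customerId
        });
    }
    function summarize(summary) {
        summary.output.iterator().each(function(key, value) {
            log.audit('Created customer', key + ' -> internal id ' + value);
            return true;
        });
    }
    return {
        getInputData: getInputData,
        map: map,
        summarize: summarize
    };
});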

Related

SuiteScript Workflow Action error SSS_USAGE_LIMIT_EXCEEDED and email not attaching to related records

I am receiving the following error when I try to process more than approx 15 transactions:
Error: SSS_USAGE_LIMIT_EXCEEDED
{"type":"error.SuiteScriptError","name":"SSS_USAGE_LIMIT_EXCEEDED","message":"Script Execution Usage Limit Exceeded","stack":["createError(N/error)","onAction(/SuiteScripts/sdf_ignore/Send Remittance PFA Workflow Action.js:84)"],"cause":{"type":"internal error","code":"SSS_USAGE_LIMIT_EXCEEDED","details":"Script Execution Usage Limit Exceeded","userEvent":null,"stackTrace":["createError(N/error)","onAction(/SuiteScripts/sdf_ignore/Send Remittance PFA Workflow Action.js:84)"],"notifyOff":false},"id":"","notifyOff":false,"userFacing":false}
The script is a workflow action script that is triggered when the user clicks on a button to send email remittance advice.
This works for the most part unless I exceed a certain number of transactions.
Do I need to use another script type?
Or can I modify the following script to reduce its governance usage?
/**
*@NApiVersion 2.x
*@NScriptType WorkflowActionScript
*/
define([
"N/search",
"N/record",
"N/render",
"N/file",
"N/xml",
"N/email",
], function (search, record, render, file, xml, email) {
function onAction(context) {
var fileObj = [];
var record = context.newRecord;
log.debug("record", record);
var batchId = record.getValue({ fieldId: "name" });
var id = record.id;
log.debug("recordid", record.id);
var vendorpaymentSearchObj = search.create({
type: "vendorpayment",
filters: [
["type", "anyof", "VendPymt"],
"AND",
["custbody_9997_pfa_record", "anyof", id],
],
columns: [
search.createColumn({
name: "transactionnumber",
summary: "GROUP",
label: "Transaction Number",
}),
search.createColumn({
name: "formulatext",
summary: "GROUP",
formula: "{entity}",
label: "Vendor",
}),
search.createColumn({
name: "formulatext",
summary: "GROUP",
formula:
"CASE WHEN {vendor.custentity_2663_email_address_notif} IS NULL THEN {vendor.email} ELSE {vendor.custentity_2663_email_address_notif} END",
label: "Email",
}),
search.createColumn({
name: "total",
summary: "SUM",
label: "Amount (Transaction Total)",
}),
search.createColumn({
name: "currency",
summary: "GROUP",
label: "Currency",
}),
search.createColumn({
name: "trandate",
summary: "GROUP",
sort: search.Sort.ASC,
label: "Date",
}),
search.createColumn({
name: "internalid",
summary: "GROUP",
label: "internalid",
}),
search.createColumn({
name: "internalid",
join: "vendor",
summary: "GROUP",
label: "Internal ID",
}),
search.createColumn({
name: "internalid",
summary: "GROUP",
label: "Internal ID",
}),
],
});
var searchResultCount = vendorpaymentSearchObj.runPaged().count;
log.debug("vendorpaymentSearchObj result count", searchResultCount);
vendorpaymentSearchObj.run().each(function (result) {
var emailAddress = result.getValue(result.columns[2]);
var transactionNumber = result.getValue(result.columns[0]);
var amount = result.getValue(result.columns[3]);
var date = result.getValue(result.columns[5]);
var vendor = result.getValue(result.columns[1]);
var resultId = result.getValue(result.columns[6]);
var vendorId = result.getValue(result.columns[7]);
var transactionId = result.getValue(result.columns[8]);
log.debug(
"emailAddress: ",
emailAddress +
" transaction bumber: " +
transactionNumber +
" amount: " +
amount +
" date: " +
date +
" vendor: " +
vendor +
" resultId " +
resultId +
" transactionId " +
transactionId
);
var pdfFile = render.transaction({
entityId: parseInt(resultId),
printMode: render.PrintMode.PDF,
formId: 109,
inCustLocale: true,
});
pdfFile.folder = 1351;
var fileId = pdfFile.save();
var pdffile2 = file.load({ id: fileId });
// var fileObj = file.load({ id: parseInt(fileId) });
var mergeResult = render.mergeEmail({
templateId: 8,
// entity: {
// type: "employee",
// id: parseInt(recipient),
// },
entity: {
type: "vendor",
id: parseInt(vendorId),
},
recipient: {
type: "vendor",
id: parseInt(vendorId),
},
supportCaseId: null,
transactionId: parseInt(resultId),
customRecord: null,
// {
// type: "customrecord_2663_entity_bank_details",
// id: parseInt(bankDetailsId),
// },
});
var emailSubject = mergeResult.subject;
var emailBody = mergeResult.body;
//create a placeholder in the original HTML with an element called NLVENDOR. The script will replace that placeholder with a value below
emailSubject = emailSubject.replace("NLVENDOR", vendor);
// emailBody = emailBody.replace("NLDOCNUMBER", bankDetailsId);
var emailString = JSON.stringify(emailAddress);
email.send({
author: -5,
// recipients: 2020,
recipients: emailAddress,
subject: emailSubject,
body: emailBody,
attachments: [pdffile2],
relatedRecords: {
entity: parseInt(vendorId),
customRecord: {
id: parseInt(id),
recordType: 'customrecord_2663_file_admin', //an integer value
},
// transactionId: 38326,
},
});
return true;
});
/*
vendorpaymentSearchObj.id="customsearch1658554258593";
vendorpaymentSearchObj.title="Bill Payments in a Payment Batch (copy)";
var newSearchId = vendorpaymentSearchObj.save();
*/
}
return {
onAction: onAction,
};
});
Another issue I am having with this script is that the email.send method doesn't throw an error for the custom record, but it doesn't actually attach the email messages to the stated record type either. It doesn't allow me to attach the emails to a transaction at all (I get an 'unexpected error' if I do).
I finally got a Map/Reduce working for this:
/**
*@NApiVersion 2.x
*@NScriptType MapReduceScript
*/
define([
"N/search",
"N/record",
"N/render",
"N/file",
"N/xml",
"N/email",
"N/runtime",
], function (search, record, render, file, xml, email, runtime) {
function getInputData(context) {
var scriptObj = runtime.getCurrentScript();
var recordId = scriptObj.getParameter("custscript_recordid");
// var record = context.newRecord;
// log.debug("record", record);
// var batchId = record.getValue({ fieldId: "name" });
var id = recordId;
log.debug("recordid", recordId);
var vendorpaymentSearchObj = search.create({
type: "vendorpayment",
filters: [
["type", "anyof", "VendPymt"],
"AND",
["custbody_9997_pfa_record", "anyof", id],
],
columns: [
//0
search.createColumn({
name: "transactionnumber",
summary: "GROUP",
label: "Transaction Number",
}),
//1
search.createColumn({
name: "formulatext",
summary: "GROUP",
formula: "{entity}",
label: "Vendor",
}),
//2
search.createColumn({
name: "formulatext",
summary: "GROUP",
formula:
"CASE WHEN {vendor.custentity_2663_email_address_notif} IS NULL THEN {vendor.email} ELSE {vendor.custentity_2663_email_address_notif} END",
label: "Email",
}),
//3
search.createColumn({
name: "total",
summary: "SUM",
label: "Amount (Transaction Total)",
}),
//4
search.createColumn({
name: "currency",
summary: "GROUP",
label: "Currency",
}),
//5
search.createColumn({
name: "trandate",
summary: "GROUP",
sort: search.Sort.ASC,
label: "Date",
}),
//6
search.createColumn({
name: "internalid",
join: "vendor",
summary: "GROUP",
label: "vendorId",
}),
//7
search.createColumn({
name: "internalid",
summary: "GROUP",
label: "searchResultId",
}),
],
});
log.debug(
"vendorpaymentSearchObj result count",
vendorpaymentSearchObj.runPaged().count
);
var vendorPayments = [];
vendorpaymentSearchObj.run().each(function (result) {
vendorPayments.push({
emailAddress: result.getValue(result.columns[2]),
transactionNumber: result.getValue(result.columns[0]),
amount: result.getValue(result.columns[3]),
date: result.getValue(result.columns[5]),
vendor: result.getValue(result.columns[1]),
resultId: result.getValue(result.columns[7]),
vendorId: result.getValue(result.columns[6]),
id: id,
// transactionId: result.getValue(result.columns[8]),
});
return true;
});
return vendorPayments;
}
/**
* @param {MapReduceContext.map} context
*/
function map(context) {
try {
log.debug("context", context);
const result = JSON.parse(context.value);
log.debug("result", result);
var emailAddress = result.emailAddress;
var transactionNumber = result.transactionNumber;
var amount = result.amount;
var date = result.date;
var vendor = result.vendor;
var resultId = result.resultId;
var vendorId = result.vendorId;
var id = result.id;
// var transactionId = result.transactionId;
log.debug(
"emailAddress: ",
emailAddress +
" transaction bumber: " +
transactionNumber +
" amount: " +
amount +
" date: " +
date +
" vendor: " +
vendor +
" resultId " +
resultId +
// " transactionId " +
// transactionId +
"vendorId " +
vendorId
);
// for (var i = 0; i < context.value.length; i++) {
var pdfFile = render.transaction({
entityId: parseInt(resultId),
printMode: render.PrintMode.PDF,
formId: 109,
});
pdfFile.folder = 1351;
var fileId = pdfFile.save();
var pdffile2 = file.load({ id: fileId });
// context.write({
// key: context.value[i],
// value: [
// [pdffile2],
// pdfFile,
// fileId,
// emailAddress,
// transactionNumber,
// amount,
// date,
// vendor,
// resultId,
// vendorId,
// transactionId,
// ],
// });
log.debug("fileid: " + fileId + pdfFile + pdffile2);
context.write({
key: resultId,
value: JSON.stringify({
vendorId: vendorId,
vendor: vendor,
fileId: fileId,
emailAddress: emailAddress,
id: id,
}),
});
// }
} catch (ex) {
log.error("Error on map", ex.message + ex.error);
}
}
// var fileObj = file.load({ id: parseInt(fileId) });
/**
* @param {MapReduceContext.reduce} context
*/
function reduce(context) {
try {
var reduceResults = context.values; //note: this is context.values (plural), unlike context.value in the earlier stages, and it has not been JSON.parse'd yet
log.debug("reduceResults", reduceResults);
var pdffile2 = [];
for (var i = 0; i < reduceResults.length; i++) {
//note: this is context.value(S)
log.debug("vendorId", JSON.parse(reduceResults[i]).vendorId);
log.debug("key", context.key);
if (reduceResults) {
var mergeResult = render.mergeEmail({
templateId: 8,
// entity: {
// type: "employee",
// id: parseInt(recipient),
// },
entity: {
type: "vendor",
id: parseInt(JSON.parse(reduceResults[i]).vendorId),
},
recipient: {
type: "vendor",
id: parseInt(JSON.parse(reduceResults[i]).vendorId),
},
supportCaseId: null,
transactionId: parseInt(context.key),
customRecord: null,
});
log.debug("mergeResult", mergeResult);
var emailSubject = mergeResult.subject;
var emailBody = mergeResult.body;
log.debug("email body", emailBody);
var pdf = file.load({
id: JSON.parse(reduceResults[i]).fileId,
});
var vendorName = JSON.parse(reduceResults[i]).vendor;
log.debug("vendorname", vendorName);
pdffile2.push(pdf);
//create a placeholder in the original HTML with an element called NLVENDOR. This will replace that with a value that is part of the script
var emailSubjectNew = emailSubject.replace("NLVENDOR", vendorName);
log.debug("email subject", emailSubjectNew);
var emailString = JSON.parse(reduceResults[i]).emailAddress;
log.debug("emailstring", emailString);
email.send({
author: -5,
recipients: JSON.parse(reduceResults[i]).emailAddress,
subject: emailSubjectNew,
body: emailBody,
attachments: pdffile2,
relatedRecords: {
entity: parseInt(JSON.parse(reduceResults[i]).vendorId),
transactionId: parseInt(context.key),
},
});
}
}
} catch (ex) {
log.error("Error on reduce", ex.message + "" + ex.name);
}
}
/**
* @param {MapReduceContext.summarize} context
*/
function summarize(summary) {
log.debug("context", summary);
var contents = "";
summary.output.iterator().each(function (key, value) {
contents += key + " " + value + "\n";
return true;
});
log.debug("output", contents);
}
return {
getInputData: getInputData,
map: map,
reduce: reduce,
summarize: summarize,
};
});
To me this operation seems more suitable for a Map/Reduce when you don't know how many results the search will return. If it always rendered and sent a single PDF there would be no need for a Map/Reduce, but since the number of results is unknown, a Map/Reduce is the way to go.
If you still want to try to reduce usage on the original script, you can try:
Not saving and loading the PDF - just generate it and send it (assuming keeping the file is not a requirement).
Adding search criteria to narrow down the query.
I'm doubtful that either of these will reduce the usage enough to make a real difference. I would look into the N/task module.
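If you go the Map/Reduce route, the button can stay as a Workflow Action script that does nothing except hand the record id to the Map/Reduce through N/task, so the rendering and emailing run under Map/Reduce governance. A minimal sketch, assuming hypothetical script/deployment ids and the custscript_recordid parameter read by the Map/Reduce above; separately, note that render.transaction returns a file object you could pass straight to email.send's attachments, skipping the save/load round trip if keeping the PDF isn't a requirement.
/**
* @NApiVersion 2.x
* @NScriptType WorkflowActionScript
*/
define(['N/task'], function (task) {
    function onAction(context) {
        // Hypothetical script/deployment ids - replace with your own
        var mrTask = task.create({
            taskType: task.TaskType.MAP_REDUCE,
            scriptId: 'customscript_send_remittance_mr',
            deploymentId: 'customdeploy_send_remittance_mr',
            params: {
                custscript_recordid: context.newRecord.id
            }
        });
        var taskId = mrTask.submit(); // throws if no deployment is free; consider a retry or extra deployments
        log.audit('Remittance Map/Reduce queued', 'task id: ' + taskId);
    }
    return { onAction: onAction };
});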

Add quantity for expense sublist on line commit

I have an expense report transaction where I am trying to sum the quantity field on the sublist and update a custom field (i.e. basically a running total that is updated every time a line is committed)
This is a part of the client script that I have so far:
function sublistChanged(context) {
var currentRecord = context.currentRecord;
var sublistName = context.sublistId;
var sublistFieldName = context.fieldId;
var op = context.operation;
totalExpQuantity = 0;
if (sublistName === "expense") {
for (var i = 0; i < sublistName.length; i++) {
var quantity = currentRecord.getCurrentSublistValue({
sublistId: "expense",
fieldId: "quantity",
});
log.debug("quantity", quantity);
}
totalExpQuantity += parseInt(quantity);
var lineCount = currentRecord.getLineCount({
sublistId: "expense",
});
console.log(lineCount);
currentRecord.setValue({
fieldId: "custbody_mileage_exp_report",
value:
"Total has changed to " +
currentRecord.getValue({
fieldId: "amount",
}) +
" with operation: " +
op +
" total quantity: " +
totalExpQuantity,
});
}
}
Every time a line is committed with a quantity value, I want the total to increment by the value of the line that was committed.
For example, after each line is committed the total should go from 10,000 to 10,500 to 15,500.
I have tried a variation of this code where the following line is part of the 'for loop':
totalExpQuantity += parseInt(quantity);
i.e.
for (var i = 0; i < sublistName.length; i++) {
var quantity = currentRecord.getCurrentSublistValue({
sublistId: "expense",
fieldId: "quantity",
});
totalExpQuantity += parseInt(quantity);
log.debug("quantity", quantity);
}
I get the following result:
Is what I am trying to do possible?
Where am I going wrong in the code? Should it be a different entry point?
I have also tried the postSourcing entry point, though it didn't retrieve the quantity field and returned a blank.
Found the answer. The key change is to loop over getLineCount and read each line with getSublistValue (passing a line index) instead of relying on getCurrentSublistValue. This is the working version of the script:
/**
*@NApiVersion 2.x
*@NScriptType ClientScript
*/
define(["N/search"], function (search) {
function fieldChanged(context) {
try {
var recordObj = context.currentRecord;
var employeeObj = parseInt(
recordObj.getValue({
fieldId: "entity",
})
);
if (context.fieldId == "entity") {
var employeeName = parseInt(employeeObj);
log.debug("employee", employeeName);
var expensereportSearchObj = search.create({
type: "expensereport",
filters: [
["type", "anyof", "ExpRept"],
"AND",
["expensecategory", "anyof", "8"],
"AND",
["expensedate", "within", "thisfiscalyear"],
"AND",
["employee", "anyof", employeeObj],
],
columns: [
search.createColumn({
name: "entityid",
join: "employee",
label: "Name",
}),
search.createColumn({ name: "tranid", label: "Document Number" }),
search.createColumn({
name: "expensecategory",
label: "Expense Category",
}),
search.createColumn({ name: "expensedate", label: "Expense Date" }),
search.createColumn({ name: "currency", label: "Currency" }),
search.createColumn({
name: "quantity",
join: "expenseDetail",
sort: search.Sort.ASC,
label: "Quantity",
}),
],
});
var searchResult = expensereportSearchObj
.run()
.getRange({ start: 0, end: 1000 });
log.debug("result", JSON.stringify(searchResult));
var searchResultCount = expensereportSearchObj.runPaged().count;
log.debug("expensereportSearchObj result count", searchResultCount);
let q = 0;
for (var i = 0; i < searchResult.length; i++) {
var quantity = searchResult[i].getValue(searchResult[i].columns[5]);
log.debug("quantity", quantity);
q += parseInt(quantity);
log.debug("q", q);
}
recordObj.setValue({
fieldId: "custbody_employee_mileage_ytd",
value: q,
});
}
//loop through all results and add +/- to 0. see video for sublists
} catch (error) {
log.debug(
error.name,
"recordObjId: " +
recordObj +
", employee:" +
employeeName +
", message: " +
error.message +
", cause: " +
error.cause
);
}
}
function sublistChanged(context) {
var recordObj = context.currentRecord;
var sublistName = recordObj.getSublist({ sublistId: "expense" });
var lineCount = recordObj.getLineCount({ sublistId: "expense" });
var totalQuantity = 0;
for (i = 0; i < lineCount; i++) {
var expenseCategory = recordObj.getSublistValue({
sublistId: "expense",
fieldId: "category",
line: i,
});
log.debug("expenseCategory", expenseCategory);
var expenseQuantity = recordObj.getSublistValue({
sublistId: "expense",
fieldId: "quantity",
line: i,
});
if (expenseCategory == 8) {
totalQuantity += expenseQuantity;
}
recordObj.setValue({
fieldId: "custbody_mileage_exp_report",
value: totalQuantity,
});
}
}
return {
fieldChanged: fieldChanged,
sublistChanged: sublistChanged,
};
});
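One refinement worth considering for sublistChanged (a sketch, not tested against this form): getSublistValue can return the quantity as a string, so coerce it before summing, and the body field only needs to be set once, after the loop.
function sublistChanged(context) {
    var recordObj = context.currentRecord;
    var lineCount = recordObj.getLineCount({ sublistId: "expense" });
    var totalQuantity = 0;
    for (var i = 0; i < lineCount; i++) {
        var expenseCategory = recordObj.getSublistValue({
            sublistId: "expense",
            fieldId: "category",
            line: i,
        });
        var expenseQuantity = recordObj.getSublistValue({
            sublistId: "expense",
            fieldId: "quantity",
            line: i,
        });
        if (expenseCategory == 8) {
            // quantity may come back as a string, so coerce before adding
            totalQuantity += parseFloat(expenseQuantity) || 0;
        }
    }
    // set the running total once, after all committed lines have been summed
    recordObj.setValue({
        fieldId: "custbody_mileage_exp_report",
        value: totalQuantity,
    });
}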

Cannot pass price into Stripe Checkout nodejs

I am using the embedded Node.js / JavaScript code for Stripe Checkout on my ecommerce website. I am trying to pass the name of the product(s) the customer adds to their cart, and the price as well, so I can display the items and prices on the Stripe Checkout page.
I ran into the issue after making a connection to DB2: I cannot get the price of each item passed into the Stripe checkout session. I think it may have to do with async, but even if it is, I'm not sure how to fix it. I am also receiving the error: "(node:45673) UnhandledPromiseRejectionWarning: Error: Invalid integer: NaN"
(Excuse the messy code; some variables are not in use, just ignore them.)
app.post('/create-checkout-session', (req, res) => {
var amount = stringify(req.body)
console.log(req.body.sessionID)
var userId = req.body.sessionID
console.log("email: " + req.body.customer_email)
var email = req.body.customer_email;
var deliveryTotal = req.body.totalWithDelivery;
var totalVal = amount.split("=");
var totalPrice = parseFloat(totalVal[1]);
//console.log("TOTAL PRICE: " + totalPrice);
var finalPrice = parseFloat(Math.round(totalPrice * 100) / 100);
var finalTotal = parseFloat(Math.round(totalPrice * 100) / 100) + parseFloat(Math.round(deliveryTotal));
console.log("final total: " + finalTotal);
var itemName = ""
var itemPrice = ""
var totalNewPriceTest = ""
//query to database
var productsStripe = "select * from " + userId
console.log(userId)
console.log("query to db for displaying cart on stripe page")
ibmdb.open("DATABASE=BLUDB;HOSTNAME=;PORT=50000;PROTOCOL=TCPIP;UID="";PWD="";", function (err,conn) {
if (err) return console.log(err);
conn.query(productsStripe, function (err, rows) {
if (err) {
console.log(err)
}
console.log(rows)
for(var i = 0; i < rows.length; i++) {
itemName = rows[i]['ITEM']
itemPrice = rows[i]['PRICE']
totalNewPriceTest = parseFloat(rows[i]['PRICE'])
console.log("item name : " + itemName + " " + itemPrice )
totalNewPriceTest = parseFloat(totalNewPriceTest);
console.log("final overall prcie: " + (totalNewPriceTest))
}
console.log("inside productsStripe function.")
console.log("overall prcie: " + totalNewPriceTest)
})
})
totalNewPriceTest = parseFloat(totalNewPriceTest)
var grandTotal = totalNewPriceTest;
var finalGrandTotal = parseFloat(grandTotal)
console.log(parseFloat(finalGrandTotal))
//stripe
const session = stripe.checkout.sessions.create({
shipping_address_collection: {
allowed_countries: ['CA'],
},
payment_method_types: ['card'],
line_items: [
{
price_data: {
currency: 'CAD',
product_data: {
name: itemName,
},
unit_amount: finalGrandTotal,
//finalTotal * 100
},
quantity: 1,
},
],
mode: 'payment',
success_url: 'localhost:1001/successPg',
cancel_url: 'localhost:1001/catalogue',
customer_email: email,
});
console.log(session)
res.json({ id: session.id });
//console.log("customer id" + customer.id)
console.log("totalNewPriceTest " + totalNewPriceTest)
});
Can anyone help? Thank you in advance, and sorry for the terribly written code :(
You have to write the following lines inside the query callback:
totalNewPriceTest = parseFloat(totalNewPriceTest)
var grandTotal = totalNewPriceTest;
var finalGrandTotal = parseFloat(grandTotal)
console.log(parseFloat(finalGrandTotal))
And for the error, check before parsing the data to an int or float, like:
if (!isNaN(field))
value = parseFloat(field);
I did follow what you said - no errors - but it still doesn't reach the Stripe checkout page... it logs Promise { } in the console. I researched this and it once again has to do with async. I'm not sure how to fix it; I read something about .then possibly working as well?
As you have guessed, it's a classic concurrency issue. First of all, this complete guide from MDN explains asynchronous JavaScript very well.
To briefly answer your case, you need to continue executing the Stripe code inside the query block. Why? Because you need to wait for the DB connection to open and then for the query to execute, both of which are asynchronous.
When you move code outside those blocks, you're basically telling JavaScript to execute it in parallel, which in your case is not what you want - you want to wait for the query to finish.
app.post('/create-checkout-session', (req, res) => {
var amount = stringify(req.body)
console.log(req.body.sessionID)
var userId = req.body.sessionID
console.log("email: " + req.body.customer_email)
var email = req.body.customer_email;
var deliveryTotal = req.body.totalWithDelivery;
var totalVal = amount.split("=");
var totalPrice = parseFloat(totalVal[1]);
//console.log("TOTAL PRICE: " + totalPrice);
var finalPrice = parseFloat(Math.round(totalPrice * 100) / 100);
var finalTotal = parseFloat(Math.round(totalPrice * 100) / 100) + parseFloat(Math.round(deliveryTotal));
console.log("final total: " + finalTotal);
var itemName = ""
var itemPrice = ""
var totalNewPriceTest = ""
//query to database
var productsStripe = "select * from " + userId
console.log(userId)
console.log("query to db for displaying cart on stripe page")
ibmdb.open("DATABASE=BLUDB;HOSTNAME=;PORT=50000;PROTOCOL=TCPIP;UID="";PWD="";", function (err,conn) {
if (err) return console.log(err);
conn.query(productsStripe, function (err, rows) {
if (err) {
console.log(err)
}
console.log(rows)
for(var i = 0; i < rows.length; i++) {
itemName = rows[i]['ITEM']
itemPrice = rows[i]['PRICE']
totalNewPriceTest = parseFloat(rows[i]['PRICE'])
console.log("item name : " + itemName + " " + itemPrice )
totalNewPriceTest = parseFloat(totalNewPriceTest);
console.log("final overall prcie: " + (totalNewPriceTest))
}
console.log("inside productsStripe function.")
console.log("overall prcie: " + totalNewPriceTest)
totalNewPriceTest = parseFloat(totalNewPriceTest)
var grandTotal = totalNewPriceTest;
var finalGrandTotal = parseFloat(grandTotal)
console.log(parseFloat(finalGrandTotal))
// continue executing here
//stripe
stripe.checkout.sessions.create({
shipping_address_collection: {
allowed_countries: ['CA'],
},
payment_method_types: ['card'],
line_items: [
{
price_data: {
currency: 'CAD',
product_data: {
name: itemName,
},
unit_amount: finalGrandTotal,
//finalTotal * 100
},
quantity: 1,
},
],
mode: 'payment',
success_url: 'localhost:1001/successPg',
cancel_url: 'localhost:1001/catalogue',
customer_email: email,
}).then((session) => {
console.log(session)
res.json({ id: session.id });
//console.log("customer id" + customer.id)
console.log("totalNewPriceTest " + totalNewPriceTest)
}).catch((err) => {
console.log('stripe err: ', err);
})
})
})
});
Other useful tips to follow:
Don't write business logic inside the router; instead, create a controller file and move the logic into it.
Instead of connecting to the DB on every request, create a DB connection instance and keep it open and available whenever you need it; start by creating its own helper file and then export the connection (see the sketch below).
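For the second point, a minimal sketch of such a helper (db2helper.js is a hypothetical file name, and the connection string is assumed to come from an environment variable rather than being hard-coded):
// db2helper.js - opens one shared DB2 connection and reuses it across requests
const ibmdb = require('ibm_db');

let connPromise = null;

function getConnection() {
    if (!connPromise) {
        connPromise = new Promise((resolve, reject) => {
            ibmdb.open(process.env.DB2_CONN_STRING, (err, conn) => {
                if (err) return reject(err);
                resolve(conn);
            });
        });
    }
    return connPromise;
}

module.exports = { getConnection };

// usage inside the route handler:
// const { getConnection } = require('./db2helper');
// getConnection().then((conn) => conn.query(productsStripe, (err, rows) => { /* ... */ }));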

Writing data from txt file to database in node js

I read data from a text file:
const byline = require('byline');
const fs = require('fs');
var stream = byline(fs.createReadStream('C:/Users/../test.txt'));
var index = 0;
var headers;
var data = [];
stream.on('data', function(line) {
var currentData;
var entry;
var i;
line = line.toString(); // Convert the buffer stream to a string line
//line = Buffer.from(line, 'latin1').toString();
if (index === 0) {
headers = line.split(/[ ]+/);
} else {
currentData = line.split(/[ ]+/);
entry = {};
for (i = 0; i < headers.length; i++) {
entry[headers[i]] = currentData[i];
}
data.push(entry);
}
index++;
});
stream.on("error", function(err) {
console.log(err);
});
stream.on("end", function() {
console.log(data);
console.log("Done");
});
That works just fine. So now I am trying to write this into my database. Therefore I implemented this part at the end of the stream, when all of the data has been read.
stream.on("end", function() {
console.log(data);
console.log("Done");
for(h = 0; h < data.length; h++){
let name = data[h].Name;
let id = data[h].ID;
let text = data[h].Text;
let text1 = data[h].Text1;
let text2 = data[h].Text2;
let text3 = data[h].Text3;
Conn.query('INSERT INTO Config SET ? ', { Name: name, ID: id, Text: text, Text1: text1, Text2: text2, Text3: text3}, function (error, result, fields) {
if (error) throw error;
});
}
return res.send({ error: false, data: result, message: 'New configuration has been created.' });
});
But it seems like this does not work. What am I missing?
You can try the following. Conn.query is asynchronous, so by the time res.send runs none of the inserts have completed (and result is not defined in that scope); wrap each insert in a Promise and respond once they have all resolved:
stream.on("end", function() {
console.log(data);
console.log("Done");
var promises = [];
for (h = 0; h < data.length; h++) {
let name = data[h].Name;
let id = data[h].ID;
let text = data[h].Text;
let text1 = data[h].Text1;
let text2 = data[h].Text2;
let text3 = data[h].Text3;
promises.push(new Promise(function insertIntoDatabase(resolve, reject) {
Conn.query('INSERT INTO Config SET ? ', {
Name: name,
ID: id,
Text: text,
Text1: text1,
Text2: text2,
Text3: text3
}, function(error, result, fields) {
if (error) reject(error);
else resolve();
})
}));
}
Promise.all(promises)
.then(function(data) {
res.send({
error: false,
data: result,
message: 'New configuration has been created.'
});
})
.catch(function(err) {
console.log("Error occured due to : ", err);
res.send({
error: true,
data: null,
message: 'Error during config'
});
});
//It is bad practice to have an async operation that can throw an error inside a synchronous loop
/*
for(h = 0; h < data.length; h++){
let name = data[h].Name;
let id = data[h].ID;
let text = data[h].Text;
let text1 = data[h].Text1;
let text2 = data[h].Text2;
let text3 = data[h].Text3;
Conn.query('INSERT INTO Config SET ? ', { Name: name, ID: id, Text: text, Text1: text1, Text2: text2, Text3: text3}, function (error, result, fields) {
if (error) throw error;
});
}
*/
});

AdWord Script Export to BigQuery "Empty Response"

Using the following AdWords Script to export to BigQuery, the BigQuery.Jobs.insert call is causing the script to terminate due to "Empty response". Any reason the call is not getting a response?
var ACCOUNTS = ['xxx','xxx'];
var CONFIG = {
BIGQUERY_PROJECT_ID: 'xxx',
BIGQUERY_DATASET_ID: 'xxx',
// Truncate existing data, otherwise will append.
TRUNCATE_EXISTING_DATASET: true,
TRUNCATE_EXISTING_TABLES: true,
// Back up reports to Google Drive.
WRITE_DATA_TO_DRIVE: false,
// Folder to put all the intermediate files.
DRIVE_FOLDER: 'Adwords Big Query Test',
// Default date range over which statistics fields are retrieved.
DEFAULT_DATE_RANGE: '20140101,20140105',
// Lists of reports and fields to retrieve from AdWords.
REPORTS: [{NAME: 'KEYWORDS_PERFORMANCE_REPORT',
CONDITIONS: 'WHERE Impressions>0',
FIELDS: {'AccountDescriptiveName' : 'STRING',
'Date' : 'STRING',
'CampaignId' : 'STRING',
'CampaignName' : 'STRING',
'AdGroupId' : 'STRING',
'AdGroupName' : 'STRING',
'Id' : 'STRING',
'Criteria' : 'STRING',
'KeywordMatchType' : 'STRING',
'AdNetworkType1' : 'STRING',
'AdNetworkType2' : 'STRING',
'Device' : 'STRING',
'AveragePosition' : 'STRING',
'QualityScore' : 'STRING',
'CpcBid' : 'STRING',
'TopOfPageCpc' : 'STRING',
'Impressions' : 'STRING',
'Clicks' : 'STRING',
'ConvertedClicks' : 'STRING',
'Cost' : 'STRING',
'Conversions' : 'STRING'
}
}],
RECIPIENT_EMAILS: [
'xxx',
]
};
function main() {
createDataset();
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
createTable(reportConfig);
}
folder = getDriveFolder();
// Get an account iterator.
var accountIterator = MccApp.accounts().withIds(ACCOUNTS).withLimit(10).get();
var jobIdMap = {};
while (accountIterator.hasNext()) {
// Get the current account.
var account = accountIterator.next();
// Select the child account.
MccApp.select(account);
// Run reports against child account.
var accountJobIds = processReports(folder, account.getCustomerId());
jobIdMap[account.getCustomerId()] = accountJobIds;
}
waitTillJobsComplete(jobIdMap);
sendEmail(jobIdMap);
}
function createDataset() {
if (datasetExists()) {
if (CONFIG.TRUNCATE_EXISTING_DATASET) {
BigQuery.Datasets.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, {'deleteContents' : true});
Logger.log('Truncated dataset.');
} else {
Logger.log('Dataset %s already exists. Will not recreate.',
CONFIG.BIGQUERY_DATASET_ID);
return;
}
}
// Create new dataset.
var dataSet = BigQuery.newDataset();
dataSet.friendlyName = CONFIG.BIGQUERY_DATASET_ID;
dataSet.datasetReference = BigQuery.newDatasetReference();
dataSet.datasetReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
dataSet.datasetReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
dataSet = BigQuery.Datasets.insert(dataSet, CONFIG.BIGQUERY_PROJECT_ID);
Logger.log('Created dataset with id %s.', dataSet.id);
}
/**
* Checks if dataset already exists in project.
*
* @return {boolean} Returns true if dataset already exists.
*/
function datasetExists() {
// Get a list of all datasets in project.
var datasets = BigQuery.Datasets.list(CONFIG.BIGQUERY_PROJECT_ID);
var datasetExists = false;
// Iterate through each dataset and check for an id match.
if (datasets.datasets != null) {
for (var i = 0; i < datasets.datasets.length; i++) {
var dataset = datasets.datasets[i];
if (dataset.datasetReference.datasetId == CONFIG.BIGQUERY_DATASET_ID) {
datasetExists = true;
break;
}
}
}
return datasetExists;
}
function createTable(reportConfig) {
if (tableExists(reportConfig.NAME)) {
if (CONFIG.TRUNCATE_EXISTING_TABLES) {
BigQuery.Tables.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, reportConfig.NAME);
Logger.log('Truncated dataset %s.', reportConfig.NAME);
} else {
Logger.log('Table %s already exists. Will not recreate.',
reportConfig.NAME);
return;
}
}
// Create new table.
var table = BigQuery.newTable();
var schema = BigQuery.newTableSchema();
var bigQueryFields = [];
// Add account column to table.
var accountFieldSchema = BigQuery.newTableFieldSchema();
accountFieldSchema.description = 'AccountId';
accountFieldSchema.name = 'AccountId';
accountFieldSchema.type = 'STRING';
bigQueryFields.push(accountFieldSchema);
// Add each field to table schema.
var fieldNames = Object.keys(reportConfig.FIELDS);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var bigQueryFieldSchema = BigQuery.newTableFieldSchema();
bigQueryFieldSchema.description = fieldName;
bigQueryFieldSchema.name = fieldName;
bigQueryFieldSchema.type = reportConfig.FIELDS[fieldName];
bigQueryFields.push(bigQueryFieldSchema);
}
schema.fields = bigQueryFields;
table.schema = schema;
table.friendlyName = reportConfig.NAME;
table.tableReference = BigQuery.newTableReference();
table.tableReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
table.tableReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
table.tableReference.tableId = reportConfig.NAME;
table = BigQuery.Tables.insert(table, CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
Logger.log('Created table with id %s.', table.id);
}
function tableExists(tableId) {
// Get a list of all tables in the dataset.
var tables = BigQuery.Tables.list(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
var tableExists = false;
// Iterate through each table and check for an id match.
if (tables.tables != null) {
for (var i = 0; i < tables.tables.length; i++) {
var table = tables.tables[i];
if (table.tableReference.tableId == tableId) {
tableExists = true;
break;
}
}
}
return tableExists;
}
function processReports(folder, accountId) {
var jobIds = [];
// Iterate over each report type.
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
Logger.log('Running report %s for account %s', reportConfig.NAME,
accountId);
// Get data as csv
var csvData = retrieveAdwordsReport(reportConfig, accountId);
// If configured, back up data.
if (CONFIG.WRITE_DATA_TO_DRIVE) {
var fileName = reportConfig.NAME + '_' + accountId;
folder.createFile(fileName, csvData, MimeType.CSV);
Logger.log('Exported data to Drive folder ' +
CONFIG.DRIVE_FOLDER + ' for report ' + fileName);
}
// Convert to Blob format.
var blobData = Utilities.newBlob(csvData, 'application/octet-stream');
// Load data
var jobId = loadDataToBigquery(reportConfig, blobData);
jobIds.push(jobId);
}
return jobIds;
}
function retrieveAdwordsReport(reportConfig, accountId) {
var fieldNames = Object.keys(reportConfig.FIELDS);
var report = AdWordsApp.report(
'SELECT ' + fieldNames.join(',') +
' FROM ' + reportConfig.NAME + ' ' + reportConfig.CONDITIONS +
' DURING ' + CONFIG.DEFAULT_DATE_RANGE);
var rows = report.rows();
var csvRows = [];
// Header row
csvRows.push('AccountId,'+fieldNames.join(','));
// Iterate over each row.
while (rows.hasNext()) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var fieldValue = row[fieldName].toString();
var fieldType = reportConfig.FIELDS[fieldName];
/* Strip off % and perform any other formatting here.
if ((fieldType == 'FLOAT' || fieldType == 'INTEGER') &&
fieldValue.charAt(fieldValue.length - 1) == '%') {
fieldValue = fieldValue.substring(0, fieldValue.length - 1);
}*/
// Add double quotes to any string values.
if (fieldType == 'STRING') {
fieldValue = fieldValue.replace(',', ''); //Handle fields with comma in value returned
fieldValue = fieldValue.replace('"', ''); //Handle fields with double quotes in value returned
fieldValue = fieldValue.replace('+', ''); //Handle fields with "+" in value returned
fieldValue = '"' + fieldValue + '"';
}
csvRow.push(fieldValue);
}
csvRows.push(csvRow.join(','));
}
Logger.log('Downloaded ' + reportConfig.NAME + ' for account ' + accountId +
' with ' + csvRows.length + ' rows.');
return csvRows.join('\n');
}
function getDriveFolder() {
var folders = DriveApp.getFoldersByName(CONFIG.DRIVE_FOLDER);
// Assume first folder is the correct one.
if (folders.hasNext()) {
Logger.log('Folder name found. Using existing folder.');
return folders.next();
}
return DriveApp.createFolder(CONFIG.DRIVE_FOLDER);
}
function loadDataToBigquery(reportConfig, data) {
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() + s4() + s4() + s4();
}
var makeId = guid();
var job = {
jobReference: {
jobId: makeId
},
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return job.jobReference.jobId;
}
function waitTillJobsComplete(jobIdMap) {
var complete = false;
var remainingJobs = [];
var accountIds = Object.keys(jobIdMap);
for (var i = 0; i < accountIds.length; i++){
var accountJobIds = jobIdMap[accountIds[i]];
remainingJobs.push.apply(remainingJobs, accountJobIds);
}
while (!complete) {
if (AdWordsApp.getExecutionInfo().getRemainingTime() < 5){
Logger.log('Script is about to timeout, jobs ' + remainingJobs.join(',') +
' are still incomplete.');
}
remainingJobs = getIncompleteJobs(remainingJobs);
if (remainingJobs.length == 0) {
complete = true;
}
if (!complete) {
Logger.log(remainingJobs.length + ' jobs still being processed.');
// Wait 5 seconds before checking status again.
Utilities.sleep(5000);
}
}
Logger.log('All jobs processed.');
}
function getIncompleteJobs(jobIds) {
var remainingJobIds = [];
for (var i = 0; i < jobIds.length; i++) {
var jobId = jobIds[i];
var getJob = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, jobId);
if (getJob.status.state != 'DONE') {
remainingJobIds.push(jobId);
}
}
return remainingJobIds;
}
It appears the "Empty Response" error is being thrown on:
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
I have tried quite a few tweaks, but the answer doesn't appear obvious to me. Thanks for any help!
I could be wrong, but I think the problem was with the jobId, caused by an issue in the guid() function - a missing "+" sign:
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() s4() + s4() + s4();
}
Why not use the jobId from the response instead, like below?
var job = {
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return insertJob.jobReference.jobId;
Added: In this case I would suggest logging the jobId (makeId = guid()) and checking the job status via the link below: https://cloud.google.com/bigquery/docs/reference/v2/jobs/get#try-it
Enter the ProjectId and JobId and you will at least see what is going on with your job.
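For example, right after submitting the job in loadDataToBigquery you could log the id and check its status with the same BigQuery.Jobs.get call that getIncompleteJobs already uses (a sketch):
// After BigQuery.Jobs.insert in loadDataToBigquery:
Logger.log('Submitted load job with id: %s', makeId);
var jobCheck = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, makeId);
Logger.log('Job state: %s', jobCheck.status.state);
if (jobCheck.status.errorResult) {
  Logger.log('Job error: %s', JSON.stringify(jobCheck.status.errorResult));
}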
AdWords places a "--" in for null values. If you define your report fields as anything but string (e.g., float, integer, etc.) the insert will fail because it can't convert the dash dash to a float or integer.
Try setting all of your fields to string and see if that solves the problem.
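If you would rather keep numeric column types, another option (a sketch, slotting into the fieldType check inside retrieveAdwordsReport above) is to blank out the dashes before building the CSV row:
// Inside the per-field loop of retrieveAdwordsReport:
if (fieldType == 'FLOAT' || fieldType == 'INTEGER') {
  if (fieldValue == '--') {
    fieldValue = ''; // AdWords uses "--" for null; an empty cell loads cleanly
  } else {
    fieldValue = fieldValue.replace(/[%,]/g, ''); // strip % signs and thousands separators
  }
}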
Have you tried setting the WRITE_DATA_TO_DRIVE parameter to true to confirm that the report export is successful? How large is the result? I get the same error when attempting an insert greater than 10MB (~25k rows depending on columns). If the file export to Google Drive looks good, you can add a condition to the while loop in retrieveAdwordsReport to limit the file size. There was also a post on https://groups.google.com/forum/#!forum/adwords-scripts mentioning an issue when including AdNetworkType columns: https://groups.google.com/forum/#!searchin/adwords-scripts/adnetworktype2%7Csort:relevance/adwords-scripts/yK57JHCt3Cw/Cl1SjFaQBQAJ.
Limit result size:
var processedRows = 0;
// Iterate over each row.
while (rows.hasNext() && ++processedRows < 5000) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
if (processedRows % 1000 == 0)
{
Logger.log('Processed %s rows.',processedRows);
}
...