For a portal user, I want the list of field values to be filtered based on the value chosen by that portal user. For example:
If the portal user chooses a country, only institutions in the selected country should appear in the list of the institutions field.
// Assuming the standard Odoo front-end module wrapper, e.g.:
// var publicWidget = require('web.public.widget');
publicWidget.registry.portalMedical = publicWidget.Widget.extend({
selector: '.o_portal_medical',
events: {
'change select[name="country_id"]': '_onCountryChange',
'change select[name="state_id"]': '_onStateChange',
'change select[name="institution_id"]': '_onInstitutionChange',
},
start: function () {
var def = this._super.apply(this, arguments);
this.$state = this.$('select[name="state_id"]');
this.$stateOptions = this.$state.filter(':enabled').find('option:not(:first)');
this.$institution = this.$('select[name="institution_id"]');
this.$institutionOptions = this.$institution.filter(':enabled').find('option:not(:first)');
this.$doctor = this.$('select[name="doctor_id"]');
this.$doctorOptions = this.$doctor.filter(':enabled').find('option:not(:first)');
this._adaptAddressForm();
return def;
},
_adaptAddressForm: function () {
var $country = this.$('select[name="country_id"]');
var countryID = ($country.val() || 0);
this.$stateOptions.detach();
var $displayedState = this.$stateOptions.filter('[data-country_id=' + countryID + ']');
var nb = $displayedState.appendTo(this.$state).show().length;
//this.$state.parent().toggle(nb >= 1);
var $state = this.$('select[name="state_id"]');
var stateID = ($state.val() || 0);
this.$institutionOptions.detach();
var $displayedInstitution = this.$institutionOptions.filter('[data-state_id=' + stateID + ']');
var mb = $displayedInstitution.appendTo(this.$institution).show().length;
//this.$institution.parent().toggle(mb >= 1);
},
_onCountryChange: function () {
this._adaptAddressForm();
},
_onStateChange: function () {
this._adaptAddressForm();
},
});
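Note that the events map above binds _onInstitutionChange, but no such method is defined, so selecting an institution will throw. A minimal sketch of the missing handler, assuming each doctor option carries a data-institution_id attribute (mirroring the state/institution pattern above); it belongs inside the widget definition, next to _onCountryChange and _onStateChange:
_onInstitutionChange: function () {
    // Keep only the doctors attached to the selected institution.
    var institutionID = (this.$institution.val() || 0);
    this.$doctorOptions.detach();
    var $displayedDoctor = this.$doctorOptions.filter('[data-institution_id=' + institutionID + ']');
    $displayedDoctor.appendTo(this.$doctor).show();
},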
I am trying to insert multiple rows into a PostgreSQL server from my Node.js server based on an array. I have a static variable, user_id, which would be the same for all entries, but I want to vary filter_name based on an array. My goal is to avoid making multiple SQL insert calls.
arrayOfFilters = ["CM", "CBO", "SA", "EPS", "AD"]
await db.query(
"INSERT INTO filters(filter_name, requests_id)VALUES($1, $2)",
[arrayOfFiltersParams, user_id]);
I am hoping to have a row in the filters table for each one of the filters found in the arrayOfFilters with a matching user_id key for each entry (aka 5 rows for this example).
Thanks so much!
Write a helper function expand to build the parameterized query. Here is a solution:
import { pgclient } from '../../db';
// Builds a placeholder list like "($1, $2), ($3, $4), ..." for
// rowCount rows of columnCount columns each.
function expand(rowCount, columnCount, startAt = 1) {
var index = startAt;
return Array(rowCount)
.fill(0)
.map(
(v) =>
`(${Array(columnCount)
.fill(0)
.map((v) => `$${index++}`)
.join(', ')})`,
)
.join(', ');
}
// Flattens [['CM', 1], ['CBO', 1], ...] into ['CM', 1, 'CBO', 1, ...].
function flatten(arr) {
var newArr: any[] = [];
arr.forEach((v) => v.forEach((p) => newArr.push(p)));
return newArr;
}
(async function test() {
try {
await pgclient.connect();
// create table
await pgclient.query(`
CREATE TABLE IF NOT EXISTS filters (
id serial PRIMARY KEY,
filter_name VARCHAR(10),
requests_id INTEGER
)
`);
// test
const arrayOfFilters = ['CM', 'CBO', 'SA', 'EPS', 'AD'];
const user_id = 1;
const arrayOfFiltersParams: any[] = arrayOfFilters.map((el) => [el, user_id]);
await pgclient.query(
`INSERT INTO filters(filter_name, requests_id) VALUES ${expand(arrayOfFilters.length, 2)}`,
flatten(arrayOfFiltersParams),
);
} catch (error) {
console.log(error);
} finally {
pgclient.end();
}
})();
After executing the above code, check the database:
node-sequelize-examples=# select * from "filters";
id | filter_name | requests_id
----+-------------+-------------
1 | CM | 1
2 | CBO | 1
3 | SA | 1
4 | EPS | 1
5 | AD | 1
(5 rows)
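For reference, here is the statement the helper generates for this example: expand(5, 2) produces the placeholder list, and flatten supplies the parameters in matching order:
INSERT INTO filters(filter_name, requests_id) VALUES ($1, $2), ($3, $4), ($5, $6), ($7, $8), ($9, $10)
-- with parameters: ['CM', 1, 'CBO', 1, 'SA', 1, 'EPS', 1, 'AD', 1]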
In the Google BigQuery web UI, a query result screen is shown after executing a query, and it has a "Save as Google Sheets" button. I like this feature but would like to automate it. Is there a function in the REST API that would let me do this?
It doesn't seem like there is a straightforward way to do this directly with the BigQuery API. There are a few workarounds for this, though:
You can use the BigQuery API to query your data and then the Google Sheets API to upload the results to Google Sheets, as sketched below.
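A minimal Node.js sketch of that first approach, assuming the @google-cloud/bigquery and googleapis packages with application-default credentials; the SQL string, sheet range, and spreadsheetId are placeholders:
const { BigQuery } = require('@google-cloud/bigquery');
const { google } = require('googleapis');

async function queryToSheet(sql, spreadsheetId) {
  // Run the query and wait for all result rows.
  const bigquery = new BigQuery();
  const [rows] = await bigquery.query({ query: sql });

  // Build a 2D array: header row first, then one array per result row.
  const headers = Object.keys(rows[0]);
  const values = [headers].concat(rows.map((row) => headers.map((h) => row[h])));

  // Write the array into the sheet, starting at A1.
  const auth = await google.auth.getClient({
    scopes: ['https://www.googleapis.com/auth/spreadsheets'],
  });
  const sheets = google.sheets({ version: 'v4', auth });
  await sheets.spreadsheets.values.update({
    spreadsheetId: spreadsheetId,
    range: 'Sheet1!A1',
    valueInputOption: 'RAW',
    requestBody: { values: values },
  });
}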
You can use Google Apps Script. If you go to this link and click on "New Script", you can run the code below. You can adapt it to your needs, and you can also add a trigger to run the script every hour/minute (see the trigger sketch after the code).
Here is the code snippet from that link:
function runQuery() {
// Replace this value with the project ID listed in the Google
// Cloud Platform project.
var projectId = 'XXXXXXXX';
var request = {
query: 'SELECT TOP(word, 300) AS word, COUNT(*) AS word_count ' +
'FROM publicdata:samples.shakespeare WHERE LENGTH(word) > 10;'
};
var queryResults = BigQuery.Jobs.query(request, projectId);
var jobId = queryResults.jobReference.jobId;
// Check on status of the Query Job.
var sleepTimeMs = 500;
while (!queryResults.jobComplete) {
Utilities.sleep(sleepTimeMs);
sleepTimeMs *= 2;
queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId);
}
// Get all the rows of results.
var rows = queryResults.rows;
while (queryResults.pageToken) {
queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {
pageToken: queryResults.pageToken
});
rows = rows.concat(queryResults.rows);
}
if (rows) {
var spreadsheet = SpreadsheetApp.create('BigQuery Results');
var sheet = spreadsheet.getActiveSheet();
// Append the headers.
var headers = queryResults.schema.fields.map(function(field) {
return field.name;
});
sheet.appendRow(headers);
// Append the results.
var data = new Array(rows.length);
for (var i = 0; i < rows.length; i++) {
var cols = rows[i].f;
data[i] = new Array(cols.length);
for (var j = 0; j < cols.length; j++) {
data[i][j] = cols[j].v;
}
}
sheet.getRange(2, 1, rows.length, headers.length).setValues(data);
Logger.log('Results spreadsheet created: %s',
spreadsheet.getUrl());
} else {
Logger.log('No rows returned.');
}
}
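For the time-based trigger mentioned above, a one-off setup function along these lines should work (this uses the standard ScriptApp trigger builder; adjust the interval to taste):
function createHourlyTrigger() {
  // Runs runQuery() once an hour.
  ScriptApp.newTrigger('runQuery')
      .timeBased()
      .everyHours(1)
      .create();
}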
Utilizing the following AdWords Script to export to BigQuery, the BigQuery.Jobs.insert call is causing the script to terminate due to "Empty response". Any reason the call is not getting a response?
var ACCOUNTS = ['xxx','xxx'];
var CONFIG = {
BIGQUERY_PROJECT_ID: 'xxx',
BIGQUERY_DATASET_ID: 'xxx',
// Truncate existing data, otherwise will append.
TRUNCATE_EXISTING_DATASET: true,
TRUNCATE_EXISTING_TABLES: true,
// Back up reports to Google Drive.
WRITE_DATA_TO_DRIVE: false,
// Folder to put all the intermediate files.
DRIVE_FOLDER: 'Adwords Big Query Test',
// Default date range over which statistics fields are retrieved.
DEFAULT_DATE_RANGE: '20140101,20140105',
// Lists of reports and fields to retrieve from AdWords.
REPORTS: [{NAME: 'KEYWORDS_PERFORMANCE_REPORT',
CONDITIONS: 'WHERE Impressions>0',
FIELDS: {'AccountDescriptiveName' : 'STRING',
'Date' : 'STRING',
'CampaignId' : 'STRING',
'CampaignName' : 'STRING',
'AdGroupId' : 'STRING',
'AdGroupName' : 'STRING',
'Id' : 'STRING',
'Criteria' : 'STRING',
'KeywordMatchType' : 'STRING',
'AdNetworkType1' : 'STRING',
'AdNetworkType2' : 'STRING',
'Device' : 'STRING',
'AveragePosition' : 'STRING',
'QualityScore' : 'STRING',
'CpcBid' : 'STRING',
'TopOfPageCpc' : 'STRING',
'Impressions' : 'STRING',
'Clicks' : 'STRING',
'ConvertedClicks' : 'STRING',
'Cost' : 'STRING',
'Conversions' : 'STRING'
}
}],
RECIPIENT_EMAILS: [
'xxx',
]
};
function main() {
createDataset();
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
createTable(reportConfig);
}
var folder = getDriveFolder();
// Get an account iterator.
var accountIterator = MccApp.accounts().withIds(ACCOUNTS).withLimit(10).get();
var jobIdMap = {};
while (accountIterator.hasNext()) {
// Get the current account.
var account = accountIterator.next();
// Select the child account.
MccApp.select(account);
// Run reports against child account.
var accountJobIds = processReports(folder, account.getCustomerId());
jobIdMap[account.getCustomerId()] = accountJobIds;
}
waitTillJobsComplete(jobIdMap);
sendEmail(jobIdMap);
}
function createDataset() {
if (datasetExists()) {
if (CONFIG.TRUNCATE_EXISTING_DATASET) {
BigQuery.Datasets.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, {'deleteContents' : true});
Logger.log('Truncated dataset.');
} else {
Logger.log('Dataset %s already exists. Will not recreate.',
CONFIG.BIGQUERY_DATASET_ID);
return;
}
}
// Create new dataset.
var dataSet = BigQuery.newDataset();
dataSet.friendlyName = CONFIG.BIGQUERY_DATASET_ID;
dataSet.datasetReference = BigQuery.newDatasetReference();
dataSet.datasetReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
dataSet.datasetReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
dataSet = BigQuery.Datasets.insert(dataSet, CONFIG.BIGQUERY_PROJECT_ID);
Logger.log('Created dataset with id %s.', dataSet.id);
}
/**
* Checks if dataset already exists in project.
*
* @return {boolean} Returns true if dataset already exists.
*/
function datasetExists() {
// Get a list of all datasets in project.
var datasets = BigQuery.Datasets.list(CONFIG.BIGQUERY_PROJECT_ID);
var datasetExists = false;
// Iterate through each dataset and check for an id match.
if (datasets.datasets != null) {
for (var i = 0; i < datasets.datasets.length; i++) {
var dataset = datasets.datasets[i];
if (dataset.datasetReference.datasetId == CONFIG.BIGQUERY_DATASET_ID) {
datasetExists = true;
break;
}
}
}
return datasetExists;
}
function createTable(reportConfig) {
if (tableExists(reportConfig.NAME)) {
if (CONFIG.TRUNCATE_EXISTING_TABLES) {
BigQuery.Tables.remove(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID, reportConfig.NAME);
Logger.log('Truncated table %s.', reportConfig.NAME);
} else {
Logger.log('Table %s already exists. Will not recreate.',
reportConfig.NAME);
return;
}
}
// Create new table.
var table = BigQuery.newTable();
var schema = BigQuery.newTableSchema();
var bigQueryFields = [];
// Add account column to table.
var accountFieldSchema = BigQuery.newTableFieldSchema();
accountFieldSchema.description = 'AccountId';
accountFieldSchema.name = 'AccountId';
accountFieldSchema.type = 'STRING';
bigQueryFields.push(accountFieldSchema);
// Add each field to table schema.
var fieldNames = Object.keys(reportConfig.FIELDS);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var bigQueryFieldSchema = BigQuery.newTableFieldSchema();
bigQueryFieldSchema.description = fieldName;
bigQueryFieldSchema.name = fieldName;
bigQueryFieldSchema.type = reportConfig.FIELDS[fieldName];
bigQueryFields.push(bigQueryFieldSchema);
}
schema.fields = bigQueryFields;
table.schema = schema;
table.friendlyName = reportConfig.NAME;
table.tableReference = BigQuery.newTableReference();
table.tableReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
table.tableReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
table.tableReference.tableId = reportConfig.NAME;
table = BigQuery.Tables.insert(table, CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
Logger.log('Created table with id %s.', table.id);
}
function tableExists(tableId) {
// Get a list of all tables in the dataset.
var tables = BigQuery.Tables.list(CONFIG.BIGQUERY_PROJECT_ID,
CONFIG.BIGQUERY_DATASET_ID);
var tableExists = false;
// Iterate through each table and check for an id match.
if (tables.tables != null) {
for (var i = 0; i < tables.tables.length; i++) {
var table = tables.tables[i];
if (table.tableReference.tableId == tableId) {
tableExists = true;
break;
}
}
}
return tableExists;
}
function processReports(folder, accountId) {
var jobIds = [];
// Iterate over each report type.
for (var i = 0; i < CONFIG.REPORTS.length; i++) {
var reportConfig = CONFIG.REPORTS[i];
Logger.log('Running report %s for account %s', reportConfig.NAME,
accountId);
// Get data as csv
var csvData = retrieveAdwordsReport(reportConfig, accountId);
// If configured, back up data.
if (CONFIG.WRITE_DATA_TO_DRIVE) {
var fileName = reportConfig.NAME + '_' + accountId;
folder.createFile(fileName, csvData, MimeType.CSV);
Logger.log('Exported data to Drive folder ' +
CONFIG.DRIVE_FOLDER + ' for report ' + fileName);
}
// Convert to Blob format.
var blobData = Utilities.newBlob(csvData, 'application/octet-stream');
// Load data
var jobId = loadDataToBigquery(reportConfig, blobData);
jobIds.push(jobId);
}
return jobIds;
}
function retrieveAdwordsReport(reportConfig, accountId) {
var fieldNames = Object.keys(reportConfig.FIELDS);
var report = AdWordsApp.report(
'SELECT ' + fieldNames.join(',') +
' FROM ' + reportConfig.NAME + ' ' + reportConfig.CONDITIONS +
' DURING ' + CONFIG.DEFAULT_DATE_RANGE);
var rows = report.rows();
var csvRows = [];
// Header row
csvRows.push('AccountId,'+fieldNames.join(','));
// Iterate over each row.
while (rows.hasNext()) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
for (var i = 0; i < fieldNames.length; i++) {
var fieldName = fieldNames[i];
var fieldValue = row[fieldName].toString();
var fieldType = reportConfig.FIELDS[fieldName];
/* Strip off % and perform any other formatting here.
if ((fieldType == 'FLOAT' || fieldType == 'INTEGER') &&
fieldValue.charAt(fieldValue.length - 1) == '%') {
fieldValue = fieldValue.substring(0, fieldValue.length - 1);
}*/
// Add double quotes to any string values.
if (fieldType == 'STRING') {
fieldValue = fieldValue.replace(',', ''); //Handle fields with comma in value returned
fieldValue = fieldValue.replace('"', ''); //Handle fields with double quotes in value returned
fieldValue = fieldValue.replace('+', ''); //Handle fields with "+" in value returned
fieldValue = '"' + fieldValue + '"';
}
csvRow.push(fieldValue);
}
csvRows.push(csvRow.join(','));
}
Logger.log('Downloaded ' + reportConfig.NAME + ' for account ' + accountId +
' with ' + csvRows.length + ' rows.');
return csvRows.join('\n');
}
function getDriveFolder() {
var folders = DriveApp.getFoldersByName(CONFIG.DRIVE_FOLDER);
// Assume first folder is the correct one.
if (folders.hasNext()) {
Logger.log('Folder name found. Using existing folder.');
return folders.next();
}
return DriveApp.createFolder(CONFIG.DRIVE_FOLDER);
}
function loadDataToBigquery(reportConfig, data) {
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() + s4() + s4() + s4();
}
var makeId = guid();
var job = {
jobReference: {
jobId: makeId
},
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return job.jobReference.jobId;
}
function waitTillJobsComplete(jobIdMap) {
var complete = false;
var remainingJobs = [];
var accountIds = Object.keys(jobIdMap);
for (var i = 0; i < accountIds.length; i++){
var accountJobIds = jobIdMap[accountIds[i]];
remainingJobs.push.apply(remainingJobs, accountJobIds);
}
while (!complete) {
if (AdWordsApp.getExecutionInfo().getRemainingTime() < 5){
Logger.log('Script is about to timeout, jobs ' + remainingJobs.join(',') +
' are still incomplete.');
}
remainingJobs = getIncompleteJobs(remainingJobs);
if (remainingJobs.length == 0) {
complete = true;
}
if (!complete) {
Logger.log(remainingJobs.length + ' jobs still being processed.');
// Wait 5 seconds before checking status again.
Utilities.sleep(5000);
}
}
Logger.log('All jobs processed.');
}
function getIncompleteJobs(jobIds) {
var remainingJobIds = [];
for (var i = 0; i < jobIds.length; i++) {
var jobId = jobIds[i];
var getJob = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, jobId);
if (getJob.status.state != 'DONE') {
remainingJobIds.push(jobId);
}
}
return remainingJobIds;
}
It appears the "Empty Response" error is being thrown on:
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Have tried quite a few tweaks, but the answer doesn't appear to be obvious to me. Thanks for any help!
I could be wrong, but I think the problem was with the jobId, caused by an issue in the guid() function: a missing "+" sign.
function guid() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return s4() + s4() + s4() + s4() + s4() s4() + s4() + s4(); // <-- missing "+" between the fifth and sixth s4()
}
Why not use the jobId from the response, like below?
var job = {
configuration: {
load: {
destinationTable: {
projectId: CONFIG.BIGQUERY_PROJECT_ID,
datasetId: CONFIG.BIGQUERY_DATASET_ID,
tableId: reportConfig.NAME
},
skipLeadingRows: 1,
ignoreUnknownValues: true,
allowJaggedRows: true,
allowLargeResults: true
}
}
};
var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
Logger.log('Load job started for %s. Check on the status of it here: ' +
'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
CONFIG.BIGQUERY_PROJECT_ID);
return insertJob.jobReference.jobId;
Added:
In this case I would suggest logging the jobId (makeId = guid()) and getting the job status via the "Try it" console here: https://cloud.google.com/bigquery/docs/reference/v2/jobs/get#try-it
Enter the ProjectId and JobId, and you will at least see what is going on with your job.
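Alternatively, a small sketch for checking the job from within the script itself, using the same BigQuery advanced service the script already calls (field names per the Jobs resource):
// Log the load job's state and any errorResult right after inserting it.
var jobStatus = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, makeId).status;
Logger.log('Job %s state: %s', makeId, jobStatus.state);
if (jobStatus.errorResult) {
  Logger.log('Job failed: %s', jobStatus.errorResult.message);
}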
AdWords places a "--" in for null values. If you define your report fields as anything but STRING (e.g., FLOAT, INTEGER, etc.), the insert will fail because BigQuery can't convert "--" to a float or integer.
Try setting all of your fields to STRING and see if that solves the problem.
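If you would rather keep numeric types, a hypothetical tweak is to blank out the placeholder inside the field loop of retrieveAdwordsReport, before the value is written to the CSV row:
// AdWords renders NULL as '--', which breaks FLOAT/INTEGER loads.
if (fieldValue === '--') {
  fieldValue = '';
}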
Have you tried setting the WRITE_DATA_TO_DRIVE parameter to true to confirm that the report export is successful? How large is the result? I get the same error when attempting an insert greater than 10MB (~25k rows, depending on columns). If the file exported to Google Drive looks good, you can add a condition to the while loop in retrieveAdwordsReport to limit the file size. There was also a post on https://groups.google.com/forum/#!forum/adwords-scripts mentioning an issue when including AdNetworkType columns: https://groups.google.com/forum/#!searchin/adwords-scripts/adnetworktype2%7Csort:relevance/adwords-scripts/yK57JHCt3Cw/Cl1SjFaQBQAJ.
Limit result size:
var processedRows = 0;
// Iterate over each row.
while (rows.hasNext() && ++processedRows < 5000) {
var row = rows.next();
var csvRow = [];
csvRow.push(accountId);
if (processedRows % 1000 == 0)
{
Logger.log('Processed %s rows.',processedRows);
}
...
From the npm docs, the only visible prepared-statement examples are for INSERT. Do these prepared statements also work for SELECT, UPDATE, and DELETE?
I tried it for SELECT, but there doesn't seem to be a .each function where the rows are called back. Has anyone been able to do this, or have links to resources? Because I sure as hell am unable to find any.
According to the node-sqlite3 API documentation, you can use parameters in your SQL queries in several different ways:
// Directly in the function arguments.
db.run("UPDATE tbl SET name = ? WHERE id = ?", "bar", 2);
// As an array.
db.run("UPDATE tbl SET name = ? WHERE id = ?", [ "bar", 2 ]);
// As an object with named parameters.
db.run("UPDATE tbl SET name = $name WHERE id = $id", {
$id: 2,
$name: "bar"
});
Yes, prepared statements are supported.
With node-sqlite3:
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database('data.db');
db.serialize(function() {
var stmt = db.prepare("INSERT INTO users VALUES (?,?)");
for (var i = 0; i < 10; i++) {
stmt.run("user " + i, "email " + i);
}
stmt.finalize();
stmt = db.prepare("SELECT * FROM users WHERE id=?");
stmt.each(userId, function(err, row) {
console.log(row.name, row.email);
}, function(err, count) {
stmt.finalize();
});
});
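Note that for one-off queries you can also skip the explicit prepare and use db.all or db.each directly; both accept parameters the same way:
// db.all collects every matching row into an array;
// db.each invokes the callback once per row instead.
db.all("SELECT * FROM users WHERE id=?", [userId], function(err, rows) {
  if (err) throw err;
  rows.forEach(function(row) {
    console.log(row.name, row.email);
  });
});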
With better-sqlite3:
var Database = require('better-sqlite3');
var db = new Database('foobar.db'); // pass an options object here if needed
var stmt = db.prepare("INSERT INTO users VALUES (?,?)");
for (var i = 0; i < 10; i++) {
stmt.run("user " + i, "email " + i);
}
var stmt = db.prepare('SELECT * FROM users WHERE id=?');
var row = stmt.get(userId); // userId as above, e.g. 1
console.log(row.name, row.email);
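For multiple rows with better-sqlite3, stmt.all() returns an array of rows, so SELECT, UPDATE, and DELETE are all covered by prepared statements as well:
// all() loads every matching row at once.
var users = db.prepare('SELECT * FROM users').all();
users.forEach(function (u) {
  console.log(u.name, u.email);
});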