Node.js PostgreSQL requests are being terminated prematurely

I'm trying to perform updates on a PostgreSQL database using Node, but I'm getting Error: Connection terminated when performing an UPDATE operation. I've probably mishandled the connections, not dealing with them in a proper async way and terminating them prematurely, but I can't wrap my head around how to do this correctly.
Sorry for the big lump of code but it is pretty straightforward.
//main.js
var result = require('./workers.js');
result.check(function(message) {
console.log(message);
});
result.decrease(); //Throws an error.
//workers.js
var pg = require ('pg');
var fs = require('fs');
var db = JSON.parse(fs.readFileSync('../config.json', 'utf8')).db;
var querydb = require('./querydb.js');
var self = module.exports = {
check: function(callback) {
querydb.select("SELECT size FROM workers WHERE id = 1", function(results) {
callback(results[0].size);
});
},
increase: function() {
self.check(function(workers) {
querydb.update("UPDATE workers SET size = " + (workers + 1) + " WHERE id = 1");
});
},
decrease: function() {
self.check(function(workers) {
querydb.update("UPDATE workers SET size = " + (workers - 1) + " WHERE id = 1");
});
}
};
//querydb.js
var pg = require ('pg');
var fs = require('fs');
var db = JSON.parse(fs.readFileSync('../config.json', 'utf8')).db;
var select = function(statement, callback) {
pg.connect(db, function(err, client, done) {
var results = [];
//Handle errors
if(err) {
done();
console.log(err);
}
var query = client.query(statement);
//Stream results back
query.on('row', function(row) {
results.push(row);
});
//When all data is returned.
query.on('end', function() {
done();
callback(results);
client.end();
});
});
}
var update = function(statement) {
pg.connect(db, function(err, client, done) {
var results = [];
//Handle errors
if(err) {
done();
console.log(err);
}
var query = client.query(statement);
//When all data is returned.
query.on('end', function() {
done();
client.end();
});
});
}
module.exports = {
select: select,
update: update
};

OK, this maybe isn't an answer per se and more of a workaround, but synchronous calls were the desired behavior for this kind of script (i.e. not a web server), so I ended up going with pg-native instead of pg, refactored my querydb and workers modules to use it, and now everything works!
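For reference, a minimal sketch of what the refactored querydb.js could look like using pg-native's synchronous connectSync/querySync calls (the connection string below is just a placeholder, not my actual config):
//querydb.js (sketch using pg-native)
var Client = require('pg-native');
var connectionString = 'postgresql://user:password@localhost/mydb'; // placeholder

var select = function(statement) {
  var client = new Client();
  client.connectSync(connectionString);
  var rows = client.querySync(statement); // blocks until the rows come back
  client.end();
  return rows;
};

var update = function(statement) {
  var client = new Client();
  client.connectSync(connectionString);
  client.querySync(statement); // blocks until the UPDATE completes
  client.end();
};

module.exports = {
  select: select,
  update: update
};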

Related

Sequelize, findOrCreate + findAll unexpected behavior

I'm trying to fetch data from a dog API, and I only want to add their temperaments to my database. I used some loops and a split to isolate the data, then
findOrCreate() to add only those that aren't already in the DB; after that I use findAll()
to get that info from the DB and send it using Express.
The unexpected behavior: when I go to the route that executes all this, the route
only returns about half of the temperaments (there are 124 and it displays around 54), and when I refresh the page it shows all 124 of them. The DB gets populated with all 124 in one go, so the problem seems to be with findAll().
This is the function that isolates the temperaments and appends them to the DB:
module.exports = async () => {
const info = await getAllDogs();
info.forEach(async (element) => {
const { temperament } = element;
if (temperament) {
const eachOne = temperament.split(", ");
for (i in eachOne) {
await Temperament.findOrCreate({
where: { name: eachOne[i] },
});
}
}
});
};
And this is the code that gets executed when I hit my Express server to get the info:
exports.temperaments = async (req, res) => {
try {
await getTemperaments(); //this function is the above function
} catch (error) {
res.status(500).send("something gone wrong", error);
}
const temperamentsDB = await Temperament.findAll();
res.json(temperamentsDB);
};
So, as you can see, the last function executes the function that appends all the data to the DB and then sends it with findAll() and res.json().
forEach is a synchronous method, so it doesn't await the result of an async callback. You need to use for...of in order to wait for all the results:
module.exports = async () => {
const info = await getAllDogs();
for (const element of info) {
const { temperament } = element;
if (temperament) {
const eachOne = temperament.split(", ");
for (const i in eachOne) {
await Temperament.findOrCreate({
where: { name: eachOne[i] },
});
}
}
}
};
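As an alternative, if you would rather keep the inserts running concurrently instead of strictly one after another, you can collect the unique names first and then wait on all the findOrCreate calls with Promise.all. A rough sketch of that idea, reusing getAllDogs and Temperament from the question:
module.exports = async () => {
  const info = await getAllDogs();
  // Collect the unique temperament names first
  const names = new Set();
  for (const element of info) {
    if (element.temperament) {
      element.temperament.split(", ").forEach((name) => names.add(name));
    }
  }
  // Fire all findOrCreate calls and wait until every one has finished
  await Promise.all(
    [...names].map((name) => Temperament.findOrCreate({ where: { name } }))
  );
};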

Query works but can't retrieve the data

I am new to Node.js (3 days total experience). I am using Node.js and the tedious package to query a database (Azure SQL). I used the example explained here: https://learn.microsoft.com/en-us/azure/azure-sql/database/connect-query-nodejs?tabs=macos
const connection = new Connection(config);
// Attempt to connect and execute queries if connection goes through
connection.on("connect", err => {
if (err) {
console.error(err.message);
} else {
console.log("Reading rows from the Table...");
// Read all rows from table
const request = new Request(
"SELECT * FROM clients",
(err, rowCount, columns) => {
if (err) {
console.error(err.message);
} else {
console.log(`${rowCount} row(s) returned`);
}
}
);
request.on("row", columns => {
columns.forEach(column => {
console.log("%s\t%s", column.metadata.colName, column.value);
});
});
connection.execSql(request);
}
});
I have two issues:
I do not know how to get the queried data into an object, and
if I run the script it does print the items to the console, but it doesn't close the connection after it has done so. If I add a connection.close() at the bottom, it closes the connection before it's done. I get the feeling that Node.js executes everything at the same time (I am used to Python...).
Update
I found a way to close the connection. To my understanding, the request object has several "events" predefined by the library. It seems I need to add the "done" event via request.on('done', ...) to make sure the request can even BE done. My updated code looks like this:
var connection = new Connection(config);
connection.connect(function(err) {
// If no error, then good to go...
executeStatement();
}
);
connection.on('debug', function(text) {
//remove commenting below to get full debugging.
//console.log(text);
}
);
function executeStatement() {
request = new Request("SELECT * FROM clients", function(err, rowCount) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
connection.close();
});
request.on('row', function(rows) {
_.forEach(rows, function(value, collection){
console.log(value)
console.log(value.value);
console.log(value.metadata.colName)
console.log(collection)
})
});
request.on('done', function(rowCount, more) {
console.log(rowCount + ' rows returned');
});
// In SQL Server 2000 you may need: connection.execSqlBatch(request);
connection.execSql(request);
}
Anyway, your help would be much appreciated!
The tedious package is callback-based: results come back through the request callback. So if you call connection.close() right away, it closes the connection and cuts the callback off before it finishes. If you want to close the connection only after the query completes, I suggest using the async package to sequence the steps.
For example
const { Connection, Request } = require("tedious");
const async = require("async");
const config = {
authentication: {
options: {
userName: "username", // update me
password: "password", // update me
},
type: "default",
},
server: "your_server.database.windows.net", // update me
options: {
database: "your_database", //update me
encrypt: true,
validateBulkLoadParameters: true,
},
};
const connection = new Connection(config);
let results=[]
function queryDatabase(callback) {
console.log("Reading rows from the Table...");
// Read all rows from table
const request = new Request("SELECT * FROM Person", (err, rowCount) => {
if (err) {
callback(err);
} else {
console.log(`${rowCount} row(s) returned`);
callback(null);
}
});
request.on("row", (columns) => {
let result={}
columns.forEach((column) => {
result[column.metadata.colName]=column.value
console.log("%s\t%s", column.metadata.colName, column.value);
});
// save result into an array
results.push(result)
});
connection.execSql(request);
}
function Complete(err, result) {
if (err) {
console.log(err);
} else {
connection.close();
console.log("close connection");
}
}
connection.on("connect", function (err) {
if (err) {
console.log(err);
} else {
console.log("Connected");
// Execute all functions in the array serially
async.waterfall([queryDatabase], Complete);
}
});
connection.connect();
Besides, you can also use the mssql package. It provides asynchronous methods and is built on top of tedious, so we can simply await the query and then close the connection.
For example
const mssql = require("mssql");
const config = {
user: "username",
password: "password",
server: "your_server.database.windows.net",
database: "your_database",
options: {
encrypt: true,
enableArithAbort: true,
},
};
let pool = new mssql.ConnectionPool(config);
async function query() {
try {
await pool.connect();
const request = pool.request();
const result = await request.query("SELECT * FROM Person");
console.dir(result.recordset);
await pool.close();
console.log(pool.connected);
} catch (error) {
throw error;
}
}
query().catch((err) => {
throw err;
});
Regarding getting the queried data into an object: you can define a class first and declare an array to hold the objects, such as:
let sales = new Array();
class SalesLT{
constructor(catagryName,productName){
this.catagryName = catagryName;
this.productName = productName;
}
}
Here my SQL statement returns 2 columns, so each pass of the loop takes two elements out of the ColumnValue[].
request.on("row", columns => {
for(let i=0; i<columns.length; i=i+2){
let sale = new SalesLT(columns[i].value,columns[i+1].value);
sales.push(sale);
}
sales.forEach( item => {
console.log("%s\t%s",item.catagryName, item.productName)
})
});
The code is as follows:
const { Connection, Request } = require("tedious");
let sales = new Array();
class SalesLT{
constructor(catagryName,productName){
this.catagryName = catagryName;
this.productName = productName;
}
}
// Create connection to database
const config = {
authentication: {
options: {
userName: "<***>", // update me
password: "<***>" // update me
},
type: "default"
},
server: "<****>.database.windows.net", // update me
options: {
database: "<***>", //update me
encrypt: true
}
};
const connection = new Connection(config);
// Attempt to connect and execute queries if connection goes through
connection.on ("connect", err => {
if (err) {
console.error(err.message);
} else {
queryDatabase();
}
});
function queryDatabase() {
console.log("Reading rows from the Table...");
// Read all rows from table
const request = new Request(
`SELECT TOP 2 pc.Name as CategoryName,
p.name as ProductName
FROM [SalesLT].[ProductCategory] pc
JOIN [SalesLT].[Product] p ON pc.productcategoryid = p.productcategoryid`,
(err, rowCount) => {
if (err) {
console.error(err.message);
} else {
console.log(`${rowCount} row(s) returned`);
}
connection.close();
}
);
request.on("row", columns => {
for(let i=0; i<columns.length; i=i+2){
let sale = new SalesLT(columns[i].value,columns[i+1].value);
sales.push(sale);
}
sales.forEach( item => {
console.log("%s\t%s",item.catagryName, item.productName)
})
});
connection.execSql(request);
}
This article should help you solve all the issues you are facing, which were the same ones I had when I started using Node :)
https://devblogs.microsoft.com/azure-sql/promises-node-tedious-azure-sql-oh-my/
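In that spirit, here is a minimal sketch (not taken from the article) of wrapping a tedious query in a Promise so it can be awaited, reusing the Connection/Request and config shapes from the snippets above:
const { Connection, Request } = require("tedious");

// config shaped like the examples above (server, authentication, options)
function queryRows(config, sqlText) {
  return new Promise((resolve, reject) => {
    const connection = new Connection(config);
    const rows = [];
    connection.on("connect", (err) => {
      if (err) return reject(err);
      const request = new Request(sqlText, (err) => {
        // request callback fires when the whole statement has completed
        connection.close();
        if (err) return reject(err);
        resolve(rows);
      });
      request.on("row", (columns) => {
        const row = {};
        columns.forEach((column) => {
          row[column.metadata.colName] = column.value;
        });
        rows.push(row);
      });
      connection.execSql(request);
    });
    // recent versions of tedious require an explicit connect() call
    connection.connect();
  });
}

// usage:
// queryRows(config, "SELECT * FROM clients").then(console.log).catch(console.error);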

Node.js: make request to SQL Server, return result

I'm working with Node.js and am having some trouble understanding how to return the result after making a request to SQL Server. When running the query on its own and writing to the console, I can get the result just fine; however, using it as a function and having it return the result is where I'm running into problems.
I'm pretty sure I have to use a callback/promise, but I don't really understand how either of those is set up. Hoping someone on here can help me out!
Here is my code:
var sql = require("mssql");
var config = {
user: 'username',
password: 'password',
server: 'localhost',
database: 'Master'
};
function updateTable() {
var connection = new sql.ConnectionPool(config, function(err) {
var request = new sql.Request(connection);
request.query('select LastName from Persons', function(err, result) {
return result.recordset;
});
});
};
console.log(updateTable());
Basically I'm trying to print the result to the console by calling the function inside console.log. Right now it's printing 'undefined', but I assume putting in a callback would do the trick. Again, I just need some help understanding how it works and getting it set up. Thanks!
If you just want to print it to the console then you can simply tweak your code as follows:
var sql = require("mssql");
var config = {
user: 'username',
password: 'password',
server: 'localhost',
database: 'Master'
};
function updateTable(callback) {
var connection = new sql.ConnectionPool(config, function(err) {
var request = new sql.Request(connection);
request.query('select LastName from Persons', function(err, result) {
callback(result.recordset);
});
});
};
updateTable(console.log);
To send the result from an express handler, assuming >= NodeJS v8:
Wrap the actual database interfacing logic inside an async function which will not block the main thread and export it from your module.
sqlConnector.js
const sql = require('mssql');
const config = {
user: 'username',
password: 'password',
server: 'localhost',
database: 'Master'
};
const updateTable = async () => {
try {
const pool = await sql.connect(config);
const sqlQuery = 'SELECT LastName FROM Persons';
const result = await pool.request().query(sqlQuery);
return result;
} catch (err) {
throw err;
}
};
exports.updateTable = updateTable;
In express handler
Import the module that talks to MS SQL (sqlConnector.js here), mark your handler with the async keyword, and send the result with res.json at the end.
const sqlConnector = require('./sqlConnector');
app.get('/someroute', async (req, res, next) => {
try {
const result = await sqlConnector.updateTable();
return res.status(200).json(result);
} catch (error) {
next(error);
}
});

Redis mocha Test case issue

I have one file called cache.js:
var redis = require('redis');
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
logger.error("Error connecting to redis", err);
});
exports.setExp = function(key, timeLeft, data){
redisCache.set(key, JSON.stringify(data), function (err, reply) {
console.log("error "+err);
console.log("reply "+reply);
if(err) {
console.log("error "+err.command + err.code);
logger.info("This errror on set key related to node_redis");
}
if(reply == 'OK') {
redisCache.expire(key, timeLeft, function (err, reply) {
if(err) {
logger.info("This errror on expire key related to node_redis");
}
if(reply === 1) {
logger.info(key+" key expire time set as "+timeLeft+" successfully!");
}
});
}
});
}
Now I want to write a test case for the above setExp function, but somehow node_redis always returns err as null and reply as 'OK'.
Below is my test case:
var cache = require(path.join(__dirname,'..','/cache'));
describe('cache', function () {
it('Cache #setExp() ', function (done) {
var result = cache.setExp(undefined, 0, []);
assert.equal('OK', results);
done()
})
})
If I change the call to
var result = cache.setExp('foo', 10, []);
then, as per the node_redis test cases, it should return the error ERR wrong number of arguments for 'set' command.
And with
var result = cache.setExp(undefined, 0, []);
it should produce an error that passes
assert.equal(err.command, 'SET');
Please suggest the right way to achieve this.
Your thinking seems to be almost completely wrong here.
First of all, you're writing and using setExp as if it's a synchronous operation, but it isn't. It will return before the request is made to redis. It also never returns anything, so even if it was synchronous, result in your tests will always be undefined.
You need to redesign setExp as an asynchronous operation, either by using the async keyword, returning a promise, or having it accept a callback function.
Second of all, if you want to set an expiration on a Redis key, you should set it when you set the key itself, instead of setting the key with no expiration and then trying to add the expiration later. Otherwise you run the risk of the expiration setting failing, and then winding up with an orphaned key that never expires.
Here's an example, using Node's util.promisify as described in the node_redis docs:
var redis = require('redis');
var {promisify} = require('util');
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
logger.error("Error connecting to redis", err);
});
var set = promisify(redisCache.set).bind(redisCache);
exports.setExp = function(key, timeLeft, data){
return set(key, JSON.stringify(data), 'EX', timeLeft.toString(10))
.then((reply) => {
if (reply !== 'OK') throw new Error(reply);
return reply;
});
};
In your tests you'd do something like this:
var cache = require('../cache');
describe('cache', function () {
it('Cache #setExp() ', function () {
let key = 'some key';
let timeLeft = 12345;
let data = { foo: 'bar' };
return cache.setExp(key, timeLeft, data)
.then((result) => {
assert.equal('OK', result);
});
});
});
Also, results and result are not the same thing. In your test case, there is no variable called results.
Oh, and don't do this:
var cache = require(path.join(__dirname,'..','/cache'));
require already supports paths relative to __dirname. Just do this:
var cache = require('../cache');

Display multiple Oracle SQL query results on AngularJS page

So I need to display the results of multiple SQL queries in the same component in AngularJS.
How would I do this? So far, I understood that a component can treat only one HTTP request, as in here:
'use strict';
angular.module('cryostat', []).component('cryostat', {
templateUrl: 'cryostat/cryostat.template.html',
controller: function cryostatController($http) {
this.pageTitle = "NP04 Cryostat"
this.natalie = 1;
$http.get("cryostat.conn.php")
.then(function (response) {this.TT0101 = response.data.records;});
}
});
I understood that a component can treat only one HTTP request
This is not at all true. Where did you hear that? It is easy to make two requests simultaneously with $q.all():
var promiseOne = $http.get("query1.php");
var promiseTwo = $http.get("query2.php");
$q.all([promiseOne, promiseTwo]).then(function(resultArray) {
$scope.resultOne = resultArray[0];
$scope.resultTwo = resultArray[1];
});
Make use of $q.all() for your purpose.
var myData_One, myData_two;
var promises = [];
function promiseA() {
let deferred = $q.defer();
ajaxCall().then((response) => {
$scope.myData_One = response;
deferred.resolve();
}, (error) => {
deferred.reject(error);
});
return deferred.promise;
}
function promiseB() {
let deferred = $q.defer();
ajaxCall().then((response) => {
$scope.myData_two = response;
deferred.resolve(response);
}, (error) => {
deferred.reject(error);
});
return deferred.promise;
}
var promises = [promiseA(), promiseB()];
$q.all(promises).then((values) => {
//Do whatever you wish to with response data from both promises.
});
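As a side note, if ajaxCall() already returns a promise (as $http calls do), the manual deferreds above aren't strictly needed; a shorter sketch of the same idea:
// Each ajaxCall() already yields a promise; $q.all waits for both
$q.all([ajaxCall(), ajaxCall()]).then(function (responses) {
$scope.myData_One = responses[0];
$scope.myData_two = responses[1];
});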