Node.js mssql "Global connection already exists" / connection pool memory leak

New to Node. I have looked at some other questions and haven't found a direct answer. I'm creating a module for a simple SQL data call, but I'm getting the "Global connection already exists" error. I believe my solution lies in using a connection pool, but I'm uncertain how to create one, or whether I'm doing it correctly. Any help is appreciated. I have removed any sensitive login data.
UPDATE: by moving my sql.close() so it runs before the onDone && onDone() callback, I now receive the correct data, but Node is now warning: "Warning: Possible EventEmitter memory leak detected. 11 error listeners added. Use emitter.setMaxListeners() to increase limit".
What's not working: I can successfully run my first call, but every call after that returns nothing. I have verified using SQL Profiler that only one call is being made to the server, so I believe something in my code is bypassing the second call and returning a null result set.
const self = {};
// NPM package that supports accessing Microsoft SQL databases from node.
let sql = require('mssql');
let debug = false;
let connection;
// configure SQL Connection. The credentials below should be handled more securely.
// While this backend code is not accessible via public web accesses, it will be in
// GitHub.
let config = {
  domain: "hidden",
  user: "hidden",
  password: 'hidden',
  server: 'hidden',
  database: 'hidden',
  options: {
    encrypt: false // Use this if you're on Windows Azure
  }
};
self.getSQLData = function getSQLData (sprocInfo, onDone, onRow) {
  // Connect to SQL Server.
  if (debug) console.log("Hit SQL Function \n");
  if (typeof(sprocInfo.parameters) === 'object' && Array.isArray(sprocInfo.parameters)) {
    if (debug) console.log("Passed array check");
    if (sprocInfo.database) config.database = sprocInfo.database;
    if (connection && connection._connected) {
      if (debug) console.log("Inside SQL Connection");
      const request = new sql.Request(connection); // create a new request
      request.stream = true;
      var results = [];
      // put all parameters as input parameters to sql call
      sprocInfo.parameters.forEach(function(element) {
        request.input(element.parameterName.toString(), sql.NVarChar, element.parameterValue);
        // NEED to remove hard typing of Integers. Maybe case statement based on typeof??
      });
      request.on('row', row => {
        results.push(row);
        onRow && onRow(row);
      });
      request.on('done', result => {
        // console.log("Result in request.on done" + JSON.stringify(result));
        onDone && onDone(results);
        sql.close();
      });
      request.on('error', err => {
        // May be emitted multiple times
        if (debug) console.log('error');
        if (debug) console.log('the error is: ' + err);
      });
      request.execute(sprocInfo.procedure);
    } else {
      connection = sql.connect(config, err => {
        if (debug) console.log("Inside SQL Connection");
        const request = new sql.Request(); // create a new request
        request.stream = true;
        var results = [];
        // put all parameters as input parameters to sql call
        sprocInfo.parameters.forEach(function(element) {
          request.input(element.parameterName.toString(), sql.NVarChar, element.parameterValue);
          // NEED to remove hard typing of Integers. Maybe case statement based on typeof??
        });
        request.on('row', row => {
          results.push(row);
          onRow && onRow(row);
        });
        request.on('done', result => {
          // console.log("Result in request.on done" + JSON.stringify(result));
          onDone && onDone(results);
          sql.close();
        });
        request.on('error', err => {
          // May be emitted multiple times
          if (debug) console.log('error');
          if (debug) console.log('the error is: ' + err);
        });
        request.execute(sprocInfo.procedure);
      });
    }
    sql.on('error', err => {
      console.log("Error: Failed to connect to SQL Server");
    });
  } else {
    throw new Error("Parameters is not an array");
  }
};
module.exports = self;
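For reference, here is a minimal sketch (not the poster's module) of how the same stored-procedure call could go through a single shared connection pool in mssql v4+, so that sql.connect() and sql.close() are not repeated per request and no extra error listeners pile up. "config" refers to the same config object defined above:
const sql = require('mssql');
// Create one pool for the whole module and reuse it; never call sql.close() per request.
const pool = new sql.ConnectionPool(config);
const poolConnect = pool.connect(); // connect once, keep the promise around
async function getSQLData(sprocInfo) {
  await poolConnect;                // wait until the pool is ready
  const request = pool.request();   // each request borrows a pooled connection
  sprocInfo.parameters.forEach(p => {
    request.input(p.parameterName, sql.NVarChar, p.parameterValue);
  });
  const result = await request.execute(sprocInfo.procedure);
  return result.recordset;          // rows from the first result set
}
module.exports = { getSQLData };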

Related

Node.js wait("await") for SQL database query before proceeding

I have spent a lot of time reading up on this but I simply don't get how to solve it.
I have an application that uses a token that is stored in a SQL database. I need that token before the application can proceed.
I'm trying to solve it with "await" but it doesn't work. The SQL query result is still retrieved "too late".
const pool = mysql.createPool({
  user     : 'xxxx', // e.g. 'my-db-user'
  password : "xxxx", // e.g. 'my-db-password'
  database : "xxxx", // e.g. 'my-database'
  // If connecting via localhost, specify the ip
  host     : "xxxx"
  // If connecting via unix domain socket, specify the path
  //socketPath : `/cloudsql/xxxx`,
});
const isAuthorized = async (userId) => {
  let query = "SELECT * FROM auth WHERE id = 2";
  await pool.query(query, (error, results) => {
    if (!results[0]) {
      console.log("No results");
      return;
    } else {
      tokenyay = results[0].refreshtoken;
      console.log("results: " + results[0].refreshtoken);
      return results[0].refreshtoken;
    }
  });
  await console.log("tokenyay: " + tokenyay);
  if (tokenyay != null && tokenyay != '') {
    refreshTokenStore[userId] = tokenyay;
  }
  console.log(userId);
  console.log(refreshTokenStore[userId] ? true : false);
  return refreshTokenStore[userId] ? true : false;
};
I don't know what your pool is, but judging from the callback, we can see that the pool.query() method is not await-able.
You can manually create a Promise for it, though, which is await-able. For example:
await new Promise((resolve, reject) => {
  pool.query(query, (error, results) => {
    if (error) return reject(error);
    if (!results[0]) {
      console.log("No results");
      resolve(); // give `undefined` to the `await...` and make it stop waiting
      return;
    } else {
      tokenyay = results[0].refreshtoken;
      console.log("results: " + results[0].refreshtoken);
      resolve(results[0].refreshtoken);
    }
  });
});
Edit:
However, since the result of await is obtained from the value passed to the resolve, we don't need to assign tokenyay inside of the callback.
We can use tokenyay = await... instead.
tokenyay = await new Promise((resolve, reject) => {
  pool.query(query, (error, results) => {
    if (error) return reject(error);
    if (!results[0]) {
      console.log("No results");
      resolve(); // give `undefined` to the `await...` and make it stop waiting
      return;
    } else {
      console.log("results: " + results[0].refreshtoken);
      resolve(results[0].refreshtoken);
    }
  });
});
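Alternatively, a sketch using Node's util.promisify, assuming the callback-style mysql package shown above, so pool.query can be awaited directly without wrapping every call by hand:
const { promisify } = require('util');
const query = promisify(pool.query).bind(pool); // promisified: resolves with the results array
const isAuthorized = async (userId) => {
  const results = await query("SELECT * FROM auth WHERE id = 2");
  if (!results[0]) {
    console.log("No results");
    return false;
  }
  const token = results[0].refreshtoken;
  refreshTokenStore[userId] = token; // refreshTokenStore as in the original snippet
  return Boolean(token);
};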

Redis mocha Test case issue

I have one file called cache.js:
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
  logger.error("Error connecting to redis", err);
});
exports.setExp = function(key, timeLeft, data) {
  redisCache.set(key, JSON.stringify(data), function (err, reply) {
    console.log("error " + err);
    console.log("reply " + reply);
    if (err) {
      console.log("error " + err.command + err.code);
      logger.info("This error on set key related to node_redis");
    }
    if (reply == 'OK') {
      redisCache.expire(key, timeLeft, function (err, reply) {
        if (err) {
          logger.info("This error on expire key related to node_redis");
        }
        if (reply === 1) {
          logger.info(key + " key expire time set as " + timeLeft + " successfully!");
        }
      });
    }
  });
}
Now I want to write a test case for the above setExp function, but somehow node_redis always returns err as null and reply as OK.
Below is my test case:
var cache = require(path.join(__dirname, '..', '/cache'));
describe('cache', function () {
  it('Cache #setExp() ', function (done) {
    var result = cache.setExp(undefined, 0, []);
    assert.equal('OK', results);
    done();
  });
});
If I change the call as below, it should produce the error I mention, as per the node_redis test cases:
var result = cache.setExp('foo', 10, []);
It should return the error ERR wrong number of arguments for 'set' command.
var result = cache.setExp(undefined, 0, []);
It should surface an error I can assert on, such as:
assert.equal(err.command, 'SET');
Please suggest the right way to achieve this.
Your thinking seems to be almost completely wrong here.
First of all, you're writing and using setExp as if it's a synchronous operation, but it isn't. It will return before the request is made to redis. It also never returns anything, so even if it was synchronous, result in your tests will always be undefined.
You need to redesign setExp as an asynchronous operation, either by using the async keyword, returning a promise, or having it accept a callback function.
Second of all, if you want to set an expiration on a Redis key, you should set it when you set the key itself, instead of setting the key with no expiration and then trying to add the expiration later. Otherwise you run the risk of the expiration setting failing, and then winding up with an orphaned key that never expires.
Here's an example using node's util.promisify, as described in the node_redis docs:
var redis = require('redis');
var { promisify } = require('util');
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
  logger.error("Error connecting to redis", err);
});
var set = promisify(redisCache.set).bind(redisCache);
exports.setExp = function(key, timeLeft, data) {
  return set(key, JSON.stringify(data), 'EX', timeLeft.toString(10))
    .then((reply) => {
      if (reply !== 'OK') throw new Error(reply);
      return reply;
    });
};
In your tests you'd do something like this:
var cache = require('../cache');
describe('cache', function () {
  it('Cache #setExp() ', function () {
    let key = 'some key';
    let timeLeft = 12345;
    let data = { foo: 'bar' };
    return cache.setExp(key, timeLeft, data)
      .then((result) => {
        assert.equal('OK', result);
      });
  });
});
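If the client really does surface an error for an undefined key (as the question assumes), the failure case can be asserted through the rejected promise; a sketch:
it('Cache #setExp() rejects on a bad key', function () {
  return cache.setExp(undefined, 0, [])
    .then(
      () => assert.fail('expected setExp to reject'),
      (err) => assert.ok(err) // e.g. ERR wrong number of arguments for 'set' command
    );
});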
Also, results and result are not the same thing. In your test case, there is no variable called results.
Oh, and don't do this:
var cache = require(path.join(__dirname,'..','/cache'));
require already supports paths relative to __dirname. Just do this:
var cache = require('../cache');

Request Data from MSSQL with Node.js -- Error

I want to connect to and query MSSQL using Node.js, to link it with Magento.
I have been trying to fix this for days now, but it always ends the same way...
This is my Error Code:
Connected
{ RequestError: Requests can only be made in the LoggedIn state, not the Connecting state
    at RequestError (C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\errors.js:34:12)
    at Connection.makeRequest (C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\connection.js:1423:33)
    at Connection.execSql (C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\connection.js:1194:19)
    at executeStatement (C:\Workspace\Visual-Code\nodeApi\nodeapi.js:41:20)
    at Connection.<anonymous> (C:\Workspace\Visual-Code\nodeApi\nodeapi.js:14:9)
    at emitOne (events.js:116:13)
    at Connection.emit (events.js:211:7)
    at Connection.socketError (C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\connection.js:869:14)
    at C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\connection.js:739:25
    at GetAddrInfoReqWrap.callback (C:\Workspace\Visual-Code\nodeApi\node_modules\tedious\lib\connector.js:68:18)
  message: 'Requests can only be made in the LoggedIn state, not the Connecting state',
  code: 'EINVALIDSTATE' }
I searched a lot and found similar problems, but nothing solved it...
This is my code; maybe you can help me spot the mistake.
var Connection = require('tedious').Connection;
var config = {
  userName: 'Cool userName',
  password: 'awesome password',
  server: 'amazing server',
  options: {
    database: 'database',
  }
};
var connection = new Connection(config);
connection.on('connect', function(err) {
  // If no error, then good to proceed.
  console.log("Connected");
  executeStatement();
});
var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;
function executeStatement() {
  request = new Request("select * from Artikelstamm;", function(err) {
    if (err) {
      console.log(err);
    }
  });
  var result = "";
  request.on('row', function(columns) {
    columns.forEach(function(column) {
      if (column.value === null) {
        console.log('NULL');
      } else {
        result += column.value + " ";
      }
    });
    console.log(result);
    result = "";
  });
  request.on('done', function(rowCount, more) {
    console.log(rowCount + ' rows returned');
  });
  connection.execSql(request);
}
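One thing worth checking in the code above (a hedged observation, not a confirmed fix): the err argument of the 'connect' handler is never inspected, so executeStatement() runs even when the connection never reached the LoggedIn state (the socketError frames in the stack trace suggest exactly that). A sketch of the guarded handler:
connection.on('connect', function(err) {
  if (err) {
    console.log('Connection failed: ' + err.message);
    return; // don't execute statements on a dead connection
  }
  console.log('Connected');
  executeStatement();
});
// Depending on your tedious version, you may also need to start the
// connection explicitly, since newer releases no longer connect automatically:
// connection.connect();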
Set a firewall rule for your DB by adding the client IP. In Azure SQL DB, there is a SET FIREWALL RULE button you can use to add the IP.
Add this event to your request
request.on("requestCompleted", function () {
connection.close();
resolve(result);
});

Node JS Express - Oracle Connection Pooling (ORA-24418: Cannot open further sessions)

I'm having issues with the Oracle DB module:
https://github.com/oracle/node-oracledb/blob/master/doc/api.md
I have an application which has between 300 and 900 hits per hour (normally from around 100 users). The application has many $.post requests in the background to retrieve information from a database and display to the user.
I've recently switched to this module as it is Oracle's own (I was using https://github.com/joeferner/node-oracle previously).
Here's how I have it set out:
/bin/www
oracledb.createPool(
  {
    user            : 'USER',
    password        : 'PASS',
    connectString   : 'DB:1521/SID:POOLED',
    connectionClass : 'ARBITRARY_NAME',
    poolMin         : 1,
    poolMax         : 50,
    poolTimeout     : 300
  },
  function(err, pool) {
    if (err) {
      console.log(err);
    }
    require('../libs/db')(pool); // Export pool to separate file
  }
);
/libs/db.js
module.exports = function(pool) {
  // Require oracle module for formatting
  var oracledb = require('oracledb');
  // Export acquire and query function
  module.exports.acquire_and_query = function(sql, params, callback) {
    // ACQUIRE connection from pool
    pool.getConnection(function(err, connection) {
      // NUMBER OF CONNECTIONS OPEN
      console.log("ORACLE: CONNX OPEN: " + pool.connectionsOpen);
      // NUMBER OF CONNECTIONS IN USE
      console.log("ORACLE: CONNX IN USE: " + pool.connectionsInUse);
      if (err) {
        console.log(err.message);
        return;
      }
      // Use connection to QUERY db and return JSON object
      connection.execute(sql, params, { maxRows: 1000, isAutoCommit: true, outFormat: oracledb.OBJECT }, function(err, result) {
        // Error Handling
        if (err) {
          console.log(err.message); // Log the error
          return false;             // Return false for our error handling
        }
        // Release the connection back to the pool
        connection.release(function(err) {
          if (err) {
            console.log(err.message);
            return;
          }
        });
        // Return callback with rowset first, out bind parameters second
        return callback(result.rows, result.outBinds, result.rowsAffected);
      });
    });
  };
};
This "acquire_and_query" function is called from within our application, with the SQL and its params passed in to be executed.
The Oracle DB has the maximum number of pooled connections set to 80 (and we are not exceeding it), and generally looks pretty happy.
The Node application, however, constantly throws ORA-24418: Cannot open further sessions, and I am unsure how to resolve this.
Thanks.
Resolved - Issue: My poor coding!
I had unwittingly set a Socket.IO event to update the view for EVERYONE connected multiple times (rather than querying the database once, then sending the view over the socket...) sigh
I was also, even more stupidly, using a for loop in a transaction-based query (which was INSERTing multiple rows on each run)... Once I had changed this to a recursive pattern, it ran swimmingly!
Good article here about for loops and recursive patterns: http://www.richardrodger.com/2011/04/21/node-js-how-to-write-a-for-loop-with-callbacks/#.VaQjJJNViko
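A minimal sketch of the recursive pattern referred to above, assuming the node-oracledb callback API and a hypothetical my_table, so each INSERT only starts after the previous one has called back:
function insertRows(connection, rows, done) {
  if (rows.length === 0) {
    return connection.commit(done); // all rows inserted, commit once
  }
  var row = rows[0];
  connection.execute(
    "INSERT INTO my_table (id, value) VALUES (:id, :value)", // hypothetical statement
    { id: row.id, value: row.value },
    { autoCommit: false },
    function(err) {
      if (err) return done(err);
      insertRows(connection, rows.slice(1), done); // recurse with the remaining rows
    }
  );
}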
Anyway - here's what I use now (and it works rather well)
Using node-oracledb v0.6 (https://github.com/oracle/node-oracledb) and Express 4 (http://expressjs.com/)
bin/www
/**
 * Database
 */
// AS PER DOCUMENTATION: https://github.com/oracle/node-oracledb/blob/master/examples/dbconfig.js
var dbconfig = require("../libs/dbconfig.js");
oracledb.connectionClass = dbconfig.connectionClass;
oracledb.createPool({
  user: dbconfig.user,
  password: dbconfig.password,
  connectString: dbconfig.connectString,
  poolMax: 44,
  poolMin: 2,
  poolIncrement: 5,
  poolTimeout: 4
}, function(err, pool) {
  if (err) {
    console.log("ERROR: ", new Date(), ": createPool() callback: " + err.message);
    return;
  }
  require('../libs/oracledb.js')(pool);
});
libs/oracledb.js
module.exports = function(pool) {
  ////////////////////////////
  // INSTANTIATE THE DRIVER //
  ////////////////////////////
  var oracledb = require("oracledb");
  //////////////////////
  // GET A CONNECTION //
  //////////////////////
  var doConnect = function(callback) {
    console.log("INFO: Module getConnection() called - attempting to retrieve a connection using the node-oracledb driver");
    pool.getConnection(function(err, connection) {
      // UNABLE TO GET CONNECTION - CALLBACK WITH ERROR
      if (err) {
        console.log("ERROR: Cannot get a connection: ", err);
        return callback(err);
      }
      // If pool is defined - show connectionsOpen and connectionsInUse
      if (typeof pool !== "undefined") {
        console.log("INFO: Connections open: " + pool.connectionsOpen);
        console.log("INFO: Connections in use: " + pool.connectionsInUse);
      }
      // Else everything looks good
      // Obtain the Oracle Session ID, then return the connection
      doExecute(connection, "SELECT SYS_CONTEXT('userenv', 'sid') AS session_id FROM DUAL", {}, function(err, result) {
        // Something went wrong, release the connection and return the error
        if (err) {
          console.log("ERROR: Unable to determine Oracle SESSION ID for this transaction: ", err);
          doRelease(connection);
          return callback(err);
        }
        // Log the connection ID (we do this to ensure the connections are being pooled correctly)
        console.log("INFO: Connection retrieved from the database, SESSION ID: ", result.rows[0]['SESSION_ID']);
        // Return the connection for use in model
        return callback(err, connection);
      });
    });
  };
  /////////////
  // EXECUTE //
  /////////////
  var doExecute = function(connection, sql, params, callback) {
    connection.execute(sql, params, { autoCommit: false, outFormat: oracledb.OBJECT, maxRows: 1000 }, function(err, result) {
      // Something went wrong - handle the error and release the connection
      if (err) {
        console.log("ERROR: Unable to execute the SQL: ", err);
        //doRelease(connection);
        return callback(err);
      }
      // Return the result to the request initiator
      // console.log("INFO: Result from Database: ", result)
      return callback(err, result);
    });
  };
  ////////////
  // COMMIT //
  ////////////
  var doCommit = function(connection, callback) {
    connection.commit(function(err) {
      if (err) {
        console.log("ERROR: Unable to COMMIT transaction: ", err);
      }
      return callback(err, connection);
    });
  };
  //////////////
  // ROLLBACK //
  //////////////
  var doRollback = function(connection, callback) {
    connection.rollback(function(err) {
      if (err) {
        console.log("ERROR: Unable to ROLLBACK transaction: ", err);
      }
      return callback(err, connection);
    });
  };
  //////////////////////////
  // RELEASE A CONNECTION //
  //////////////////////////
  var doRelease = function(connection) {
    connection.release(function(err) {
      if (err) {
        console.log("ERROR: Unable to RELEASE the connection: ", err);
      }
      return;
    });
  };
  //////////////////////////////
  // EXPORT THE FUNCTIONALITY //
  //////////////////////////////
  module.exports.doConnect = doConnect;
  module.exports.doExecute = doExecute;
  module.exports.doCommit = doCommit;
  module.exports.doRollback = doRollback;
  module.exports.doRelease = doRelease;
};
Example Usage
//////////////////////////////
// REQUIRE RELEVANT MODULES //
//////////////////////////////
var db = require("../libs/oracledb.js");
var oracledb = require('oracledb');
var sql = "";
///////////////////////////
// RETRIEVE CURRENT DATE //
///////////////////////////
module.exports.getCurDate = function(callback) {
  sql = "SELECT CURRENT_DATE FROM DUAL";
  db.doConnect(function(err, connection) {
    console.log("INFO: Database - Retrieving CURRENT_DATE FROM DUAL");
    if (err) {
      console.log("ERROR: Unable to get a connection ");
      return callback(err);
    } else {
      db.doExecute(
        connection, sql,
        {}, // PASS BIND PARAMS IN HERE - SEE ORACLEDB DOCS
        function(err, result) {
          if (err) {
            db.doRelease(connection);          // RELEASE CONNECTION
            return callback(err);              // ERROR
          } else {
            db.doRelease(connection);          // RELEASE CONNECTION
            return callback(err, result.rows); // ALL IS GOOD
          }
        }
      );
    }
  });
};
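For completeness, a hypothetical Express route that consumes getCurDate (the require path and route name are assumptions, not part of the original code):
var express = require('express');
var router = express.Router();
var dates = require('../models/dates'); // wherever getCurDate is exported
router.get('/curdate', function(req, res) {
  dates.getCurDate(function(err, rows) {
    if (err) return res.status(500).json({ error: 'Database error' });
    res.json(rows); // e.g. [{ CURRENT_DATE: '...' }]
  });
});
module.exports = router;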
This error message is raised when the sessMax parameter supplied in OCISessionPoolCreate has been reached.
So, my first move would be to verify whether database sessions are being closed correctly.
When this error message arises, execute the following three actions:
1.- (using sqlplus) show parameter sess
2.- (using sqlplus)
select username, machine, program, count(*)
from v$session
group by username, machine, program
order by 4;
3.- verify in alert.log if there are any other ORA- messages during this event.
Did you perform these steps? (Please share your results.)

Node SQL Server mssql streaming

I am new to Node and am working with mssql in order to connect to SQL Server. Would anyone be able to assist by giving me a fuller example of mssql streaming? I find the GitHub example vague and don't know where to start. Any assistance would be much appreciated.
var sql = require('mssql');
var config = {
  user: '...',
  password: '...',
  server: 'localhost', // You can use 'localhost\\instance' to connect to named instance
  database: '...',
  stream: true,
  options: {
    // Use this if you're on Windows Azure
  }
}
sql.connect(config, function(err) {
  var request = new sql.Request();
  request.stream = true; // You can set streaming differently for each request
  request.query('select * from verylargetable'); // or request.execute(procedure);
  request.on('recordset', function(columns) {
    // Emitted once for each recordset in a query
  });
  request.on('row', function(row) {
    // Emitted for each row in a recordset
  });
  request.on('error', function(err) {
    // May be emitted multiple times
  });
  request.on('done', function(returnValue) {
    // Always emitted as the last one
  });
});
I'm going to Holy Necro this post because I ran into the same problem today and would like to leave something that might help future me.
According to ExpressJs documentation, the proper way to stream any large set of data is to write it to the response, flush the response occasionally and then when done, end the response.
mssql on NPM states that there are a few events you can subscribe to, like the ones listed in your excerpt from their documentation. This is great, but how do you integrate the two?
Well, I came up with the following solution (might not be the best, but hey, it works)
The idea is to stream the data from SQL record by record, but only flush the data to the caller in batches of 50. Once done, end the response.
I also needed it back in array format, so I had to construct the beginning, separators, and end myself.
exports.listAllRecordsInReallyBigDataTable = (req, res) => {
  const config = {
    ...
  };
  sql.connect(config, () => {
    res.setHeader('Cache-Control', 'no-cache');
    const request = new sql.Request();
    request.stream = true;
    request.query('select * from myBigTableOrView');
    let rowCount = 0;
    const BATCH_SIZE = 50;
    request.on('recordset', () => {
      res.setHeader('Content-Type', 'application/json');
      res.write('[');
    });
    request.on('row', row => {
      if (rowCount > 0)
        res.write(',');
      if (rowCount % BATCH_SIZE === 0)
        res.flush();
      res.write(JSON.stringify(row));
      rowCount++;
    });
    request.on('done', () => {
      res.write(']');
      sql.close();
      res.end();
    });
  });
};
The right way to do this, without any recordset consideration, is:
let request = new sql.Request();
request.stream = true;
request.query(*query*);
request.on('row', row => {
  // Emitted for each row in a recordset
});
request.on('error', err => {
  console.log(err);
  // May be emitted multiple times
});
request.on('done', result => {
  console.log("done ", result);
  // Always emitted as the last one
});
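If the consumer of the rows is slower than the database, recent mssql releases also document request.pause() and request.resume() for backpressure while streaming; a sketch, assuming mssql v4+ and a placeholder processRowAsync handler:
const sql = require('mssql');
sql.connect(config).then(() => {  // config as in the examples above
  const request = new sql.Request();
  request.stream = true;
  request.query('select * from verylargetable');
  request.on('row', row => {
    request.pause();              // stop emitting rows while we work
    processRowAsync(row, () => {  // placeholder for your own async handler
      request.resume();           // ask for the next row when done
    });
  });
  request.on('error', err => console.log(err));
  request.on('done', () => sql.close());
});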