Node SQL Server mssql streaming - sql

I am new to Node and am working with mssql in order to connect to SQL Server. Would anyone be able to assist by giving me a fuller example of mssql streaming? I find the GitHub example vague and don't know where to start. Any assistance would be much appreciated.
// Minimal mssql streaming example: enable `stream` (globally in the config,
// or per request) and consume the result set through events instead of
// buffering every row in memory.
const sql = require('mssql');

const config = {
  user: '...',
  password: '...',
  server: 'localhost', // You can use 'localhost\\instance' to connect to named instance
  database: '...',
  stream: true,
  options: {// Use this if you're on Windows Azure
  }
};

sql.connect(config, function(err) {
  const request = new sql.Request();
  // Streaming can also be toggled per request instead of globally.
  request.stream = true;
  request.query('select * from verylargetable'); // or request.execute(procedure);

  request.on('recordset', (columns) => {
    // Emitted once for each recordset in a query
  });
  request.on('row', (row) => {
    // Emitted for each row in a recordset
  });
  request.on('error', (err) => {
    // May be emitted multiple times
  });
  request.on('done', (returnValue) => {
    // Always emitted as the last one
  });
});

I'm going to Holy Necro this post because I ran into the same problem today and would like to leave something that might help future me.
According to ExpressJs documentation, the proper way to stream any large set of data is to write it to the response, flush the response occasionally and then when done, end the response.
mssql on NPM states that there are a few events you can subscribe to, like the ones listed in your excerpt from their documentation. This is great, but how do you integrate the two?
Well, I came up with the following solution (might not be the best, but hey, it works)
The idea is to stream the data from SQL record for record, but only flush the data to the caller in batches of 50. Once done, end the response.
I also needed it back in Array format so I had to construct the beginning, separators and end myself for this.
exports.listAllRecordsInReallyBigDataTable = (req, res) => {
const config = {
...
}
sql.connect(config, () => {
res.setHeader('Cache-Control', 'no-cache');
const request = new sql.Request();
request.stream = true;
request.query('select * from myBigTableOrView');
let rowCount = 0;
const BATCH_SIZE = 50;
request.on('recordset', () => {
res.setHeader('Content-Type', 'application/json');
res.write('[');
}
request.on('row', row => {
if (rowCount > 0)
res.write(',');
if (rows % BATCH_SIZE === 0)
res.flush();
res.write(JSON.stringify(row));
rowCount++;
}
request.on('done', ()=> {
res.write(']');
sql.close();
res.end();
};
};
};

The right way to do this, without any recordset handling, is:
// `query` is your SQL string. FIX: the original had markdown emphasis left in
// (`request.query(*query*)`), which is not valid JavaScript.
let request = new sql.Request();
request.stream = true;
request.query(query);
request.on('row', row => {
  // Emitted for each row in a recordset
})
request.on('error', err => {
  console.log(err);
  // May be emitted multiple times
})
request.on('done', result => {
  console.log("done ", result);
  // Always emitted as the last one
})

Related

multiple SQL queries as one request on sequelize js

Is there a way for sequelize to send multiple queries as one request? Or does sequelize already do that?
Below is my code:
const transaction = await db.transaction();
try {
  // NOTE: sequelize only runs a query inside a transaction when the
  // transaction is passed explicitly; without `{ transaction }` these
  // bulkCreates run outside it and a rollback cannot undo them.
  await Resource.bulkCreate(resourcesTable, { transaction });
  // FIX: postContacts destructures a single object argument; the original
  // passed two positional arguments, so contactsTable/locationsTable were
  // silently undefined inside the helper and nothing was created.
  await postContacts({ contactsTable, locationsTable });
  await postResourceTag(resourceTagTable);
  await transaction.commit();
} catch (err) {
  logger.error(err);
  await transaction.rollback();
  throw new HttpError(500, "Database Error"); // placeholder until validation setup
}
// Persist contacts, then their locations; locations are only written when at
// least one contact row exists (matching the original nesting).
const postContacts = async ({ contactsTable, locationsTable }) => {
  if (!contactsTable?.length) return;
  await Contact.bulkCreate(contactsTable);
  if (!locationsTable?.length) return;
  await Location.bulkCreate(locationsTable);
};
// Persist resource/tag join rows, skipping the database round trip entirely
// when there is nothing to insert.
const postResourceTag = async (resourceTagTable) => {
  if (!resourceTagTable?.length) return;
  await Resource_Tag.bulkCreate(resourceTagTable);
};
Currently, I believe the bulkCreates are sent as separate requests? I'd prefer they were sent as one single request, like how it'd be if the SQL strings were concatenated and then sent.
Based on this answer you can use an array of promises and then handle them all in one point, is that what you might be looking for?

Netlify server functions unable to handle multipart/form-data

I’m working on a Netlify Function where we take form data for a job application (including a file upload) and pass the data on to a third-party API for use in their system. I was following along with this handy post (thanks!) —
https://www.netlify.com/blog/2021/07/29/how-to-process-multipart-form-data-with-a-netlify-function/
— but seem to have run into a situation where the data in the file is not handled properly (for example, PDFs turn up with blank content, though ASCII metadata appears to be at least partly intact), at least when using the Netlify CLI; I have yet to try on a deploy preview. Writing to a local directory confirms that the issue isn’t with the third party API. Is there something I’m missing when working with these files? Example code below (note that I’ve also attempted to work with the Buffer data, with identical results).
Fetch function to call the Netlify Function:
// Submit the application form to the serverless function and route the JSON
// reply: missing-field feedback on a 406-style validation response,
// confirmation message otherwise.
const formData = new FormData(form);

fetch('/.netlify/functions/apply', { method: 'POST', body: formData })
  .then((res) => {
    const acceptable = res.ok || res.status === 406;
    if (!acceptable) {
      throw new Error('oh no');
    }
    return res.json();
  })
  .then((payload) => {
    const missing = payload.missingRequiredFields;
    if (Array.isArray(missing) && missing.length > 0) {
      console.log(payload);
      showMissingFields(missing);
    } else {
      showConfirmationMessage(payload.message);
    }
  })
  .catch((err) => {
    showWarningMessage('Something went wrong; please try again.');
  })
  .finally(() => {
    submitButton.removeAttribute('disabled');
  });
And here’s our Netlify Function:
const Busboy = require("busboy");
const FormData = require("form-data");
const fetch = require("node-fetch");
// Parses a multipart/form-data request body into a flat `fields` object.
// File fields become { filename, type, content: Buffer, transferEncoding }.
function parseMultipartForm(event) {
  // source: https://www.netlify.com/blog/2021/07/29/how-to-process-multipart-form-data-with-a-netlify-function/
  return new Promise((resolve, reject) => {
    const fields = {};
    const busboy = new Busboy({
      // uses request headers to extract the form boundary value
      headers: event.headers,
    });
    // whenever busboy comes across a file ...
    busboy.on("file", (fieldname, filestream, filename, transferEncoding, mimeType) => {
      // FIX: the original assigned `content: data` inside the "data" handler,
      // so any file delivered in more than one chunk kept ONLY the last chunk
      // — which is exactly why PDFs arrived with blank content. Accumulate
      // every chunk and concatenate once the stream ends.
      const chunks = [];
      filestream.on("data", (chunk) => chunks.push(chunk));
      filestream.on("end", () => {
        fields[fieldname] = {
          filename,
          type: mimeType,
          content: Buffer.concat(chunks),
          transferEncoding,
        };
      });
    });
    // whenever busboy comes across a normal field ...
    busboy.on("field", (fieldName, value) => {
      // ... we write its value into `fields`.
      fields[fieldName] = value;
    });
    // once busboy is finished, we resolve the promise with the resulted fields.
    busboy.on("finish", () => {
      resolve(fields);
    });
    // FIX: surface parse failures instead of hanging the promise forever.
    busboy.on("error", reject);
    // FIX: Netlify delivers binary bodies base64-encoded; feeding the raw
    // base64 string to busboy corrupts file content. Decode when flagged,
    // and use end() so busboy knows the body is complete.
    busboy.end(event.isBase64Encoded ? Buffer.from(event.body, "base64") : event.body);
  });
}
/** ***************************************************************************
* Serverless function
**************************************************************************** */
exports.handler = async function(event, context) {
// parse the incoming multipart/form-data data into fields object
const fields = await parseMultipartForm(event);
// create new formdata object to be send to Lever
const form = new FormData();
for (const [key, value] of Object.entries(fields)) {
if (key === "resume") {
// append "resume" with the file buffer and add the file name
form.append("resume", value.content, { filename: value.filename });
} else {
form.append(key, value);
}
}
};
Any guidance you might have would be greatly appreciated.

Redis mocha Test case issue

I have one file call cache.js
// Single shared redis client; `port` and `name` must be in scope — presumably
// module-level config, verify against the rest of cache.js.
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
  logger.error("Error connecting to redis", err);
});
// NOTE(review): fire-and-forget — this function neither returns a value nor
// accepts a callback/promise, so callers (including the mocha test below)
// cannot observe the outcome of the asynchronous SET/EXPIRE that follows.
exports.setExp = function(key, timeLeft, data){
  redisCache.set(key, JSON.stringify(data), function (err, reply) {
    console.log("error "+err);
    console.log("reply "+reply);
    if(err) {
      console.log("error "+err.command + err.code);
      logger.info("This errror on set key related to node_redis");
    }
    // Only attach the expiry after a successful SET (== is a loose string
    // comparison here; reply is the string 'OK' on success).
    if(reply == 'OK') {
      // NOTE(review): setting the key and its expiry in two steps risks an
      // orphaned never-expiring key if the EXPIRE fails — SET with the 'EX'
      // option does both atomically.
      redisCache.expire(key, timeLeft, function (err, reply) {
        if(err) {
          logger.info("This errror on expire key related to node_redis");
        }
        if(reply === 1) {
          logger.info(key+" key expire time set as "+timeLeft+" successfully!");
        }
      });
    }
  });
}
Now I want to write the test case for the above setExp function, but somehow node_redis always returns err as null and reply as OK.
below is my test case.
var cache = require(path.join(__dirname,'..','/cache'));
describe('cache', function () {
  it('Cache #setExp() ', function (done) {
    // NOTE(review): setExp is asynchronous and returns nothing, so `result`
    // is always undefined here; the assertion below also references
    // `results` (a typo — no such variable) which throws a ReferenceError.
    var result = cache.setExp(undefined, 0, []);
    assert.equal('OK', results);
    done()
  })
})
If I change the call as below, it should produce the error I mention, as per the node_redis test cases:
var result = cache.setExp('foo', 10, []);
it should return me the error called ERR wrong number of arguments for 'set' command
var result = cache.setExp(undefined, 0, []);
It should accept the below error log as
assert.equal(err.command, 'SET');
Please suggest the right way to achieve this.
Your thinking seems to be almost completely wrong here.
First of all, you're writing and using setExp as if it's a synchronous operation, but it isn't. It will return before the request is made to redis. It also never returns anything, so even if it was synchronous, result in your tests will always be undefined.
You need to redesign setExp as an asynchronous operation, either by using the async keyword, returning a promise, or having it accept a callback function.
Second of all, if you want to set an expiration on a Redis key, you should set it when you set the key itself, instead of setting the key with no expiration and then trying to add the expiration later. Otherwise you run the risk of the expiration setting failing, and then winding up with an orphaned key that never expires.
Here's an example, using node's util.promisify as described in the node_redis docs:
var redis = require('redis');
var {promisify} = require('util');
var redisCache = redis.createClient(port, name);
redisCache.on("error", function(err) {
logger.error("Error connecting to redis", err);
});
var set = promisify(redisCache.set).bind(redisCache);
exports.setExp = function(key, timeLeft, data){
return set(key, JSON.stringify(data), 'EX', timeLeft.toString(10))
.then((reply) => {
if (reply !== 'OK') throw new Error(reply);
return reply;
});
};
In your tests you'd do something like this:
var cache = require('../cache');

describe('cache', function () {
  // Mocha treats a returned/awaited promise as the async completion signal,
  // so no done() callback is needed.
  it('Cache #setExp() ', async function () {
    const result = await cache.setExp('some key', 12345, { foo: 'bar' });
    assert.equal('OK', result);
  });
});
Also, results and result are not the same thing. In your test case, there is no variable called results.
Oh, and don't do this:
var cache = require(path.join(__dirname,'..','/cache'));
require already supports paths relative to __dirname. Just do this:
var cache = require('../cache');

Node.js mssql Global connection already exists connection pool memory leak

New to Node. I have looked at some other questions, and haven't found a direct answer. I'm creating a module for a simple Sql data call, but am getting the "Global connection already exists" error. I believe my solution lies in using a connectionPool, but I am uncertain of how to create a connection pool or if I'm doing it correctly. Any help is appreciated. I have removed any sensitive login data as well.
UPDATE: by switching my sql.close() and putting it before my onDone&&onDone() callback function, I now am receiving the correct data, but node is now giving warnings of " Warning: Possible EventEmitter memory leak detected. 11 error listeners added. Use emitter.setMaxListeners() to increase limit"
What's not working: I can successfully run my first call, but then every call after that returns nothing. I have verified using SQL profiler that only one call is being made to the server. So I believe something in my code is bypassing the second call and returning a null result set?
const self = {};

// NPM package that supports accessing Microsoft SQL databases from node.
let sql = require('mssql');

let debug = false;
let connection;

// configure SQL Connection. The credentials below should be handled more securely.
// While this backend code is not accessible via public web accesses, it will be in
// GitHub.
let config = {
  domain: "hidden",
  user: "hidden",
  password: 'hidden',
  server: 'hidden',
  database: 'hidden',
  options: {
    encrypt: false // Use this if you're on Windows Azure
  }
};

// FIX: register the global error listener exactly once, at module load.
// The original attached it inside getSQLData on EVERY call, which is what
// produced "Possible EventEmitter memory leak detected. 11 error listeners".
sql.on('error', err => {
  console.log("Error: Failed to connect to SQL Server");
});

// Wires parameters and handlers onto a request and executes the stored
// procedure. Shared by the already-connected and freshly-connecting paths
// (the original duplicated this entire body in both branches).
function runProcedure(request, sprocInfo, onDone, onRow) {
  request.stream = true;
  var results = [];
  // put all parameters as input parameters to sql call
  sprocInfo.parameters.forEach(function(element){
    request.input(element.parameterName.toString(), sql.NVarChar, element.parameterValue);
    // NEED to remove hard typing of Integers. Maybe case statement based on typeof??
  });
  request.on('row', row => {
    results.push(row);
    onRow && onRow(row);
  });
  request.on('done', result => {
    onDone && onDone(results);
    // FIX: do NOT call sql.close() here. Closing the shared global pool after
    // every request is why only the first call ever returned data — keep the
    // pool open and reuse it across calls.
  });
  request.on('error', err => {
    // May be emitted multiple times
    if (debug) console.log('error');
    if (debug) console.log('the error is: ' + err);
  });
  request.execute(sprocInfo.procedure);
}

// sprocInfo: { procedure, parameters: [{parameterName, parameterValue}], database? }.
// onDone(allRows) fires once after the procedure completes; onRow(row) fires
// per streamed row. Both callbacks are optional.
self.getSQLData = function getSQLData (sprocInfo, onDone, onRow) {
  if (debug) console.log("Hit SQL Function \n");
  if (typeof(sprocInfo.parameters) === 'object' && Array.isArray(sprocInfo.parameters)) {
    if (debug) console.log("Passed array check");
    // NOTE(review): mutating config after the pool is open does not switch
    // the database of an existing connection — confirm whether per-call
    // database selection is actually required.
    if (sprocInfo.database) config.database = sprocInfo.database;
    if (connection && connection._connected) {
      if (debug) console.log("Inside SQL Connection");
      runProcedure(new sql.Request(connection), sprocInfo, onDone, onRow);
    } else {
      connection = sql.connect(config, err => {
        if (debug) console.log("Inside SQL Connection");
        if (err) {
          // Connection failed; the global 'error' listener above also fires.
          console.log("Error: Failed to connect to SQL Server");
          return;
        }
        runProcedure(new sql.Request(), sprocInfo, onDone, onRow);
      });
    }
  } else {
    throw new Error ("Parameters is not an array");
  }
};

module.exports = self;

Node.js and MSSQL stored procedures

What would be the suggested way (if even possible) to call MSSQL SP from Node.js. The documentation https://www.npmjs.org/package/mssql is great but there is no mention of SP (stored procedure) anywhere.
The linked document does actually mention stored procedures:
// Declare any input/output parameters on the Request, then call execute()
// with the stored procedure's name instead of query().
var request = new sql.Request(connection);
request.input('input_parameter', sql.Int, 10); // maps to @input_parameter INT
request.output('output_parameter', sql.VarChar(50)); // maps to @output_parameter VARCHAR(50) OUTPUT
request.execute('procedure_name', function(err, recordsets, returnValue) {
  // ... error checks
  console.dir(recordsets);
});
Not sure it's wise to answer this question, but it might be valueable for future readers/googlers.
It's getting better with ES6/7 additions to JS. This is how you can do it with async/await:
// Connects with the given config, runs the stored procedure, and resolves
// with its first recordset.
async function getDataFromProcedure(dbConfig, procedureName) {
  try {
    await sql.connect(dbConfig);
    const request = new sql.Request();
    // FIX: `recordsets` was assigned without const/let — an implicit global
    // (and a crash in strict mode / ES modules).
    const recordsets = await request.execute(procedureName);
    return recordsets[0];
  } catch (error) {
    // FIX: the original swallowed the error and silently resolved with
    // undefined; rethrow so callers can distinguish failure from empty data.
    throw error;
  }
};
Alternate async/await syntax. I like the .then() format.
return await this.sqlConnectionPool.connect().then(async (pool) => {
return await pool
.request()
.input("UserID", sql.Int, id)
.execute("spADF_User_Get")
.then((result) => {
if (result.recordset && result.recordset.length === 1) {
return result.recordset[0];
} else {
//Something bad happened
}
});
});