NodeJS PostgreSQL - sql

I have a question: how can I output all rows from PostgreSQL? Right now I get some errors. Please help me. Thanks.
This is my code:
'use strict'
const res = client.query("SELECT * FROM public", function(err, rows, fileds) {
    const row = [];
    for (let i = 0; i < rows.length; i++) {
        row = rows[i];
        console.log(row);
    }
    rows.forEach(async function(row) {
        console.log(row.name);
    })
    console.log('Finish');
});
const func = ms => new Promise(res => setTimeout(res, ms));
console.dir({func});
console.dir(res);
client.end();

As you are not using a pool yet, I assume you are using a pg version older than 6. You should do:
return client.query(sqlStatement)
    .then(res => {
        client.end();
        return res.rows;
    })
    .catch(e => {
        client.end();
        console.error(e);
        throw e;
    });
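For context, a minimal end-to-end sketch with a recent pg version, where client.query returns a promise. Two assumptions: the connection settings come from the standard PG* environment variables, and the table name my_table is a placeholder (note that public in the original query is a schema, not a table):

const { Client } = require('pg');

async function fetchAllRows() {
    const client = new Client(); // connection settings read from PG* environment variables
    await client.connect();
    try {
        // "public" is a schema, not a table, so query a real table instead.
        const res = await client.query('SELECT * FROM my_table');
        res.rows.forEach(row => console.log(row));
        return res.rows;
    } finally {
        await client.end(); // always close the connection, even on error
    }
}

fetchAllRows().catch(console.error);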

Related

vue method for loop wait for function complete

In this Vue component, I have a method containing a for loop that calls another method. The second method makes a request to the app server. I need the first method to wait for the second before continuing the for loop. I've tried several async/await options but don't understand how to implement it.
methods: {
    selectFiles(files) {
        this.progressInfos = [];
        this.selectedFiles = files;
    },
    uploadFiles() {
        this.message = "";
        //var result = 0;
        for (let i = 0; i < this.selectedFiles.length; i++) {
            console.log(i)
            //result = await this.upload(i, this.selectedFiles[i]);
            this.upload(i, this.selectedFiles[i]);
        }
    },
    upload(idx, file) {
        this.progressInfos[idx] = { percentage: 0, fileName: file.name };
        //console.log("FinDocuNum:" + financialDocument.finDocId)
        FinancialDocumentDataService.upload(1, file, (event) => {
            this.progressInfos[idx].percentage = Math.round(100 * event.loaded / event.total);
        }).then((response) => {
            let prevMessage = this.message ? this.message + "\n" : "";
            this.message = prevMessage + response.status;
            return 1;
        }).catch(() => {
            this.progressInfos[idx].percentage = 0;
            this.message = "Could not upload the file:" + file.name;
            return 0;
        });
    }
}
The upload function must be async and return a promise, like this:
async upload(file) {
    return new Promise((resolve, reject) => {
        axios({url: url, data: file, method: 'POST'})
            .then(resp => {
                resolve(resp)
            })
            .catch(err => {
                reject(err)
            })
    })
},
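With upload returning a promise, the calling loop can then await each file before starting the next one. A sketch, keeping the (idx, file) signature from the question:

async uploadFiles() {
    this.message = "";
    for (let i = 0; i < this.selectedFiles.length; i++) {
        // Wait for each upload to finish before starting the next one.
        await this.upload(i, this.selectedFiles[i]);
    }
},

(Wrapping axios in new Promise is not strictly necessary, since axios already returns a promise, but it does not change the behavior.)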

Unable to execute any query in SQLite with react-native-sqlite-storage. I am trying it for an Android TV app

I also created a database with a table and stored it in android/app/src/main/assets/sqlite.db, and set createFromLocation to 1 or 2:
useEffect(() => {
    try {
        db = SQLite.openDatabase({
                name: "sqlite.db",
                createFromLocation: "~sqlite.db"
            },
            sucessToOpen,
            errorToOpen
        )
    }
    catch (err) {
        console.log(err)
    }
}, [])
....
const sucessToOpen = async (data) => {
    console.log("DB connected", data)
    db.transaction(tx => {
        tx.executeSql(
            'SELECT * FROM hydro', [],
            (tx, results) => {
                let datalength = results.row.length
                alert(datalength)
                console.log("results", datalength)
            },
        )
    });
}
This works for me:
db.transaction(tx => {
    tx.executeSql('SELECT * FROM TABLE_NAME', [], (tx, results) => {
        var temp = [];
        for (let i = 0; i < results.rows.length; ++i) {
            temp.push(results.rows.item(i));
        }
        console.log('TABLE_NAME', temp)
    });
});
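Note the likely culprit in the original code: results.row.length should be results.rows.length. react-native-sqlite-storage exposes the result set as results.rows, with a length property and an item(index) accessor for the individual rows.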

stream s3 to dynamodb with fast-csv : not all data inserted

When a CSV file is uploaded to my S3 bucket, my Lambda is triggered to insert the data into DynamoDB.
I need a stream because the file is too large to be downloaded as a full object.
const batchWrite = async (clientDynamoDB, itemsToProcess) => {
    const ri = {};
    ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
    const params = { RequestItems: ri };
    await clientDynamoDB.batchWriteItem(params).promise();
};

function runStreamPromiseAsync(stream, clientDynamoDB) {
    return new Promise((resolve, reject) => {
        const sizeChunk = 25;
        let itemsToProcess = [];
        stream
            .pipe(fastCsv.parse({ headers: Object.keys(schemaGeData), trim: true }))
            .on("data", (row) => {
                stream.pause();
                itemsToProcess.push(row);
                if (itemsToProcess.length === sizeChunk) {
                    batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
                        stream.resume();
                    });
                    itemsToProcess = [];
                }
            })
            .on("error", (err) => {
                console.log(err);
                reject("Error");
            })
            .on("end", () => {
                stream.pause();
                console.log("end");
                batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
                    resolve("OK");
                });
            });
    });
}
module.exports.main = async (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false;
    const AWS = require('aws-sdk');
    const https = require('https'); // needed for https.Agent below
    const s3 = new AWS.S3();
    const object = event.Records[0].s3;
    const bucket = object.bucket.name;
    const file = object.object.key;
    const agent = new https.Agent({
        keepAlive: true
    });
    const client = new AWS.DynamoDB({
        httpOptions: {
            agent
        }
    });
    try {
        // get the CSV data as a stream
        const stream = s3
            .getObject({
                Bucket: bucket,
                Key: file
            })
            .createReadStream()
            .on('error', (e) => {
                console.log(e);
            });
        await runStreamPromiseAsync(stream, client);
    } catch (e) {
        console.log(e);
    }
};
When my file has 1,000 lines everything is inserted, but when it has 5,000 lines my function inserts only around 3,000, and the number is random: sometimes more, sometimes less.
So I'd like to understand what I am missing here.
I also read this article, but to be honest, even if you pause the second stream, the first one is still running. If someone has any ideas on how to do this, it would be greatly appreciated!
Thanks
I found out why it was not fully processed: the callback of batchWriteItem can return UnprocessedItems. So I changed the batchWrite function, and also runStreamPromiseAsync a little, because not all the items in itemsToProcess might have been processed.
Anyway, here is the full code:
const batchWrite = async (client, itemsToProcess) => {
    const ri = {};
    ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
    let params = { RequestItems: ri };
    // batchWriteItem may leave some items unprocessed; keep retrying
    // until DynamoDB reports an empty UnprocessedItems map.
    do {
        const data = await client.batchWriteItem(params).promise();
        params = { RequestItems: data.UnprocessedItems };
    } while (params.RequestItems && Object.keys(params.RequestItems).length > 0);
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
    return new Promise((resolve, reject) => {
        const sizeChunk = 25;
        let itemsToProcess = [];
        let arrayPromise = [];
        stream
            .pipe(fastCsv.parse({ headers: Object.keys(schemaGeData), trim: true }))
            .on("error", (err) => {
                console.log(err);
                reject("Error");
            })
            .on('data', data => {
                itemsToProcess.push(data);
                if (itemsToProcess.length === sizeChunk) {
                    arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
                    itemsToProcess = [];
                }
            })
            .on('end', () => {
                if (itemsToProcess.length !== 0) {
                    arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
                }
                Promise.all(arrayPromise).then(resolve, reject);
            });
    });
}
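If the number of in-flight batches ever becomes a problem for very large files, the pause/resume idea from the first version can be combined with this one. A sketch, assuming the same fastCsv, schemaGeData, and batchWrite helpers as above (the function name is hypothetical):

function runStreamPromiseAsyncThrottled(stream, clientDynamoDB) {
    return new Promise((resolve, reject) => {
        const sizeChunk = 25;
        let itemsToProcess = [];
        const pendingBatches = [];

        const flush = () => {
            const batch = itemsToProcess;
            itemsToProcess = [];
            stream.pause(); // backpressure: stop reading while DynamoDB catches up
            pendingBatches.push(
                batchWrite(clientDynamoDB, batch).finally(() => stream.resume())
            );
        };

        stream
            .pipe(fastCsv.parse({ headers: Object.keys(schemaGeData), trim: true }))
            .on('error', reject)
            .on('data', (row) => {
                itemsToProcess.push(row);
                if (itemsToProcess.length === sizeChunk) flush();
            })
            .on('end', () => {
                if (itemsToProcess.length !== 0) flush();
                Promise.all(pendingBatches).then(resolve, reject);
            });
    });
}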

What's the difference between Async.queue and Promise.map?

I tried to stress test my API in ExpressJS, and to handle multiple requests I used Promise.all and then Async.queue with a concurrency option.
Promise:
export const myapi = async (args1, args2) => {
    console.log('args:', args1, args2);
    let testing_queue = [];
    testing_queue.push(new Promise(async (resolve, reject) => {
        let result = await doAComplexQuery(args1, args2); // SELECT... JOIN...
        if (!result || result.length <= 0)
            reject(new Error('Cannot find anything!'));
        resolve(result);
    }));
    return await Bluebird.map(testing_queue, async item => {
        return item;
    }, { concurrency: 4 });
};
Async.queue: (https://www.npmjs.com/package/async)
export const myapi = async (args1, args2) => {
    console.log('args:', args1, args2);
    let testing_queue = Async.queue(function (task, callback) {
        console.log('task', task);
        callback();
    }, 4);
    testing_queue.push(async function () {
        let result = await doAComplexQuery(args1, args2); // SELECT... JOIN...
        if (!result || result.length <= 0)
            throw new Error('Cannot find anything!');
        return result;
    });
};
And I try to make as many requests as possible:
const response = async function () {
    return await Axios.post('http://localhost:3000/my-api', {
        "args1": "0a0759eb",
        "args2": "b9142db8"
    }, {}).then(result => {
        return result.data;
    }).catch(error => {
        console.log(error.message);
    });
};
for (var i = 0; i < 10000; i++) {
    response();
}
Then I run it. Approach #1 returns many ResourceTimeout or "socket hang up" responses, while #2 returns a success response for every request and even runs faster.
So is Async.queue better in this case?
I think it could help the speed if you raise the concurrency limit on your Bluebird.map.
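Two things are worth noting about the comparison itself. In the Promise version, each API call builds its own one-item testing_queue, so { concurrency: 4 } never throttles anything across requests. And in the Async.queue version, the worker only logs the pushed function and calls callback() without invoking it, so doAComplexQuery never actually runs, which is why that variant looks faster and never fails. A sketch of a shared, process-wide queue where the limit really applies, assuming the same doAComplexQuery as above:

const Async = require('async');

// One queue for the whole process: at most 4 tasks run at any time,
// and the worker actually invokes the pushed async function.
const queue = Async.queue((task, callback) => {
    task().then(() => callback(), callback);
}, 4);

export const myapi = (args1, args2) =>
    new Promise((resolve, reject) => {
        queue.push(async () => {
            try {
                const result = await doAComplexQuery(args1, args2); // SELECT... JOIN...
                if (!result || result.length <= 0)
                    throw new Error('Cannot find anything!');
                resolve(result);
            } catch (err) {
                reject(err);
            }
        });
    });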

Send single response after multiple updates

I have an array of items that I am passing to an API endpoint (using Sequelize as my ORM). I'm trying to iterate over each item and update it; however, I'm getting an Unhandled rejection Error: Can't set headers after they are sent.
stepsController.put = (req, res) => {
    const { steps } = req.body;
    // Steps is an array of objects that I want to update...
    steps.map(step => {
        Step.findOne({ where: { id: step.id } })
            .then(savedStep =>
                savedStep
                    .update({
                        order: step.order,
                    })
                    .then(success => res.status(200).send(success))
                    .catch(error => res.send(error))
            )
            .then(ok => res.status(200).send(ok))
            .catch(err => res.send(err));
    });
};
I believe this is because it sends a response for each item. Sequelize's update method returns a promise. How can I iterate over all of the items and make sure all of them are updated before sending a single successful response?
There are three ways you can do this:
Promise.all
Co
Async/await
1) You can use Promise.all:
stepsController.put = (req, res) => {
    const { steps } = req.body;
    // Steps is an array of objects that I want to update...
    Promise.all(steps.map(step =>
        Step.findOne({ where: { id: step.id } })
            .then(savedStep =>
                savedStep.update({
                    order: step.order,
                }).catch(error => error)
            )
            .catch(err => err)
    ))
        .then(ok => res.status(200).send(ok))
        .catch(err => res.send(err));
};
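Note that the inner .catch(error => error) handlers convert failures into resolved values, so Promise.all will not short-circuit on the first failed update; remove them if you would rather have the whole request fail fast.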
2) Another way is to use co:
const co = require('co');
stepsController.put = co.wrap(function* (req, res) {
    try {
        const { steps } = req.body;
        // Steps is an array of objects that I want to update...
        for (let i = 0; i < steps.length; i++) {
            let savedStep = yield Step.findOne({ where: { id: steps[i].id } });
            if (savedStep)
                yield savedStep.update({ order: steps[i].order });
        }
        res.status(200).send();
    }
    catch (err) {
        res.send(err);
    }
});
3) If you're using Node 8.0+, there is no need for any package; you can use async/await directly:
stepsController.put = async (req, res) => {
    try {
        const { steps } = req.body;
        // Steps is an array of objects that I want to update...
        for (let i = 0; i < steps.length; i++) {
            let savedStep = await Step.findOne({ where: { id: steps[i].id } });
            if (savedStep)
                await savedStep.update({ order: steps[i].order });
        }
        res.status(200).send();
    }
    catch (err) {
        res.send(err);
    }
};
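A note on the trade-off: the Promise.all version runs all the updates concurrently, while the co and async/await versions await each findOne/update pair in turn, so they update sequentially. Concurrent updates are usually faster; the sequential loop is simpler to reason about and keeps the order of updates deterministic.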