Multiple image attachments to CouchDB (nano) with Node.js Express 4.0 & formidable

I am trying to insert multiple images into CouchDB via nano using Express 4 and formidable. I can access and insert individual files without difficulty using formidable and nano; however, when I try to insert one file after another, I get conflict errors. I am very new to JS and Node, and I know my understanding of callbacks and asynchronous functions is limited. This is what I have at the moment. Any help would be greatly appreciated.
function uploadImage(req, res) {
    var form = new formidable.IncomingForm(),
        files = [],
        fields = [],
        uploadcount = 1;

    form.on('field', function(field, value) {
        fields.push([field, value]);
    });

    form.on('file', function(field, file) {
        files.push([field, file]);
        var docid = fields[0][1];
        getRevision();

        // fetch the doc's current rev before attaching
        function getRevision() {
            dbn.get(docid, { revs_info: true }, function(err, body) {
                if (!err) {
                    var exrev = body._rev;
                    insertImage(exrev);
                } else {
                    console.log(err);
                }
            });
        }

        function insertImage(exrevision) {
            var exrev = exrevision;
            fs.readFile(file.path, function(err, data) {
                if (err) {
                    console.log(err);
                } else {
                    var imagename = docid + "_" + uploadcount + ".png";
                    dbn.attachment.insert(docid, imagename, data, 'image/png',
                        { rev: exrev }, function(err, body) {
                            if (!err) {
                                uploadcount++;
                            } else {
                                console.log(err); // the conflict errors show up here
                            }
                        });
                }
            });
        }
    });

    form.on('end', function() {
        console.log('done');
        res.redirect('/public/customise.html');
    });

    form.parse(req);
}

I found a solution: dump the files into a temporary directory first, then insert them into CouchDB via nano one after another within a single function. I couldn't find a way to pause the file stream to wait for the CouchDB response, so this sequential method seems adequate.
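For illustration, the rough shape of that approach in a minimal sketch (names like uploaded and insertNext are assumed here, not the original code): collect the temporary file paths while formidable parses, then insert them strictly one at a time once parsing ends:
var uploaded = []; // file paths collected in form.on('file', ...)

form.on('end', function() {
    insertNext(0);
});

// insert attachment i, and only recurse once CouchDB has responded,
// so every insert sees the doc's current rev
function insertNext(i) {
    if (i >= uploaded.length) { return; }
    dbn.get(docid, function(err, body) {
        if (err) { return console.log(err); }
        fs.readFile(uploaded[i], function(err, data) {
            if (err) { return console.log(err); }
            var imagename = docid + "_" + (i + 1) + ".png";
            dbn.attachment.insert(docid, imagename, data, 'image/png',
                { rev: body._rev }, function(err) {
                    if (err) { return console.log(err); }
                    insertNext(i + 1);
                });
        });
    });
}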

This is a problem with handling asynchronous calls. Because each attachment insert requires the doc's current rev number, you can't do the inserts in parallel. You must insert a new attachment only after you get a response from the previous one.
You could use promises and deferreds to do this, but I personally solved a similar problem using a package called "async". With async, you can use async.eachSeries() to make these asynchronous calls in series.
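As a rough sketch of what that could look like for the question's code (assuming files holds the formidable file objects and dbn/docid are as above; this is an illustration, not a drop-in fix):
var async = require('async');

// insert the attachments strictly in series, threading the rev
// from each CouchDB response into the next insert
function insertAll(files, firstRev, done) {
    var rev = firstRev;
    var count = 1;
    async.eachSeries(files, function(file, next) {
        fs.readFile(file.path, function(err, data) {
            if (err) { return next(err); }
            var imagename = docid + "_" + count + ".png";
            dbn.attachment.insert(docid, imagename, data, 'image/png',
                { rev: rev }, function(err, body) {
                    if (err) { return next(err); }
                    rev = body.rev; // the response carries the new rev
                    count++;
                    next();
                });
        });
    }, done);
}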
Another point is about the revision number: you can use the lighter-weight db.head() function instead of db.get(). The rev number is present in the "etag" header. You can get the rev like this:
// get rev number; the etag header holds the rev as a quoted JSON string
db.head(bookId, function(err, _, headers) {
    if (!err) {
        var rev = JSON.parse(headers.etag); // e.g. '"4-59d0..."' -> '4-59d0...'
        // do whatever you need to do with the rev number
        ......
    }
});
In addition, after each attachment insert, the response from CouchDB will look something like this:
{"ok":true,"id":"b2aba1ed809a4395d850e65c3ff2130c","rev":"4-59d043853f084c18530c2a94b9a2caed"}
The rev property gives the new rev number, which you can use when inserting the next attachment.

How can I use the same value as written in the JSON during the same test execution in TestCafe?

I have been trying to use a value from a JSON file that I successfully wrote using the fs.writeFile() function.
There are two test cases in the same fixture: one creates an ID and the second uses that ID. I can write the ID to the JSON file with fs.writeFile(), and I load it by importing the JSON file like var myid = require('../../resources/id.json').
The JSON file stores the correct ID for the current execution, but in the second test I get the ID from the first execution.
For example, id:1234 is stored during the first test execution and id:4567 is stored during the second. During the second execution I need id:4567, but I get 1234. This is weird, isn't it?
I use it like
t.typeText(ele, myid.orid)
My JSON file contains only the id, like {"orid":"4567"}
I am new to JavaScript and TestCafe; any help would really be appreciated.
Write File class
const fs = require('fs')
const baseClass = require('../component/base')

class WriteIntoFile {
    constructor(orderID) {
        const OID = {
            orderid: orderID
        }
        const jsonString = JSON.stringify(OID)
        // forward slash: a backslash here would be swallowed by the template literal
        fs.writeFile(`resources/id.json`, jsonString, err => {
            if (err) {
                console.log('Error writing file', err)
            } else {
                console.log('Successfully wrote file')
            }
        })
    }
}
export default WriteIntoFile
I created two different classes to separate the create & update operations, and I call the create & update order functions in a single fixture in the test file.
Create Order class
class CreateOrder {
    ----
    ----
    ----
    async createNewOrder() {
        // get text of created order and save the order id into the json file
        -----
        -----
        -----
        const orId = await baseClass.getOrderId();
        new WriteIntoFile(orId)
        console.log(orId)
        -----
        -----
        -----
    }
}
export default CreateOrder
Update Order class
var id = require('../../resources/id.json')

class UpdateOrder {
    async searchOrderToUpdate() {
        await t
            // Here, I get the old order id that was saved during the previous execution
            .typeText(baseClass.searchBox, id.orderid)
            .wait(2500)
            .click(baseClass.searchIcon)
            .doubleClick(baseClass.orderAGgrid)
        console.log(id.orderid)
        ----
        ----
    }
    async updateOrder() {
        await this.searchOrderToUpdate()
        await t
            .typeText(baseClass.phNo, '1234567890')
            .click(baseClass.saveBtn)
    }
}
export default UpdateOrder
Test file
const newOrder = new CreateOrder();
const update = new UpdateOrder();

const role = Role(`siteurl`, async t => {
    await login('id')
    await t
        .wait(1500)
}, { preserveUrl: true })

test('Should be able to create an Order', async t => {
    await newOrder.createNewOrder();
});

test('Should be able to update an order', async t => {
    await update.updateOrder();
});
I'll reply to this, but you probably won't be happy with my answer, because I wouldn't go down the same path you propose in your code.
I can see a couple of problems. Some of them might not be problems right now, but in a month you could struggle with them.
1/ You are creating separate test cases that are dependent on each other.
This is a problem because of these reasons:
what if Should be able to create an Order doesn't run? Or what if it fails? Then Should be able to update an order fails as well, and that information is useless, because it wasn't the update operation that failed but the fact that you didn't meet all the preconditions for the test case
how do you make sure Should be able to create an Order always runs before Should be able to update an order? There's no way! You can do it like this when one comes right before the other, and I think it will work, but at some point you'll decide to move one test somewhere else, and then you're in trouble and you'll spend hours debugging it. You have prepared a trap for yourself. I wrote this answer on this very topic; you can read it.
you can't run the tests in parallel
when I read your test file, there's no visible hint that the tests are dependent on each other. As a stranger to your code, I could easily mess things up, because I have no way of knowing about the dependency without going deeper into the code. This is a big trap for anyone who comes to your code after you. Don't do this to your colleagues.
2/ Working with files when all you need to do is pass a value around is too cumbersome.
I really don't see a reason why you need to save the id into a file. A slightly better approach (still violating 1/) could be:
const newOrder = new CreateOrder();
const update = new UpdateOrder();

// use a variable to pass the orderId around
// it's also visible that the tests are dependent on each other
let orderId = undefined;

const role = Role(`siteurl`, async t => {
    // some steps, I omit this for better readability
}, { preserveUrl: true })

test('Should be able to create an Order', async t => {
    orderId = await newOrder.createNewOrder();
});

test('Should be able to update an order', async t => {
    await update.updateOrder(orderId);
});
Doing it like this also slightly remedies what I wrote in 1/, namely that it wasn't visible at first sight that the tests are dependent on each other. Now, this is a bit improved.
Some other approaches how you can pass data around are mentioned here and here.
Perhaps an even better approach is to use the t.fixtureCtx object:
const newOrder = new CreateOrder();
const update = new UpdateOrder();

const role = Role(`siteurl`, async t => {
    // some steps, I omit this for better readability
}, { preserveUrl: true })

test('Should be able to create an Order', async t => {
    t.fixtureCtx.orderId = await newOrder.createNewOrder();
});

test('Should be able to update an order', async t => {
    await update.updateOrder(t.fixtureCtx.orderId);
});
Again, I can at least see the tests are dependent on each other. That's already a big victory.
Now back to your question:
During 2nd test execution I need the id:4567 but I get 1234 this is weird, isn't it?
No, it's not weird. You required the file:
var id = require('../../resources/id.json')
and so it's loaded once, and if you write into the file later, you won't read the new content unless you read the file again. require() is Node's function for loading modules, and it makes sense to load them only once.
This demonstrates the problem:
const idFile = require('./id.json');
const fs = require('fs');

console.log(idFile); // { id: 5 }

const newId = {
    'id': 7
};
fs.writeFileSync('id.json', JSON.stringify(newId));

// it's been loaded once, you won't get any other value here
console.log(idFile); // { id: 5 }
What can you do to solve the problem?
You can use fs.readFileSync():
const idFile = require('./id.json');
const fs = require('fs');

console.log(idFile); // { id: 5 }

const newId = {
    'id': 7
};
fs.writeFileSync('id.json', JSON.stringify(newId));

// you need to read the file again and parse its content
const newContent = JSON.parse(fs.readFileSync('id.json'));
console.log(newContent); // { id: 7 }
And this is what I warned you against in the comment section: it's too cumbersome and inefficient, because you write to a file and then read from the file just to get one value.
What you created is not very readable either:
const fs = require('fs')
const baseClass = require('../component/base')

class WriteIntoFile {
    constructor(orderID) {
        const OID = {
            orderid: orderID
        }
        const jsonString = JSON.stringify(OID)
        fs.writeFile(`resources/id.json`, jsonString, err => {
            if (err) {
                console.log('Error writing file', err)
            } else {
                console.log('Successfully wrote file')
            }
        })
    }
}
export default WriteIntoFile
All these file-writing operations sit in a constructor, but a constructor is not the best place for them; ideally it contains only variable assignments. I also don't see much reason to create a new class when you are doing only two operations that easily fit on one line of code:
fs.writeFileSync('orderId.json', JSON.stringify({ orderid: orderId }));
Keep it as simple as possible. It's more readable like this than having to go to a separate file with the class and decipher what it does there.

Storing results of an SQL SELECT query in a var in Node.js

I'm looking for a way to store the results of this SELECT query, like a "rank" chart for a game, but I'm not sure if what I'm encountering is an async issue, a data-type issue, or something else entirely. Any thoughts?
var ranksVar = [];
db.all("select * from user", function(err, rows) {
    if (err) {
        throw err;
    } else {
        setValue(rows);
    }
});

function setValue(value) {
    ranksVar = value;
    console.log(ranksVar);
}
I found a useful post about using SQLite with Node.js, and it gives you the basic examples needed to understand how to use it. Here it is: SQLite NodeJS.
In your case, look under the section on querying data.
Here's the example code:
const sqlite3 = require('sqlite3').verbose();

// open the database
let db = new sqlite3.Database('./db/chinook.db');

let sql = `SELECT DISTINCT Name name FROM playlists
           ORDER BY name`;

db.all(sql, [], (err, rows) => {
    if (err) {
        throw err;
    }
    rows.forEach((row) => {
        console.log(row.name);
    });
});

// close the database connection
db.close();
As you can see, the rows variable holds the result set as an array of row objects. To get all the data from it, you can iterate over it and push the rows into your array variable.
Cheers 😁 !
I figured out the issue. It was a data-type/async issue: I was trying to print a string from an array that was still undefined.
I went back a loop and used the JSON.stringify method to display the array objects correctly.
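For illustration, a minimal sketch of that kind of fix (assuming the user table from the question), with the stringifying done inside the callback, once the rows actually exist:
var ranksVar = [];

db.all("select * from user", function(err, rows) {
    if (err) {
        throw err;
    }
    // rows is a plain array of row objects; it only exists
    // here, inside the callback, after the query completes
    ranksVar = rows;
    console.log(JSON.stringify(ranksVar));
});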

HapiJS reply with readable stream

For one call, I am replying with a huge JSON object which sometimes blocks the Node event loop. So I'm using the Big Friendly JSON package to stream the JSON instead. My issue is that I cannot figure out how to actually reply with the stream.
My original code was simply
let searchResults = s3Access.getSavedSearch(guid).Body;
searchResults = JSON.parse(searchResults.toString());
return reply(searchResults);
Works great but bogs down on huge payloads
I've tried using the Big Friendly JSON package https://gitlab.com/philbooth/bfj
const stream = bfj.streamify(searchResults);
return reply(stream); // according to docs it's a readable stream
But then my browser complained about an empty response. I then tried adding the below to the reply, with the same result.
.header('content-encoding', 'json')
.header('Content-Length', stream.length);
I also tried return reply(null, stream); but that produced a ton of node errors
Is there some other way I need to organize this? My understanding was I could just reply a readable stream and Hapi would take care of it, but the response keeps showing up as empty.
Did you try using h.response? Here h is the response toolkit (the successor to reply in hapi v17+).
Example:
handler: async (request, h) => {
    const { limit, sortBy, order } = request.query;
    const queryString = {
        where: { status: 1 },
        limit,
        order: [[sortBy, order]],
    };
    let userList = {};
    try {
        userList = await _getList(User, queryString);
    } catch (e) {
        // throw new Boom(e);
        Boom.badRequest(i18n.__('controllers.user.fetchUser'), e);
    }
    return h.response(userList);
}

How to check & confirm if dropzone is actually sending a file to the server

I am using dropzonejs with the following code to initialize dropzone:
var myDropzone = new Dropzone("div#myId", { url: "/file/post"});
On the server side, I am using Express/Node.js with the following code:
// req.files is only populated if multipart middleware parsed the request
fs.readFile(req.files.displayImage.path, function (err, data) {
    // ...
    var newPath = __dirname + "/uploads/uploadedFileName";
    fs.writeFile(newPath, data, function (err) {
        res.redirect("back");
    });
});
The problem is that req.files always comes back undefined.
When I upload a file, is there a way through the browser's developer tools to check the payload and confirm whether the file has been sent or not?
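For what it's worth: in the browser's developer tools, the Network tab shows the upload request, and its payload should contain a multipart/form-data part named file, which is Dropzone's default field name (not displayImage). On the server side, Express 4 no longer bundles a multipart parser, so req.files stays undefined unless middleware populates it. A minimal sketch using formidable (the URL matches the Dropzone config above; the rest is an assumed illustration):
var formidable = require('formidable');

app.post('/file/post', function (req, res) {
    var form = new formidable.IncomingForm();
    form.parse(req, function (err, fields, files) {
        if (err) { return res.status(500).send(err.message); }
        // "file" is Dropzone's default field name for the upload
        console.log('received:', files.file.name, files.file.size, 'bytes');
        res.redirect('back');
    });
});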

Asynchronous confusion with ItemFileReadStore query

I want to save the results from a query on an ItemFileReadStore into an array called boxes, but the returned value is empty (presumably because fetch runs asynchronously).
The gotItems function builds the array as I want it to, but I can't return that back for any use! I could build the rest of my functionality into the gotItems part, but that would make my code unpretty.
How do I return an array for general use in my JavaScript from the gotItems function?
function getContentFile() {
    contentStore = new dojo.data.ItemFileReadStore({
        url: '../config/content.json',
        preventCache: true
    });
    var boxes = new Array();
    contentStore.fetch({ query: { partner: 'enabled' }, onItem: gotItems });
    return boxes;
}

function gotItems(item) {
    boxes.push( contentStore.getValue(item, 'title') );
    console.log(boxes);
    return boxes;
}

dojo.addOnLoad( function() {
    boxes = getContentFile();
    console.log(boxes);
    fadeIn('header', 500, 0);
});
Welcome to the world of asynchronous operations.
You'll need to do it with "continuation-style" programming. ItemFileReadStore's fetch operation is asynchronous, as you already know, since you pass the gotItems continuation to it.
contentStore.fetch({query: {partner : 'enabled'}, onItem: gotItems }) returns immediately. Your boxes will be empty at that point (because JavaScript is single-threaded). gotItems executes after the data arrives, and after the function passed to dojo.addOnLoad has returned.
You have to put your handling code:
console.log(boxes);
fadeIn('header', 500, 0);
inside the continuation gotItems itself. For example, something like:
// used as the onComplete callback, which receives the full items array
function gotItems(items) {
    var boxes = [];
    dojo.forEach(items, function(box) {
        boxes.push( contentStore.getValue(box, 'title') );
    });
    console.log(boxes); // you probably need to store "boxes" somewhere instead of just logging it
    fadeIn('header', 500, 0);
}
Also, note the difference between the callbacks: onItem receives one item at a time, while onComplete receives the full array of items, so when you handle the whole result set you need to iterate it.
You don't have access to the results when the function returns because as you guessed, the fetch operation executes asynchronously.
You can either put the code that uses the results in your gotItems() function (as answered by Stephen), or you can use Deferreds and Promises. IMHO, that's a better alternative since it lets you organize your code better (once you get used to the idioms of dealing with promises, the code reads more naturally) and it allows you to transparently execute both synchronous and asynchronous operations.
See these two Dojo tutorials on the subject.
In your case, a possible solution involving deferreds would read like:
function getContentFile() {
    contentStore = new dojo.data.ItemFileReadStore({
        url: '../config/content.json',
        preventCache: true
    });

    var dfd = new dojo.Deferred();
    var boxes = new Array();

    contentStore.fetch({
        query: { partner: 'enabled' },
        onItem: function(item) {
            boxes.push( contentStore.getValue(item, 'title') );
        },
        onComplete: function() {
            // resolve the promise and execute the function in then()
            dfd.callback(boxes);
        }
    });

    return dfd;
}

dojo.addOnLoad( function() {
    getContentFile().then(function(boxes) {
        console.log(boxes);
        fadeIn('header', 500, 0);
    });
});