What's the difference between Async.queue and Promise.map? - express

I tried to stress test my API in ExpressJS, and to handle multiple requests I used Bluebird's Promise.map and then Async.queue with a concurrency option.
Promise:
export const myapi = async (args1, args2) => {
  console.log('args:', args1, args2);
  let testing_queue = [];
  testing_queue.push(new Promise(async (resolve, reject) => {
    let result = await doAComplexQuery(args1, args2); // SELECT... JOIN...
    if (!result || result.length <= 0)
      return reject(new Error('Cannot find anything!')); // return so resolve() isn't also called
    resolve(result);
  }));
  return await Bluebird.map(testing_queue, async item => {
    return item;
  }, {concurrency: 4});
};
Async.queue: (https://www.npmjs.com/package/async)
export const myapi = async (args1, args2) => {
  console.log('args:', args1, args2);
  let testing_queue = Async.queue(function (task, callback) {
    console.log('task', task);
    callback(); // note: the worker only logs the task; the pushed function is never invoked
  }, 4);
  testing_queue.push(async function () {
    let result = await doAComplexQuery(args1, args2); // SELECT... JOIN...
    if (!result || result.length <= 0)
      throw new Error('Cannot find anything!');
    return result;
  });
};
And then I try to make as many requests as possible:
const response = async function () {
  return await Axios.post('http://localhost:3000/my-api', {
    "args1": "0a0759eb",
    "args2": "b9142db8"
  }, {}).then(result => {
    return result.data;
  }).catch(error => {
    console.log(error.message);
  });
};

for (let i = 0; i < 10000; i++) {
  response();
}
And run. The #1 way returns many ResourceTimeout or socket hang up responses. Meanwhile, #2 returns a success response for every request and runs even faster.
So is Async.queue better in this case?

I think it could help the speed if you raise the concurrency limit on your Promise.map. Note also that the two versions aren't doing the same work: in the Async.queue version the worker only logs the task and calls callback() without ever invoking the pushed function, and myapi returns without waiting for the queue, which would explain why it responds to every request and appears faster.
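For a fairer comparison, here is a minimal sketch (assuming the async v3 API) of a queue whose worker actually executes the pushed task, and that waits for the queue to drain:

const queue = Async.queue(async (task) => {
  // actually run the pushed async function instead of just logging it
  return task();
}, 4);

queue.push(async () => doAComplexQuery(args1, args2));
await queue.drain(); // in async v3, drain() with no callback returns a promise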

Related

API resolved without sending a response - Next.js

I have this code to get nearby places and nearby beaches from a point, with Google Maps. It is called from a Next.js component via the useSWR hook.
All the data is returned correctly, but before the first Axios call (const fetchNearbyPlaces = async (urlWithToken = null) => {...), I'm receiving this error in the console:
API resolved without sending a response for /api/google/places/33.807501/-78.70039, this may result in stalled requests.
I can't figure out what the error is; there may be several, because I'm a novice. I appreciate any suggestions.
const axios = require("axios");

const GetNearbyPlaces = async (req, res) => {
  const {
    latitude,
    longitude,
  } = req.query;
  const radius = 50000;
  const types = [
    "airport",
    "tourist_attraction",
    "amusement_park",
    "aquarium",
    "art_gallery",
    "bar",
    "museum",
    "night_club",
    "cafe",
    "restaurant",
    "shopping_mall",
    "store",
    "spa",
  ];
  function checkFunc(arr, val) {
    return arr.some(arrVal => val === arrVal);
  }
  const url = `https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=${latitude}%2C${longitude}&radius=${radius}&key=${process.env.CW_GOOGLE_MAPS_API_KEY}`;
  const beachesUrl = `https://maps.googleapis.com/maps/api/place/nearbysearch/json?location=${latitude}%2C${longitude}&radius=${radius}&type=natural_feature&key=${process.env.CW_GOOGLE_MAPS_API_KEY}`;
  try {
    let results = [];
    let beaches = [];
    const fetchNearbyBeaches = async (urlWithToken = null) => {
      await axios.get(urlWithToken ? urlWithToken : beachesUrl).then(data => {
        beaches = [...beaches, ...data.data.results];
        if (data?.data?.next_page_token) {
          const newUrl = `https://maps.googleapis.com/maps/api/place/nearbysearch/json?key=${process.env.CW_GOOGLE_MAPS_API_KEY}&pagetoken=${data.data.next_page_token}`;
          setTimeout(() => {
            fetchNearbyBeaches(newUrl);
          }, 2000);
        } else {
          beaches.length > 5 && beaches.splice(5);
          results.length > 5 && results.splice(5);
          const finalResults = [...beaches, ...results];
          finalResults.length > 10 && finalResults.splice(10);
          return res.status(200).json({
            data: {
              results: finalResults,
            },
            success: true,
          });
        }
      });
    };
    const fetchNearbyPlaces = async (urlWithToken = null) => {
      await axios.get(urlWithToken ? urlWithToken : url).then(data => {
        results = [...results, ...data.data.results];
        if (data?.data?.next_page_token) {
          const newUrl = `https://maps.googleapis.com/maps/api/place/nearbysearch/json?key=${process.env.CW_GOOGLE_MAPS_API_KEY}&pagetoken=${data.data.next_page_token}`;
          setTimeout(() => {
            fetchNearbyPlaces(newUrl);
          }, 2000);
        } else {
          const dirtyResultsWithDuplicates = [];
          results.map(result => {
            return types.map(type => {
              if (checkFunc(result.types, type) && !result.types.includes("lodging")) {
                dirtyResultsWithDuplicates.push(result);
              }
            });
          });
          const set = new Set(dirtyResultsWithDuplicates);
          const filtered = Array.from(set);
          results = filtered.length > 10 ? filtered.splice(10) : filtered;
          return fetchNearbyBeaches();
        }
      });
    };
    fetchNearbyPlaces();
  } catch (err) {
    res.status(500).json({
      message: err.message,
      statusCode: 500,
    });
  }
};

export default GetNearbyPlaces;
The problem is with the backend application, not the frontend component.
Next.js expects a response to have been sent by the time the API handler function exits. If, for example, you have a databaseCall.then(sendResponse) in your handler, the handler function exits before the database returns.
That is not a problem if the database does return afterwards and sends the response, but it is if, for example, the database has an error: because the handler exited without a response being sent, Next.js can't be sure that there isn't a stalled request at that point.
One way to fix this is by await-ing the db call (or whatever other async function you call), thereby preventing the handler function from exiting before some kind of response has been sent.
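For example, a minimal sketch (databaseCall is a hypothetical async helper standing in for whatever the handler really does):

const handler = async (req, res) => {
  try {
    // awaiting keeps the handler from exiting before a response is sent
    const data = await databaseCall(); // hypothetical async call
    res.status(200).json({ data, success: true });
  } catch (err) {
    res.status(500).json({ message: err.message });
  }
};

export default handler;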
The solution was adding this object to my API code:
export const config = {
  api: {
    externalResolver: true,
  },
};
Documentation: https://nextjs.org/docs/api-routes/request-helpers

vue method for loop wait for function complete

In this Vue component, I have a method containing a for loop that calls another method. The second method makes a request to the app server. I need the first method to wait for the second before continuing the for loop. I've tried several async/await options but don't understand how to implement them.
methods: {
  selectFiles(files) {
    this.progressInfos = [];
    this.selectedFiles = files;
  },
  uploadFiles() {
    this.message = "";
    //var result = 0;
    for (let i = 0; i < this.selectedFiles.length; i++) {
      console.log(i)
      //result = await this.upload(i, this.selectedFiles[i]);
      this.upload(i, this.selectedFiles[i]);
    }
  },
  upload(idx, file) {
    this.progressInfos[idx] = { percentage: 0, fileName: file.name };
    //console.log("FinDocuNum:" + financialDocument.finDocId)
    FinancialDocumentDataService.upload(1, file, (event) => {
      this.progressInfos[idx].percentage = Math.round(100 * event.loaded / event.total);
    }).then((response) => {
      let prevMessage = this.message ? this.message + "\n" : "";
      this.message = prevMessage + response.status;
      return 1;
    }).catch(() => {
      this.progressInfos[idx].percentage = 0;
      this.message = "Could not upload the file:" + file.name;
      return 0;
    });
  }
}
The upload function must be async and return a promise like this:
async upload(file) {
  return new Promise((resolve, reject) => {
    axios({url: url, data: file, method: 'POST'})
      .then(resp => {
        resolve(resp)
      })
      .catch(err => {
        reject(err)
      })
  })
},
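With that in place, the calling loop can await each upload, so the next file only starts once the previous one has finished (a minimal sketch, following the answer's single-argument upload signature):

async uploadFiles() {
  this.message = "";
  for (let i = 0; i < this.selectedFiles.length; i++) {
    // waits here until the current upload resolves or rejects
    await this.upload(this.selectedFiles[i]);
  }
},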

jest: how can I test axios?

No matter how I write it, asynchronous problems occur.
test.js:
const auth = require('../methods/auth.js');

describe('test', () => {
  test('test', async () => {
    expect.assertions(1);
    const data = await auth.signin();
    return expect(data.success).toBeTruthy();
  });
});
auth.js:
const axios = require('axios');

module.exports = {
  async signin(data) {
    try {
      const res = await axios.post('/signin', data);
      return res.data;
    } catch (error) {
      return error.response;
    }
  },
};
Each execution result is different.
You can test Promises as follows:
test('test awaiting it to resolve', () => {
  // Don't await; note the return. The test callback doesn't need to be async.
  // This is what I do in my tests, as it's more readable and the intention is clear.
  return expect(auth.signin()).resolves.toEqual({success: true});
});

test('test the promises way', () => {
  // Not better than the above, just the traditional way; note the 'return'.
  return auth.signin().then(data => { expect(data.success).toBeTruthy() });
});
Detailed notes from jest: https://jestjs.io/docs/asynchronous#promises
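Note that both tests still hit the real /signin endpoint, which is why each execution gives a different result. A common way to make the test deterministic is to mock axios (a minimal sketch, assuming jest's automatic module mock):

jest.mock('axios');
const axios = require('axios');
const auth = require('../methods/auth.js');

test('signin resolves with mocked data', async () => {
  // axios.post is a jest mock function here
  axios.post.mockResolvedValue({ data: { success: true } });
  await expect(auth.signin()).resolves.toEqual({ success: true });
});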

Vue Jest Testing await in loop

I'm pretty new to unit testing and I'm having some issues writing a test for an await in a loop.
code to test:
export default {
  name: 'MyComponent',
  setup() {
    const count = ref<number>(0);

    function foo(count: number, delay: number): Promise<void> {
      const resolver = async (resolve: () => void) => {
        count.value = count;
        while (count.value >= 0) {
          await yoo();
          await wait(delay);
          count.value -= 1;
        }
      }
      return new Promise<void>(resolver);
    }

    async function yoo() {
      // something
      await koo();
      // something else
    }

    function wait(waitTime: number): Promise<void> {
      const resolver = (resolve: () => void) => {
        setTimeout(resolve, waitTime);
      };
      return new Promise(resolver);
    }
  }
}
test:
let wrapper = mount(MyComponent);
let promise: Promise<void>;
let count = 5;

jest.useFakeTimers();

beforeAll(() => {
  promise = wrapper.vm.foo(count, 100);
});

test(`count value decreases at each iteration`, async () => {
  while (wrapper.vm.count >= 0) {
    const prevCountValue = wrapper.vm.count;
    await flushPromises();
    jest.runAllTimers();
    expect(wrapper.vm.count).toBe(prevCountValue - 1);
  }
});
Writing the test this way breaks with a jest.setTimeout timeout error.
Any ideas on the correct way to write this test?

stream s3 to dynamodb with fast-csv : not all data inserted

When a CSV file is uploaded to my S3 bucket, my Lambda is triggered to insert the data into DynamoDB.
I need a stream because the file is too large to be downloaded as a full object.
const batchWrite = async (clientDynamoDB, itemsToProcess) => {
  const ri = {};
  ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
  const params = { RequestItems: ri };
  await clientDynamoDB.batchWriteItem(params).promise();
};

function runStreamPromiseAsync(stream, clientDynamoDB) {
  return new Promise((resolve, reject) => {
    const sizeChunk = 25;
    let itemsToProcess = [];
    stream
      .pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
      .on("data", (row) => {
        stream.pause();
        itemsToProcess.push(row);
        if (itemsToProcess.length === sizeChunk) {
          batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
            stream.resume();
          });
          itemsToProcess = [];
        }
      })
      .on("error", (err) => {
        console.log(err);
        reject("Error");
      })
      .on("end", () => {
        stream.pause();
        console.log("end");
        batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
          resolve("OK");
        });
      });
  });
}
const https = require('https'); // required for the keep-alive agent below

module.exports.main = async (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  const AWS = require('aws-sdk');
  const s3 = new AWS.S3();

  const object = event.Records[0].s3;
  const bucket = object.bucket.name;
  const file = object.object.key;

  const agent = new https.Agent({
    keepAlive: true
  });
  const client = new AWS.DynamoDB({
    httpOptions: {
      agent
    }
  });

  try {
    // get the CSV data as a stream
    const stream = s3
      .getObject({
        Bucket: bucket,
        Key: file
      })
      .createReadStream()
      .on('error', (e) => {
        console.log(e);
      });
    await runStreamPromiseAsync(stream, client);
  } catch (e) {
    console.log(e);
  }
};
When my file is 1000 lines everything is inserted, but when I have 5000 lines my function inserts only around 3000 lines, and this number is random... sometimes more, sometimes less.
So I'd like to understand what I am missing here.
I also read this article, but to be honest, even if you pause the second stream, the first one is still running. So if someone has any ideas on how to do this, it would be greatly appreciated!
Thanks
I found out why it was not fully processed: the callback of batchWriteItem can return unprocessed items. So I changed the batchWrite function, and also runStreamPromiseAsync a little, because otherwise not all the items from itemsToProcess might get processed.
Anyway, here is the full code:
const batchWrite = (client, itemsToProcess) => {
  const ri = {};
  ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
  const items = { RequestItems: ri };
  return new Promise((resolve, reject) => {
    const processItemsCallback = function (err, data) {
      if (err) {
        return reject(err);
      }
      // batchWriteItem may leave some items unprocessed; retry those until none remain
      if (!data || !data.UnprocessedItems || Object.keys(data.UnprocessedItems).length === 0) {
        return resolve();
      }
      const params = { RequestItems: data.UnprocessedItems };
      client.batchWriteItem(params, processItemsCallback);
    };
    client.batchWriteItem(items, processItemsCallback);
  });
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
  return new Promise((resolve, reject) => {
    const sizeChunk = 25;
    let itemsToProcess = [];
    let arrayPromise = [];
    stream
      .pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
      .on("error", (err) => {
        console.log(err);
        reject("Error");
      })
      .on('data', data => {
        itemsToProcess.push(data);
        if (itemsToProcess.length === sizeChunk) {
          arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
          itemsToProcess = [];
        }
      })
      .on('end', () => {
        if (itemsToProcess.length !== 0) {
          arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
        }
        resolve(Promise.all(arrayPromise).catch(e => {
          reject(e)
        }));
      });
  });
}