Using async / await with mongoose.js - express

I'm looking for help rewriting these two queries using async/await instead of the nested-callback approach.
exports.post_edit_get = function(req, res, next) {
  var id = req.params.id;
  if (mongoose.Types.ObjectId.isValid(id)) {
    POST.findById(id, function (err, doc) {
      if (err) { return next(err); }
      playerQuery.exec(function (err, players) {
        if (err) { return next(err); }
        res.render('posts/posts_admin', { title: pageTitle, formData: doc, players: players });
      });
    });
  } else {
    res.send("Invalid ID");
  }
};

Here you go
const { isValid } = mongoose.Types.ObjectId;

exports.post_edit_get = async function(req, res, next) {
  var { id } = req.params;
  if (!isValid(id)) {
    return res.send("Invalid ID");
  }
  try {
    const post = await POST.findById(id);
    const players = await playerQuery.exec();
    res.render('posts/posts_admin', {
      title: pageTitle,
      formData: post, // the awaited result replaces the old callback's `doc`
      players: players
    });
  } catch (err) {
    return next(err);
  }
};
If you want to get rid of the try/catch at the route-handler level, have a look at this post: Using async/await to write cleaner route handlers.
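The core of that approach is a small wrapper that forwards rejected promises to next(). This is only a minimal sketch of the idea, not the exact code from that post; the asyncHandler name is illustrative:

// Wrap an async handler so any rejected promise is passed to Express' error middleware.
const asyncHandler = (fn) => (req, res, next) =>
  Promise.resolve(fn(req, res, next)).catch(next);

exports.post_edit_get = asyncHandler(async function (req, res) {
  const { id } = req.params;
  if (!isValid(id)) return res.send("Invalid ID");
  const post = await POST.findById(id);
  const players = await playerQuery.exec();
  res.render('posts/posts_admin', { title: pageTitle, formData: post, players: players });
});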

infinity loading when try to scrap title article with cheerio

I was trying to scrape article titles with cheerio in Node.js, but I could not make it work: the page loads forever in Google Chrome and no content appears.
Here is my controller.js file, in which I use cheerio to scrape the article titles and display them with the Handlebars view engine.
The view engine itself works and everything else is fine, but the scrape route just keeps loading in Chrome with no error.
The class name I use for this website is .c-entry-box--compact__title.
Maybe I am doing something wrong, but I cannot figure it out.
var express = require("express");
var router = express.Router();
var path = require("path");
var request = require("request");
var cheerio = require("cheerio");
var Comment = require("../models/Comment.js");
var Article = require("../models/Article.js");
router.get("/", function(req, res) {
res.redirect("/articles");
});
router.get("/scrape", function(req, res) {
request("http://www.theverge.com", function(error, response, html) {
var $ = cheerio.load(html);
var titlesArray = [];
$(".c-entry-box--compact__title").each(function(i, element) {
var result = {};
result.title = $(this)
.children("a")
.text();
result.link = $(this)
.children("a")
.attr("href");
if (result.title !== "" && result.link !== "") {
if (titlesArray.indexOf(result.title) == -1) {
titlesArray.push(result.title);
Article.count({ title: result.title }, function(err, test) {
if (test === 0) {
var entry = new Article(result);
entry.save(function(err, doc) {
if (err) {
console.log(err);
} else {
console.log(doc);
}
});
}
});
} else {
console.log("Article already exists.");
}
} else {
console.log("Not saved to DB, missing data");
}
});
res.redirect("/");
});
});
router.get("/articles", function(req, res) {
Article.find()
.sort({ _id: -1 })
.exec(function(err, doc) {
if (err) {
console.log(err);
} else {
var artcl = { article: doc };
res.render("index", artcl);
}
});
});
router.get("/articles-json", function(req, res) {
Article.find({}, function(err, doc) {
if (err) {
console.log(err);
} else {
res.json(doc);
}
});
});
router.get("/clearAll", function(req, res) {
Article.remove({}, function(err, doc) {
if (err) {
console.log(err);
} else {
console.log("removed all articles");
}
});
res.redirect("/articles-json");
});
router.get("/readArticle/:id", function(req, res) {
var articleId = req.params.id;
var hbsObj = {
article: [],
body: []
};
Article.findOne({ _id: articleId })
.populate("comment")
.exec(function(err, doc) {
if (err) {
console.log("Error: " + err);
} else {
hbsObj.article = doc;
var link = doc.link;
request(link, function(error, response, html) {
var $ = cheerio.load(html);
$(".l-col__main").each(function(i, element) {
hbsObj.body = $(this)
.children(".c-entry-content")
.children("p")
.text();
res.render("article", hbsObj);
return false;
});
});
}
});
});
router.post("/comment/:id", function(req, res) {
var user = req.body.name;
var content = req.body.comment;
var articleId = req.params.id;
var commentObj = {
name: user,
body: content
};
var newComment = new Comment(commentObj);
newComment.save(function(err, doc) {
if (err) {
console.log(err);
} else {
console.log(doc._id);
console.log(articleId);
Article.findOneAndUpdate(
{ _id: req.params.id },
{ $push: { comment: doc._id } },
{ new: true }
).exec(function(err, doc) {
if (err) {
console.log(err);
} else {
res.redirect("/readArticle/" + articleId);
}
});
}
});
});
module.exports = router;
The http://www.theverge.com site adds content dynamically as you scroll.
This is an example of how to get the titles with Puppeteer:
const puppeteer = require("puppeteer");

const getTitle = async () => {
  try {
    const browser = await puppeteer.launch({
      headless: false,
    });
    const page = await browser.newPage();
    await page.setDefaultNavigationTimeout(0);
    await page.goto('https://www.theverge.com');
    await page.setViewport({
      width: 1920,
      height: 1080
    });
    // scroll down to the end of the page
    await page.evaluate(() => {
      window.scrollTo(0, window.document.body.scrollHeight);
    });
    await page.waitForNavigation({ waitUntil: 'networkidle0' }); // 0 network connections for 500ms
    // get the titles
    const titles = await page.evaluate(() => {
      const textSelector = 'div.inline.pr-4.font-bold';
      const texts = Array.from(document.querySelectorAll(textSelector), row => row.innerText.trim());
      return texts;
    });
    await browser.close();
    return titles;
  } catch (error) {
    return Promise.reject(error);
  }
};

getTitle()
  .then((titles) => {
    console.log(titles); // first news search
  })
  .catch((error) => {
    console.log(error); // avoid an unhandled rejection if the scrape fails
  });
This is the result:
[
'Is an upgraded M2 Ultra enough for a new Mac Pro and the Mac Studio?',
'Here’s the official trailer for Christopher Nolan’s next IMAX-filmed epic, Oppenheimer.',
'Sam Bankman-Fried’s ready to surrender himself to the US for extradition.',
'Who knew the thumb drive had such a contentious origin story?',
'But how many pebbles do you have in a jar?',
'This way for The Way of Water.',
'Netflix is taking Blockbuster behind the woodshed again.',
'I can’t escape the year-end wrap-ups.',
'The clock’s ticking if you want to get your gifts on time.',
'Want solar panels on your California home? Now might be the time.',
'Twitter Spaces has returned.',
'Apple’s facing another accusation of breaking labor laws.',
'Every game should have this feature.',
'Google’s working on simplifying smart home control on the wrist.',
'Apple could open up iOS, and the feds finally make a case against SBF.',
'I’m not the first, and I won’t be the last... but I do feel early.',
'For what it’s worth, Avatar: The Way of Water’s a good looking movie.',
'You may not want to upgrade to Apple’s new Home architecture.'
]
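If you want to plug this back into the Mongo flow from the question, a minimal sketch could look like the following. It assumes the getTitle() helper above, the Article model from the question, and a Mongoose version whose queries return promises; it only stores the title, since the Puppeteer snippet doesn't collect the links:

router.get("/scrape", async function (req, res) {
  try {
    const titles = await getTitle();
    for (const title of titles) {
      // upsert so re-running the scrape doesn't create duplicates
      await Article.updateOne({ title: title }, { $set: { title: title } }, { upsert: true });
    }
    res.redirect("/articles");
  } catch (err) {
    console.log(err);
    res.status(500).send(err.message);
  }
});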

Changing the collection based on the request type

This controller accepts the form data and saves it to the database.
export const createPost = async (req, res) => {
  const { title, message, selectedFile, creator, tags } = req.body;
  const newPostMessage = new OrangeModel({ title, message, selectedFile, creator, tags });
  try {
    await newPostMessage.save();
    res.status(201).json(newPostMessage);
  } catch (err) {
    res.status(409).json({ message: err.message });
  }
};
I want to change the collection based on the request type.
When the request comes from the Grapes URL, the model (or collection) should change from OrangeModel to GrapeModel. How can I do this?
If you want a POST /Grapes to behave differently from a POST /Oranges, you can attach your controller to both paths and evaluate the path inside your code.
const createPost = async (req, res) => {
  let newPostMessage;
  if (req.path === "/Oranges") newPostMessage = new OrangeModel(...);
  else if (req.path === "/Grapes") newPostMessage = new GrapeModel(...);
  try {
    await newPostMessage.save();
    ...
};

app.post(["/Oranges", "/Grapes"], createPost);
I also got an answer like this:
exports.createPost = Model => async (req, res) => {
  try {
    const doc = await Model.create(req.body, {
      new: true,
      runValidators: true,
    });
    res.status(200).json({
      status: 'success',
      data: {
        doc,
      },
    });
  } catch (error) {
    res.status(400).json({
      status: 'fail',
      message: error,
    });
  }
};
Here you just call the createPost function with the model:
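For example, the wiring could look like this; the require paths are placeholders for wherever the controller and models live in your project:

const { createPost } = require('./controllers/posts'); // assumption: adjust to your file layout
const OrangeModel = require('./models/Orange');        // assumption
const GrapeModel = require('./models/Grape');          // assumption

app.post('/Oranges', createPost(OrangeModel));
app.post('/Grapes', createPost(GrapeModel));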

Sqlite3 returning empty array with GET request in Express

I am trying to make a GET request against an sqlite3 table, using Express, based on input from a form. The fetch request works and so does db.all, but the rows in the response come back as an empty array. I have already tried req.query and req.params. Not sure where the error is.
//server.js
app.get('/names/state', (req, res, next) => {
  const stateValue = req.query.state;
  db.all(`SELECT name FROM states WHERE name=$stateVal`,
    {
      $stateVal: stateValue
    },
    (err, rows) => {
      res.send({ rows: rows });
    });
});
//script.js
const fetchOneBtn = (e) => {
  e.preventDefault();
  const stateVal = stateInputValue.value;
  fetch(`/names/state?state=${stateVal}`)
    .then(response => {
      if (response.ok) {
        return response.json();
      }
    }).then(names => {
      console.log(names);
    });
};
You can change the code in your backend to the code below:
app.get('/names/state', (req, res, next) => {
  const stateValue = req.query.state;
  // keep the value parameterized; concatenating it into the SQL string
  // breaks string quoting and opens the query up to SQL injection
  const query = "SELECT name FROM states WHERE name = ?";
  db.all(query, [stateValue], (err, rows) => {
    if (err) {
      console.log(err);
      res.status(500).send(err);
    } else {
      res.send({ rows });
    }
  });
});
Now, for your frontend, you can change it to the code below:
const fetchOneBtn = async (e) => {
  e.preventDefault();
  const stateVal = stateInputValue.value;
  try {
    const response = await fetch(`/names/state?state=${stateVal}`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json'
      },
    });
    const names = await response.json(); // read the body only once
    console.log(names);
    return names;
  } catch (ex) {
    console.log(ex);
  }
};
I hope it can help you.
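One note on the response shape: the backend sends { rows: [...] }, so on the client the data lives under the rows property. A small sketch, reusing the names value from the handler above:

// after const names = await response.json();
if (names.rows.length === 0) {
  console.log('Query ran but matched no rows - check the values stored in the states table');
} else {
  names.rows.forEach((row) => console.log(row.name));
}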

passing Tedious connection as parameter

I am trying to use a simple suite of functions built utilizing the Tedious library to access a Microsoft SQL Server. Here is my "tools" file:
'use strict';
const tedious = require('tedious');
const q = require('q');
var Connection = tedious.Connection;
var Request = tedious.Request;
module.exports = {
connectSQL : function(config) {
var connection = new Connection(config);
connection.on('connect', function(err) {
if (err) {
console.log('FAIL ON CONNECT');
console.log(err);
} else {
try {
/* ----- */
return connection;
} catch (err) {
console.log(err);
return;
}
}
});
connection.on('error', function(err) {
if (err) {
console.log('FAIL ON ERROR');
console.log(err);
} else {
console.log("Error called with no err object.");
}
});
},
executeSQL: function(connection, requestString) {
var results = [];
var request = new Request( requestString , function(err, data) {
if (err) {
console.log(err);
} else {
console.log( data );
}
});
request.on('row', function(row) {
//console.log(row);
results.push( row );
});
request.on('requestCompleted', function(){
console.log('Finished');
return results;
});
connection.execSql(request);
}
}
I call these functions as follows in my server file.
const sqlTools = require('./sqlTools.js');
var connection = sqlTools.connectSQL(config);
sqlTools.executeSQL(connection, "select * from dbo.test");
However, I get the error "TypeError: Cannot read property 'execSql' of undefined", even if I make the program sleep for 10 seconds before calling my function sqlTools.executeSQL (obviously not ideal).
I was able to get this to work by calling the request within the sqlTools.connectSQL function (at the "/* ----- */"), but I want to re-use the Tedious connection to make multiple calls. Any suggestions? Thanks!
~~~~~~~EDIT~~~~~~~~~~
With help from akinjide I was able to implement callbacks that allow me to make a single call to my SQL database. However, I am struggling to implement promises to make subsequent calls. I changed my "tools" file as such:
'use strict';
const tedious = require('tedious');
const q = require('q');
var Connection = tedious.Connection;
var Request = tedious.Request;
module.exports = {
connectSQL: function(config) {
var deferred = q.defer();
var connection = new Connection(config);
connection.on('connect', function(err) {
if (err) {
deferred.reject( err );
} else {
deferred.resolve( connection );
}
});
connection.on('error', function(err) {
deferred.reject(err);
});
return deferred.promise;
},
executeSQL: function(connection, requestString, callback) {
var results = [];
const request = new Request(requestString, function(err) {
callback(err);
});
request.on('row', function(row) {
results.push(row);
});
request.on('requestCompleted', function() {
console.log('request completed!');
callback(null, results);
});
connection.execSql(request);
}
}
and I call this code like this...
var promise = sqlTools.connectSQL(config);
promise.then(function (connection) {
sqlTools.executeSQL(connection, "select * from dbo.test", function(err, results) {
if (err) {
console.log(err);
}
console.log(results);
});
}).catch(function (err) {
console.log(err);
}).then(function (connection) {
sqlTools.executeSQL(connection, "select * from dbo.test2", function(err, results) {
if (err) {
console.log(err);
}
console.log(results);
});
}).catch(function(err) {
console.log(err);
});
This returns the first call's results correctly, but unfortunately returns this error "TypeError: Cannot read property 'execSql' of undefined" for the second call as it is not recognizing the connection the second time around. Any suggestions?
A better approach would be to pass a Node.js callback-style function as an argument to connectSQL.
The return keyword won't work here: the 'connect' handler runs asynchronously, so by the time it fires there is nothing for it to return the connection to.
'use strict';
const tedious = require('tedious');
const Connection = tedious.Connection;
const Request = tedious.Request;
module.exports = {
connectSQL: function(config, callback) {
const connection = new Connection(config);
connection.on('connect', function(err) {
if (err) {
callback(err);
} else {
callback(null, connection);
}
});
connection.on('error', function (err) {
callback(err);
});
},
executeSQL: function(connection, requestString, callback) {
let results = [];
const request = new Request(requestString, function(err) {
callback(err);
});
request.on('row', function(row) {
results.push(row);
});
request.on('requestCompleted', function(){
console.log('Finished');
callback(null, results);
});
connection.execSql(request);
}
}
Then you can require the module and use sqlTools.connectSQL, passing two parameters: config and a function(err, connection) {} callback.
const sqlTools = require('./sqlTools');

sqlTools.connectSQL(config, function(err, connection) {
  if (err) {
    console.log('FAIL ON CONNECT');
    console.log(err);
  }
  sqlTools.executeSQL(connection, "select * from dbo.test", function (err, results) {
    if (err) {
      console.log(err);
    }
    console.log(results);
  });
});
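Regarding the edit above: the second .then in that chain receives undefined because the previous .then does not return the connection (and a .catch followed by .then continues the chain with whatever the catch returned). One way to run the two queries in sequence on the same connection, keeping the promise-based connectSQL from the edit, is a sketch along these lines. Note that executeSQL as written invokes its callback twice on success (once from the Request completion callback without results, once from 'requestCompleted' with them), so the sketch ignores the invocation that carries no results:

sqlTools.connectSQL(config)
  .then(function (connection) {
    sqlTools.executeSQL(connection, "select * from dbo.test", function (err, results) {
      if (err) { return console.log(err); }
      if (!results) { return; } // skip the completion callback that has no results
      console.log(results);
      // start the second query only after the first has finished;
      // a tedious connection can only run one request at a time
      sqlTools.executeSQL(connection, "select * from dbo.test2", function (err, results2) {
        if (err) { return console.log(err); }
        if (!results2) { return; }
        console.log(results2);
      });
    });
  })
  .catch(function (err) {
    console.log(err);
  });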

ExpressJS Multer: Upload image to server

I'm new to Node.js and Express.js.
I want to first upload an image to the server (directory: uploads/spots) and then (synchronously) save the rest of the form data in MongoDB.
I'm using REST (POST method):
app.route('/spots').post(users.requiresLogin, spots.create);
and I'm using Multer to upload the image to the server, which works:
app.use(multer(
{ dest: './public/uploads/spots',
onFileUploadStart: function (file) {
var imagePath = file.path;
gm(imagePath).resize(850, 850).quality(70).noProfile().write('public/uploads/spots/850x850/'+file.name, function (err) {
if (!err) {
gm(imagePath).resize(150, 150).quality(70).noProfile().write('public/uploads/spots/150x150/'+file.name, function (err) {
if (!err) {
}
else{
console.log('Error: '+err);
}
});
}
else{
console.log('Error: '+err);
}
});
}
}));
It works, but it is asynchronous and returns the response to the frontend before the image has been uploaded to the server.
My question is how to do this synchronously, i.e. how to return the response to the frontend only after the image has been uploaded.
Thank you!
spots.server.routes.js
'use strict';
module.exports = function(app) {
var gm = require('gm');
var multer = require('multer');
var users = require('../controllers/users.server.controller.js');
var spots = require('../controllers/spots.server.controller.js');
//Upload image
app.use(multer(
{ dest: './public/uploads/spots',
onFileUploadStart: function (file) {
var imagePath = file.path;
gm(imagePath).resize(850, 850).quality(70).noProfile().write('public/uploads/spots/850x850/'+file.name, function (err) {
if (!err) {
gm(imagePath).resize(150, 150).quality(70).noProfile().write('public/uploads/spots/150x150/'+file.name, function (err) {
if (!err) {
}
else{
console.log('Error: '+err);
}
});
}
else{
console.log('Error: '+err);
}
});
}
}));
// Spots Routes
app.route('/spots')
.get(spots.list)
.post(users.requiresLogin, spots.create);
app.route('/spots/:spotId')
.get(spots.read)
.put(users.requiresLogin, spots.update)
.delete(users.requiresLogin, spots.hasAuthorization, spots.delete);
// Finish by binding the Spot middleware
app.param('spotId', spots.spotByID);
};
spots.server.controller.js (create method)
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller.js'),
Spot = mongoose.model('Spot'),
_ = require('lodash'),
fs = require('fs');
/**
* Create a Spot
*/
exports.create = function(req, res) {
var spot = new Spot(JSON.parse(req.body.spot));
spot.user = req.user;
if(req.files.file)
spot.image=req.files.file.name;
else
spot.image='default.jpg';
spot.save(function(err) {
if (err) {
fs.unlinkSync('public/uploads/spots/'+spot.image);
fs.unlinkSync('public/uploads/spots/850x850/'+spot.image);
fs.unlinkSync('public/uploads/spots/150x150/'+spot.image);
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
var socketio = req.app.get('socketio'); // take the socket.io instance out of the app container
socketio.sockets.emit('spot.created.'+spot.municipality, {spot:spot, user:req.user});
socketio.sockets.emit('spot.created.'+spot.province, {spot:spot, user:req.user});
socketio.sockets.emit('spot.created.'+spot.community, {spot:spot, user:req.user});
socketio.sockets.emit('spot.created.'+spot.country, {spot:spot, user:req.user});
res.jsonp(spot);
}
});
};
/**
* Spot authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.spot.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};
The solution is to not use the onFileUploadStart method, and instead use a function with a callback in the controller.
routes
// Spots Routes
app.route('/spots')
.get(spots.list)
.post(users.requiresLogin,multer({ dest: './public/uploads/spots'}), spots.create);
controller
exports.create = function(req, res) {
if (req.files.file)
exports.uploadImage(req.files.file,callback);
else
callback();
function callback(){
var spot = new Spot(JSON.parse(req.body.spot));
spot.user = req.user;
if (req.files.file)
spot.image = req.files.file.name;
else
spot.image = 'default.jpg';
spot.save(function (err) {
if (err) {
fs.unlink('public/uploads/spots/850x850/'+spot.image);
fs.unlink('public/uploads/spots/150x150/'+spot.image);
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
var socketio = req.app.get('socketio'); // take the socket.io instance out of the app container
socketio.sockets.emit('spot.created.' + spot.municipality, {spot: spot, user: req.user});
socketio.sockets.emit('spot.created.' + spot.province, {spot: spot, user: req.user});
socketio.sockets.emit('spot.created.' + spot.community, {spot: spot, user: req.user});
socketio.sockets.emit('spot.created.' + spot.country, {spot: spot, user: req.user});
req.spot = spot;
Feedback.subscribeSpot(req);
Notify.getLocalSubscriptors(spot.municipality,spot.province,spot.community,spot.country,function(subscriptions){
Notify.create(req,null,spot,null,null,null,subscriptions,'spots/'+spot._id,false,'SPOT_CREATED', function(){
res.jsonp(spot);
});
});
}
});
}
};
exports.uploadImage = function(file, fn){
var imagePath = file.path;
gm(imagePath).resize(850, 850).quality(70).noProfile().write('public/uploads/spots/850x850/'+file.name, function (err) {
if (!err) {
gm(imagePath).resize(150, 150).quality(70).noProfile().write('public/uploads/spots/150x150/'+file.name, function (err) {
if (!err) {
if(fn)fn();
}
else{
console.log('Error: '+err);
}
});
}
else{
console.log('Error: '+err);
}
});
};
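One caveat with uploadImage above: if gm fails, the error is only logged and fn is never called, so the request never gets a response. A minimal sketch of a variant that propagates errors to the callback (the controller's callback would then need to check err and respond accordingly):

exports.uploadImage = function (file, fn) {
  var imagePath = file.path;
  gm(imagePath).resize(850, 850).quality(70).noProfile()
    .write('public/uploads/spots/850x850/' + file.name, function (err) {
      if (err) { return fn(err); }
      gm(imagePath).resize(150, 150).quality(70).noProfile()
        .write('public/uploads/spots/150x150/' + file.name, function (err) {
          fn(err); // null on success, the gm error otherwise
        });
    });
};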