I'm trying to send a simple POST request for a contact form in a static site built on top of Nuxt.js.
I'm trying to use Express and Nodemailer in the serverMiddleware Nuxt provides.
Here's the code from api/contact.js:
const express = require('express')
const nodemailer = require('nodemailer')
const validator = require('validator')
const xssFilters = require('xss-filters')

const app = express()
app.use(express.json())

const rejectFunctions = new Map([
  ['name', a => a.trim().length < 4],
  ['email', v => !validator.isEmail(v)],
  ['message', v => v.trim().length < 10]
])

const validateAndSanitize = (key, value) => {
  return (
    rejectFunctions.has(key) &&
    !rejectFunctions.get(key)(value) &&
    xssFilters.inHTMLData(value)
  )
}

const sendMail = (name, email, msg) => {
  const transporter = nodemailer.createTransport({
    sendmail: true,
    newline: 'unix',
    path: '/usr/sbin/sendmail'
  })
  transporter.sendMail({
    from: email,
    to: 'johndoe@mail.com',
    subject: 'New Contact Form',
    text: msg
  })
}

app.post('/', (req, res) => {
  const attributes = ['name', 'email', 'message']
  const sanitizedAttributes = attributes.map(attr =>
    validateAndSanitize(attr, req.body[attr])
  )
  const someInvalid = sanitizedAttributes.some(r => !r)
  if (someInvalid) {
    return res
      .status(422)
      .json({ error: 'Error: at least one field is invalid' })
  }
  sendMail(...sanitizedAttributes)
  res.status(200).json({ message: 'OH YEAH' })
})

export default {
  path: '/api/contact',
  handler: app
}
Everything works as intended until the sendMail function, where I end up with this error:
(node:10266) UnhandledPromiseRejectionWarning: Error: spawn /usr/sbin/sendmail ENOENT
at Process.ChildProcess._handle.onexit (internal/child_process.js:246:19)
at onErrorNT (internal/child_process.js:427:16)
at processTicksAndRejections (internal/process/next_tick.js:76:17)
(node:10266) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 6)
Any idea what I'm doing wrong?
The error
Error: spawn /usr/sbin/sendmail ENOENT
suggests that nodemailer was looking for sendmail in /usr/sbin/ but didn't find it (this is the default location and can be configured via the path option of createTransport). First, check whether it exists (as AnFi suggested) by running
ls -l /usr/sbin/sendmail
If you get
ls: cannot access '/usr/sbin/sendmail': No such file or directory
then install sendmail by running
sudo apt-get install sendmail
Then, try ls -l /usr/sbin/sendmail and sending email again.
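Separately, the UnhandledPromiseRejectionWarning shows up because the promise returned by transporter.sendMail is never awaited or caught. A rough sketch of how the route could surface that failure instead (the 500 response and its message are my own choices, not from the original post):

// Hypothetical variant of the POST handler: sendMail would need to return
// the transporter.sendMail(...) promise for the await below to matter.
app.post('/', async (req, res) => {
  const attributes = ['name', 'email', 'message']
  const sanitizedAttributes = attributes.map(attr =>
    validateAndSanitize(attr, req.body[attr])
  )
  if (sanitizedAttributes.some(r => !r)) {
    return res.status(422).json({ error: 'Error: at least one field is invalid' })
  }
  try {
    await sendMail(...sanitizedAttributes)
    res.status(200).json({ message: 'OH YEAH' })
  } catch (err) {
    // A missing sendmail binary now lands here instead of as an unhandled rejection
    console.error(err)
    res.status(500).json({ error: 'Sending the email failed' })
  }
})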
I'm getting the error below while trying to deploy a smart contract with Hardhat. Error details:
TypeError: Cannot read property 'sendTransaction' of null
at ContractFactory.<anonymous> (C:\Collection\node_modules\@ethersproject\contracts\src.ts\index.ts:1249:38)
at step (C:\Collection\node_modules\@ethersproject\contracts\lib\index.js:48:23)
at Object.next (C:\Collection\node_modules\@ethersproject\contracts\lib\index.js:29:53)
at fulfilled (C:\Collection\node_modules\@ethersproject\contracts\lib\index.js:20:58)
Here are the config files
hardhat.config.js
require('@nomiclabs/hardhat-waffle');
require("@nomiclabs/hardhat-ethers");
require("dotenv").config();

// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task("accounts", "Prints the list of accounts", async (taskArgs, hre) => {
  const accounts = await hre.ethers.getSigners();
  for (const account of accounts) {
    console.log(account.address);
  }
});

// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more
/**
 * @type import('hardhat/config').HardhatUserConfig
 */
module.exports = {
  solidity: "0.8.2",
  networks: {
    mumbai: {
      url: process.env.MUMBAI_URL,
      account: process.env.PRIVATE_KEY
    }
  }
};
deploy.js
const { ethers } = require("hardhat");

async function main() {
  const SuperMario = await ethers.getContractFactory("SuperMario");
  const superInstance = await SuperMario.deploy("SuperMarioCollection", "SMC");
  await superInstance.deployed();
  console.log("contract was deployed to:", superInstance.address());
  await superInstance.mint("https://ipfs.io/ipfs/XXXXXXX");
}

// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });
I am trying to deploy it using the following command
npx hardhat run scripts/deploy.js --network mumbai
thanks
Change account to accounts in the network config
Found the fix. There was an error in the hardhat.config.js file: instead of account:, it should have been
accounts: [process.env.PRIVATE_KEY]
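For reference, a sketch of the corrected networks block with that change applied (everything else is unchanged from the config in the question):

// hardhat.config.js (relevant part only)
module.exports = {
  solidity: "0.8.2",
  networks: {
    mumbai: {
      url: process.env.MUMBAI_URL,
      // "accounts" (plural) takes an array of private keys
      accounts: [process.env.PRIVATE_KEY]
    }
  }
};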
I'm trying to run a vue ssr app on cloudflare workers.
I generated a new project using wrangler generate test
I installed vue using npm install vue@next and npm install @vue/server-renderer
I edited the index.js file like this:
const { createSSRApp } = require('vue')
const { renderToString } = require('@vue/server-renderer')

const app = createSSRApp({
  data: () => ({ msg: 'hello' }),
  template: `<div>{{ msg }}</div>`
})

addEventListener('fetch', event => {
  event.respondWith(handleRequest(event.request))
})

async function handleRequest(request) {
  const html = await renderToString(app)
  return new Response(html, { status: 200 })
}
I then used wrangler dev to test it, but when I access the page I get this error:
ReferenceError: __VUE_PROD_DEVTOOLS__ is not defined
at Module.<anonymous> (worker.js:8:104768)
at n (worker.js:1:110)
at Object.<anonymous> (worker.js:8:104943)
at n (worker.js:1:110)
at worker.js:1:902
at worker.js:1:912
Any help or guidance is appreciated
I faced a similar issue and was able to fix it by defining the global constant __VUE_PROD_DEVTOOLS__ = false at compile time.
Here is what my webpack prod config looks like:
const webpack = require('webpack');
const { merge } = require("webpack-merge");
const webpackCommon = require("./webpack.common");

const prodConfig = {
  mode: 'production',
  plugins: [
    new webpack.DefinePlugin({
      __VUE_PROD_DEVTOOLS__: JSON.stringify(false)
    }),
  ]
};

module.exports = merge(webpackCommon, prodConfig);
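If the worker is built through wrangler's webpack support rather than a standalone webpack setup, the same flag could be injected there. This is only a sketch under that assumption (I'm assuming wrangler 1's webpack_config key here, and the entry path just mirrors the index.js from the question):

# wrangler.toml
type = "webpack"
webpack_config = "webpack.config.js"

// webpack.config.js
const webpack = require('webpack');

module.exports = {
  target: 'webworker',
  entry: './index.js',
  plugins: [
    // Define the Vue feature flag at build time so it never has to resolve at runtime
    new webpack.DefinePlugin({
      __VUE_PROD_DEVTOOLS__: JSON.stringify(false)
    })
  ]
};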
Currently, I'm developing a way to upload a message (file and fields) from Dropzone to Mongoose using Express Router. My back-end controller (which is called after authentication and data validation) goes as follows:
//Import Internal Dependencies
const Loader = require('../models/loader.js');
const Formidable = require('formidable');
const fs = require('fs');

module.exports = {
  load: async (req, res, next) => {
    var form = new Formidable.IncomingForm();
    let path;
    let contentType;
    await form.parse(req, async function (err, fields, files) {
      if (err) {
        return res.status(404).json(err);
      } else {
        const {
          user,
          patient,
          condition,
          compound,
          classi
        } = fields;
        path = files.image.path;
        contentType = files.image.type;
        fs.readFile(path, async function (err, data) {
          if (err) {
            return res.status(404).json(err);
          } else {
            //Save load
            const newLoader = new Loader({
              user,
              patient,
              condition,
              compound,
              classi,
              image: {
                data,
                contentType
              }
            });
            //Delete image in local storage
            await fs.unlink(path, function (error) {
              if (error) {
                return res.status(404).json(error)
              }
            });
            await newLoader.save();
            res.status(200).json("Load image successfully.");
            next()
          }
        })
      }
    });
  }
};
When I test it with Postman I get a status 202 and the images are successfully uploaded to the database. However, when I try to upload with Dropzone without the fields (which should cause an error that is displayed in Dropzone), I get the following errors/warnings in the back-end console (Dropzone stopped at the upload and didn't show any error):
(node:20834) UnhandledPromiseRejectionWarning: ValidationError: load validation failed: user: Path `user` is required., classi: Path `classi` is required.
at new ValidationError (/root/aimuneBack/node_modules/mongoose/lib/error/validation.js:27:11)
at model.Document.invalidate (/root/aimuneBack/node_modules/mongoose/lib/document.js:1876:32)
at p.doValidate.skipSchemaValidators (/root/aimuneBack/node_modules/mongoose/lib/document.js:1744:17)
at /root/aimuneBack/node_modules/mongoose/lib/schematype.js:808:9
at _combinedTickCallback (internal/process/next_tick.js:131:7)
at process._tickCallback (internal/process/next_tick.js:180:9)
(node:20834) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:20834) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
at emitWarning (internal/process/promises.js:92:15)
at emitPendingUnhandledRejections (internal/process/promises.js:109:11)
at process._tickCallback (internal/process/next_tick.js:189:7)
POST /load - - ms - -
So I know I have done something wrong with my asynchronous code and unfortunately cannot figure it out. Hope you can help. Best regards, Andre
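The stack trace points at newLoader.save(): when the Dropzone request arrives without the required fields, Mongoose validation rejects that awaited promise inside the callback, nothing catches it, and no response is ever sent back to Dropzone. A rough sketch of one way to handle it, wrapping only the save step (the 422 status and message are my own choices):

//Save load, catching validation errors so Dropzone always gets a response
try {
  await newLoader.save();
  res.status(200).json("Load image successfully.");
  next();
} catch (validationError) {
  // Mongoose lands here when required fields such as `user` or `classi` are missing
  return res.status(422).json(validationError);
}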
After I clone the repository using nodegit and call getHeadCommit(), the Node process holds the directory, which prevents it from being removed either by code (fs-extra remove) or by the OS.
console.log((async (): Promise<void> => {
  const tempDirectory: string = path.join(process.cwd(), '.tmp');
  console.log('clone');
  const repository: nodegit.Repository = await nodegit.Clone.clone(
    'ssh://git@***.git',
    tempDirectory,
    {
      checkoutBranch: 'master',
      fetchOpts: {
        callbacks: {
          certificateCheck: (): number => 1,
          credentials: (url: string, userName: string): nodegit.Cred =>
            nodegit.Cred.sshKeyNew(
              userName,
              path.join(os.homedir(), '.ssh', 'id_rsa.pub'),
              path.join(os.homedir(), '.ssh', 'id_rsa'),
              ''
            )
        }
      }
    }
  );
  console.log('get head commit');
  const commit: nodegit.Commit = await repository.getHeadCommit();
  console.log('remove');
  await fse.remove(tempDirectory); // Here Node hangs
  console.log('end');
})());
Error message:
Error: EBUSY: resource busy or locked, unlink '***\.tmp\.git\objects\pack\pack-27924883cff8a0039ced57d07bad35459885ff9d.pack'
Is there an error in my code? Or is there a method in nodegit for releasing the repository directory after using repository.getHeadCommit()?
Oops! I missed the free method. It fixed the problem.
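For completeness, a sketch of where that call would go in the snippet above (only the tail of the async function is shown; the rest is unchanged):

console.log('get head commit');
const commit: nodegit.Commit = await repository.getHeadCommit();
// Release the libgit2 handles so the pack file under .git is no longer locked
repository.free();
console.log('remove');
await fse.remove(tempDirectory); // no longer hangs once the repository is freed
console.log('end');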
I want to download a file from another website to my PC using Express.
I tried to use res.download, but it seems to work only for files on my own server.
Here is my code:
res.download('http://apkleecher.com/download/dl.php?dl=com.instagram.android', 'folder', function (err) {
  if (err) {
    console.log(err);
  } else {
  }
});
And it returns this error:
{ Error: ENOENT: no such file or directory, stat '/home/keitaro/Desktop/google-play/http:/apkleecher.com/download/dl.php?dl=com.instagram.android'
errno: -2,
code: 'ENOENT',
syscall: 'stat',
path: '/home/keitaro/Desktop/google-play/http:/apkleecher.com/download/dl.php?dl=com.instagram.android',
expose: false,
statusCode: 404,
status: 404 }
My guess is that the problem is the URL being treated as a local path.
Turning my comment into an answer since it worked for you...
You can fetch a resource from a remote web server using either http.get() or the request() module in node. If you like to use promises for your asynchronous operations, then the request-promise module is a promisified version of the request module and works great.
You can also use just plain http.get(), but it's a lot more work because you have to read the stream and collect the results yourself and install appropriate error handling, all of which the request() module does for you in one simple call.
Here's a simple example using the request-promise module:
const rp = require('request-promise');

rp('http://www.google.com').then(function (htmlString) {
  // Process html...
}).catch(function (err) {
  // process error here
});
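For comparison, a rough sketch of the plain http.get() approach mentioned above, reading the stream and collecting the result manually:

const http = require('http');

http.get('http://www.google.com', function (res) {
  let htmlString = '';
  res.on('data', chunk => { htmlString += chunk; }); // accumulate the response stream
  res.on('end', () => {
    // Process html...
  });
}).on('error', function (err) {
  // process error here
});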
res.download requires a path to your local filesystem.
try this:
res.redirect("http://apkleecher.com/download/dl.php?dl=com.instagram.android")
The best way to download a remote file is to use a stream; it uses only a small amount of memory.
npm i got
//========================
const got=require('got');
const fs=require('fs');
const path=require('path');
function file_downloader(link, file_name) {
  const file_path = path.join(__dirname, file_name);
  got.stream(encodeURI(link))
    .on('response', (response) => {
      //first response: check headers like ['content-length']
    })
    .on('error', (error) => {
      console.log("===========Stream Error======= ");
      console.log(error);
      console.log("===========//End Stream Error======= ");
    })
    .on('downloadProgress', (progress) => {
      console.log(file_name, progress);
    })
    .pipe(fs.createWriteStream(file_path));
}
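A quick usage sketch (the output file name here is just a placeholder I chose):

// Streams the APK from the URL in the question into ./app.apk next to this script
file_downloader('http://apkleecher.com/download/dl.php?dl=com.instagram.android', 'app.apk');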