Using multer to add an array of images - Express

I am using multer to add an array of images to my product document. The files land in the uploads folder with the right names, but the name comes back as undefined in Postman.
This is the code I tried:
router.put(
    "/gallery-images/:id",
    uploadOptions.array("images", 10),
    async (req, res) => {
        if (!mongoose.isValidObjectId(req.params.id)) {
            return res.status(400).send("Invalid product ID");
        } // product ID validation
        const files = req.files;
        let imagesPaths = [];
        const basePath = `${req.protocol}://${req.get("host")}/public/upload/`;
        if (files) {
            files.map((file) => {
                imagesPaths.push(`${basePath}${file.fileName}`);
                console.log("uhu", file);
            });
        }
        let product = await Product.findByIdAndUpdate(
            req.params.id,
            {
                images: imagesPaths,
            },
            { new: true }
        );
        if (!product) return res.status(500).send("The product cannot be updated");
        res.send(product);
    }
);

It worked with this change:
const basePath = `${req.protocol}://${req.get("host")}/public/upload/`;
if (files) {
    // files.map((file) => {
    //     imagesPaths.push(`${basePath}${file.fileName}`);
    // });
    imagesPaths = files.map((file) => {
        const fileName = file.filename;
        return `${basePath}${fileName}`;
    });
}
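The root cause is the property name: multer exposes the generated file name as filename (all lowercase), so file.fileName is undefined. For reference, a disk-storage file object looks roughly like this (the values are illustrative, not taken from the question):

// Shape of a multer disk-storage file object - note the lowercase keys:
// {
//   fieldname: 'images',
//   originalname: 'photo.jpg',
//   encoding: '7bit',
//   mimetype: 'image/jpeg',
//   destination: 'public/upload',
//   filename: '1622547800000-photo.jpg',  // the generated name on disk
//   path: 'public/upload/1622547800000-photo.jpg',
//   size: 12345
// }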

Related

Jest integration test Express REST API with Mongoose

Hi everybody. I'm new to unit/integration testing and I'm having trouble testing one of my API routes, which involves file-system operations and Mongoose model method calls. I need to be able to mock a Mongoose model method as well as the router's post handler. Let me share my router's post method.
documents.js
const { User } = require('../models/user');
const { Document } = require('../models/document');
const isValidObjectId = require('./../helpers/isValidObjectId');
const createError = require('./../helpers/createError');
const path = require('path');
const fs = require('fs');
const auth = require('./../middlewares/auth');
const uploadFile = require('./../middlewares/uploadFile');
const express = require('express');
const router = express.Router();
.
.
.
router.post('/mine', [auth, uploadFile], async (req, res) => {
    const user = await User.findById(req.user._id);
    user.leftDiskSpace(function(err, leftSpace) {
        if(err) {
            return res.status(400).send(createError(err.message, 400));
        } else {
            if(leftSpace < 0) {
                fs.access(req.file.path, (err) => {
                    if(err) {
                        res.status(403).send(createError('Your plan\'s disk space is exceeded.', 403));
                    } else {
                        fs.unlink(req.file.path, (err) => {
                            if(err) res.status(500).send('The document could not be deleted from disk.');
                            else res.status(403).send(createError('Your plan\'s disk space is exceeded.', 403));
                        });
                    }
                });
            } else {
                let document = new Document({
                    filename: req.file.filename,
                    path: `/uploads/${req.user.username}/${req.file.filename}`,
                    size: req.file.size
                });
                document.save()
                    .then((savedDocument) => {
                        user.documents.push(savedDocument._id);
                        user.save()
                            .then(() => res.send(savedDocument));
                    });
            }
        }
    });
});
.
.
.
module.exports = router;
documents.test.js
const request = require('supertest');
const { Document } = require('../../../models/document');
const { User } = require('../../../models/user');
const mongoose = require('mongoose');
const fs = require('fs');
const path = require('path');
const config = require('config');

let server;

describe('/api/documents', () => {
    beforeEach(() => { server = require('../../../bin/www'); });
    afterEach(async () => {
        let pathToTestFolder = path.join(process.cwd(), config.get('diskStorage.destination'), 'user');
        await fs.promises.access(pathToTestFolder)
            .then(() => fs.promises.rm(pathToTestFolder, { recursive: true }))
            .catch((err) => { return; });
        await User.deleteMany({});
        await Document.deleteMany({});
        server.close();
    });
.
.
.
    describe('POST /mine', () => {
        let user;
        let token;
        let file;

        const exec = async () => {
            return await request(server)
                .post('/api/documents/mine')
                .set('x-auth-token', token)
                .attach('document', file);
        }

        beforeEach(async () => {
            user = new User({
                username: 'user',
                password: '1234'
            });
            user = await user.save();
            user.leftDiskSpace(function(err, size) { console.log(size); });
            token = user.generateAuthToken();
            file = path.join(process.cwd(), 'tests', 'integration', 'files', 'test.json');
        });
.
.
.
        it('should return 400 if an error occurs during calculation of authorized user\'s left disk space', async () => {
            jest.mock('../../../routes/documents');
            let documentsRouter = require('../../../routes/documents');
            let mockReq = {};
            let mockRes = {};
            let mockPostRouter = jest.fn();
            mockPostRouter.mockImplementation((path, callback) => {
                if(path === '/mine') callback(mockReq, mockRes);
            });
            documentsRouter.post = mockPostRouter;
            let error = new Error('Something went wrong...');
            const res = await exec();
            console.log(res.body);
            expect(res.status).toBe(400);
            expect(res.body.error).toHaveProperty('message', 'Something went wrong...');
        });
.
.
.
    });
});
What I want is to mock the user.leftDiskSpace(function(err, leftSpace)) model method inside the router.post('/mine', ...) route handler, so that the callback can drive execution into both the if and else branches. How can I do that?
Thanks in advance.
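One way to reach those branches, as a minimal sketch: assuming leftDiskSpace is defined through userSchema.methods (and therefore lives on User.prototype), you can replace it per test with jest.spyOn and force the callback's err argument:

// documents.test.js - sketch; assumes leftDiskSpace comes from userSchema.methods
it('should return 400 if leftDiskSpace yields an error', async () => {
    const spy = jest
        .spyOn(User.prototype, 'leftDiskSpace')
        .mockImplementation(function(callback) {
            callback(new Error('Something went wrong...')); // force the error branch
        });

    const res = await exec();

    expect(spy).toHaveBeenCalled();
    expect(res.status).toBe(400);
    expect(res.body.error).toHaveProperty('message', 'Something went wrong...');

    spy.mockRestore(); // put the real method back for the other tests
});

Calling callback(null, -1) instead would drive the leftSpace < 0 branch.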

Stream S3 to DynamoDB with fast-csv: not all data inserted

When a CSV file is uploaded to my S3 bucket, my Lambda is triggered to insert the data into DynamoDB.
I need a stream because the file is too large to be downloaded as a full object.
const batchWrite = async (clientDynamoDB, itemsToProcess) => {
    const ri = {};
    ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
    const params = { RequestItems: ri };
    await clientDynamoDB.batchWriteItem(params).promise();
};

function runStreamPromiseAsync(stream, clientDynamoDB) {
    return new Promise((resolve, reject) => {
        const sizeChunk = 25;
        let itemsToProcess = [];

        stream
            .pipe(fastCsv.parse({ headers: Object.keys(schemaGeData), trim: true }))
            .on("data", (row) => {
                stream.pause();
                itemsToProcess.push(row);
                if (itemsToProcess.length === sizeChunk) {
                    batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
                        stream.resume();
                    });
                    itemsToProcess = [];
                }
            })
            .on("error", (err) => {
                console.log(err);
                reject("Error");
            })
            .on("end", () => {
                stream.pause();
                console.log("end");
                batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
                    resolve("OK");
                });
            });
    });
}

module.exports.main = async (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false;
    const AWS = require('aws-sdk');
    const s3 = new AWS.S3();

    const object = event.Records[0].s3;
    const bucket = object.bucket.name;
    const file = object.object.key;

    const agent = new https.Agent({
        keepAlive: true
    });
    const client = new AWS.DynamoDB({
        httpOptions: {
            agent
        }
    });

    try {
        // get the CSV data as a stream
        const stream = s3
            .getObject({
                Bucket: bucket,
                Key: file
            })
            .createReadStream()
            .on('error', (e) => {
                console.log(e);
            });

        await runStreamPromiseAsync(stream, client);
    } catch (e) {
        console.log(e);
    }
};
When my file has 1,000 lines everything is inserted, but when it has 5,000 lines only around 3,000 make it in, and the number varies from run to run, sometimes more, sometimes less. So I'd like to understand what I am missing here.
I also read this article, but to be honest, even if you pause the second stream, the first one is still running. If anyone has ideas on how to do this, it would be greatly appreciated!
Thanks
I found out why it was not fully processed: the callback of batchWriteItem can return unprocessed items. So I changed the batchWrite function, and also runStreamPromiseAsync a little, because not all of the items from itemsToProcess may have been processed.
Anyway, here is the full code:
const batchWrite = (client, itemsToProcess) => {
    const ri = {};
    ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
    const items = { RequestItems: ri };

    const processItemsCallback = function(err, data) {
        return new Promise((resolve, reject) => {
            if(!data || data.length === 0) {
                return resolve();
            }
            if(err) {
                return reject(err);
            }
            let params = {};
            params.RequestItems = data.UnprocessedItems;
            // resubmit whatever DynamoDB did not process
            return client.batchWriteItem(params, processItemsCallback);
        });
    };

    return client.batchWriteItem(items, processItemsCallback);
};

function runStreamPromiseAsync(stream, clientDynamoDB) {
    return new Promise((resolve, reject) => {
        const sizeChunk = 25;
        let itemsToProcess = [];
        let arrayPromise = [];

        stream
            .pipe(fastCsv.parse({ headers: Object.keys(schemaGeData), trim: true }))
            .on("error", (err) => {
                console.log(err);
                reject("Error");
            })
            .on('data', data => {
                itemsToProcess.push(data);
                if(itemsToProcess.length === sizeChunk) {
                    arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
                    itemsToProcess = [];
                }
            })
            .on('end', () => {
                if(itemsToProcess.length !== 0) {
                    arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
                }
                resolve(Promise.all(arrayPromise).catch(e => {
                    reject(e)
                }));
            });
    });
}
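For comparison, here is a minimal async/await sketch of the same retry idea (it reuses TABLE_DYNAMO and toPutRequest from the code above): batchWriteItem reports anything it could not write in UnprocessedItems, so you resubmit until that map is empty:

const batchWriteWithRetry = async (client, itemsToProcess) => {
    let requestItems = {
        [TABLE_DYNAMO]: itemsToProcess.map((itm) => toPutRequest(itm)),
    };
    // UnprocessedItems is an empty object once everything has been written
    while (requestItems && Object.keys(requestItems).length > 0) {
        const { UnprocessedItems } = await client
            .batchWriteItem({ RequestItems: requestItems })
            .promise();
        requestItems = UnprocessedItems;
    }
};

In production you would normally add a short exponential backoff between the retries, as AWS recommends for batch operations.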

Request body is empty when submitting data using "form data"

When I update using raw JSON it works, but when I use form-data it does not update: the request body arrives as an empty object. Why is this happening?
Here's my update code:
exports.updateProgram = catchAsync(async (req, res, next) => {
    console.log('req body', req.body)
    let doc = await Program.findByIdAndUpdate(req.params.id, req.body, { runValidators: true, new: true })
    if (!doc) {
        return next(new AppError('No document found with that ID', 404))
    }
    res.status(200).json({
        status: 'success!',
        data: { doc }
    })
})
In Postman: (screenshot of the form-data request not shown)
I am using multer, and I actually pass the photos through req.body. Here's the code:
let multerStorage = multer.memoryStorage()

let multerFilter = (req, file, cb) => {
    if (file.mimetype.split('/')[0] == 'image') {
        cb(null, true)
    } else {
        cb(new AppError('Not an image!', 400), false)
    }
}

let upload = multer({
    storage: multerStorage,
    fileFilter: multerFilter
})

exports.uploadPhotos = upload.fields([
    { name: 'abcd', maxCount: 10 },
    { name: 'photos', maxCount: 10 },
    { name: 'photos3', maxCount: 10 }
])

exports.resizePhotos = catchAsync(async (req, res, next) => {
    // if (!req.files.photos || !req.files.abcd) return next()
    if (req.files.abcd) {
        req.body.abcd = []
        await Promise.all(req.files.abcd.map(async (file, i) => {
            let filename = `tour-${Date.now()}-${i + 1}.jpeg`
            await sharp(file.buffer)
                .resize(500, 500)
                .toFormat('jpeg')
                .jpeg({ quality: 90 })
                .toFile(`public/img/arpit/${filename}`)
            req.body.abcd.push(filename)
        }))
    } else if (req.files.photos3) {
        req.body.photos3 = []
        await Promise.all(req.files.photos3.map(async (file, i) => {
            let filename = `tour-${Date.now()}-${i + 1}.jpeg`
            await sharp(file.buffer)
                .resize(500, 500)
                .toFormat('jpeg')
                .jpeg({ quality: 90 })
                .toFile(`public/img/arpit/${filename}`)
            req.body.photos3.push(filename)
        }))
    } else if (req.files.photos) {
        req.body.photos = []
        console.log('req.files>>>', req.files)
        await Promise.all(req.files.photos.map(async (file, i) => {
            let filename = `tour-${Date.now()}-${i + 1}.jpeg`
            await sharp(file.buffer)
                .resize(500, 500)
                .toFormat('jpeg')
                .jpeg({ quality: 90 })
                .toFile(`public/img/programs/${filename}`)
            req.body.photos.push(filename)
        }))
    }
    return next()
})
I'm importing these middlewares in the routes file.
Express's body parser can't handle multipart/form-data, and that's why your code isn't working.
Take a look at multer, an Express package. It is a middleware that provides the functionality you're looking for.
var cpUpload = upload.fields([{ name: 'avatar', maxCount: 1 }, { name: 'gallery', maxCount: 8 }]);
app.post('/cool-profile', cpUpload, function (req, res, next) {
    // req.files is an object (String -> Array) where fieldname is the key, and the value is array of files
    //
    // e.g.
    //  req.files['avatar'][0] -> File
    //  req.files['gallery'] -> Array
    //
    // req.body will contain the text fields, if there were any
})
This might help you. Quoted from https://www.npmjs.com/package/multer#readme
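Since the question already wires up multer, the usual culprit is the route order: the multer middleware must run before updateProgram, or the multipart fields never reach req.body. A sketch of the routes file (the controller path and the HTTP verb are assumptions, not from the question):

const express = require('express')
const programController = require('../controllers/programController') // assumed path
const router = express.Router()

router.patch(
    '/:id',
    programController.uploadPhotos,  // multer parses the multipart body into req.files/req.body
    programController.resizePhotos,  // writes the resized filenames into req.body
    programController.updateProgram  // req.body is now populated
)

module.exports = router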

Image upload using react-admin

I am new to react-admin and I am using it to upload a file. I followed the steps mentioned in the tutorial.
But after I submit the request, I see the following HTTP trace: a blob link instead of the Base64 image payload.
{
    "pictures": {
        "rawFile": {
            "preview": "blob:http://127.0.0.1:3000/fedcd180-cdc4-44df-b8c9-5c7196788dc6"
        },
        "src": "blob:http://127.0.0.1:3000/fedcd180-cdc4-44df-b8c9-5c7196788dc6",
        "title": "Android_robot.png"
    }
}
Can someone please advise how to get the Base64 image payload instead of the link?
Check whether you have this handler; most likely you did not change the resource name posts to yours:
const addUploadCapabilities = requestHandler => (type, resource, params) => {
    if (type === 'UPDATE' && resource === 'posts') {
Create your custom dataProvider to convert the picture to Base64:
import restServerProvider from 'ra-data-json-server';

const servicesHost = 'http://localhost:8080/api';
const dataProvider = restServerProvider(servicesHost);

const myDataProvider = {
    ...dataProvider,
    create: (resource, params) => {
        if (resource !== 'your-route' || !params.data.pictures) {
            // fallback to the default implementation
            return dataProvider.create(resource, params);
        }

        const myFile = params.data.pictures;
        if (!(myFile.rawFile instanceof File)) {
            return Promise.reject('Error: Not a file...'); // Didn't test this...
        }

        return Promise.resolve(convertFileToBase64(myFile))
            .then((picture64) => ({
                src: picture64,
                title: `${myFile.title}`
            }))
            .then(transformedMyFile => dataProvider.create(resource, {
                ...params,
                data: {
                    ...params.data,
                    myFile: transformedMyFile
                }
            }));
    }
};

const convertFileToBase64 = file => new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file.rawFile);
    reader.onload = () => resolve(reader.result);
    reader.onerror = reject;
});

export default myDataProvider;
And get the image data in your server API:
exports.create = (req, res) => {
    if (req.body.myFile) {
        var file = req.body.myFile;
        var fs = require('fs');
        var data = file.src.replace(/^data:image\/\w+;base64,/, "");
        var buf = Buffer.from(data, 'base64');
        fs.writeFile(`upload/${file.title}`, buf, err => {
            if (err) throw err;
            console.log('Saved!');
        });
    }
};

upload an image to amazon s3 in react-native

I am trying to upload an image to Amazon S3. If possible, can anyone provide links/docs on how to upload to S3? Any help is much appreciated.
S3 options:
// this.state.s3options in YourComponent
{
    "url": "https://yourapp.s3.eu-central-1.amazonaws.com",
    "fields": {
        "key": "cache/22d65141b48c5c44eaf93a0f6b0abc30.jpeg",
        "policy": "eyJleHBpcm...1VDE0Mzc1OVoifV19",
        "x-amz-credential": "AK...25/eu-central-1/s3/aws4_request",
        "x-amz-algorithm": "AWS4-HMAC-SHA256",
        "x-amz-date": "20161125T143759Z",
        "x-amz-signature": "87863c360...b9b304bfe650"
    }
}
Component:
class YourComponent extends Component {
    // ...

    // fileSource looks like: {uri: "content://media/external/images/media/13", isStatic: true}
    async uploadFileToS3(fileSource) {
        try {
            var formData = new FormData();

            // Prepare the formData from the S3 options
            Object.keys(this.state.s3options.fields).forEach((key) => {
                formData.append(key, this.state.s3options.fields[key]);
            });
            formData.append('file', {
                uri: fileSource.uri,
                type: 'image/jpeg',
            });
            formData.append('Content-Type', 'image/jpeg')

            var request = new XMLHttpRequest();
            request.onload = function(e) {
                if (e.target.status === 204) {
                    // Result in e.target.responseHeaders.Location
                    this.setState({avatarSourceRemote: {uri: e.target.responseHeaders.Location}})
                }
            }.bind(this)
            request.open('POST', this.state.s3options.url, true);
            request.setRequestHeader('Content-type', 'multipart/form-data');
            request.send(formData);
        } catch(error) {
            console.error(error);
        }
    }

    // Example: display the uploaded image
    render() {
        if (this.state.avatarSourceRemote) {
            return (
                <Image source={this.state.avatarSourceRemote} style={{width: 100, height: 100}} />
            );
        } else {
            return (
                <Text>No Image</Text>
            );
        }
    }
}
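For reference, the s3options object above has the shape produced by a server-side presigned POST. A minimal sketch of how a Node backend might generate it with the AWS SDK v2's createPresignedPost (the bucket name and key are taken from the example above; the expiry is an assumption):

const AWS = require('aws-sdk');
const s3 = new AWS.S3({ region: 'eu-central-1' });

s3.createPresignedPost({
    Bucket: 'yourapp',
    Fields: { key: 'cache/22d65141b48c5c44eaf93a0f6b0abc30.jpeg' },
    Expires: 300 // seconds the signed policy stays valid
}, (err, data) => {
    if (err) throw err;
    // data.url and data.fields match the shape of this.state.s3options
    console.log(data);
});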
This works for me
import fs from 'react-native-fs';
import {decode} from 'base64-arraybuffer';
import AWS from 'aws-sdk';

export const uploadFileToS3 = async (file) => {
    const BUCKET_NAME = 'XXXXXXXXXX';
    const IAM_USER_KEY = 'XXXXXXXXXX';
    const IAM_USER_SECRET = 'XXXXXXXXXXXXXXX';

    const s3bucket = new AWS.S3({
        accessKeyId: IAM_USER_KEY,
        secretAccessKey: IAM_USER_SECRET,
        Bucket: BUCKET_NAME,
        signatureVersion: 'v4',
    });

    const contentType = file.type;
    const contentDeposition = `inline;filename="${file.name}"`;
    const fPath = file.uri;

    // read the local file and turn the base64 payload into an ArrayBuffer
    const base64 = await fs.readFile(fPath, 'base64');
    const arrayBuffer = decode(base64);

    return new Promise((resolve, reject) => {
        s3bucket.createBucket(() => {
            const params = {
                Bucket: BUCKET_NAME,
                Key: file.name,
                Body: arrayBuffer,
                ContentDisposition: contentDeposition,
                ContentType: contentType,
            };
            s3bucket.upload(params, (error, data) => {
                utils.stopLoader(); // author's UI helper
                if (error) {
                    reject(getApiError(error)); // author's error wrapper
                } else {
                    console.log(JSON.stringify(data));
                    resolve(data);
                }
            });
        });
    });
};
This worked for me after a significant amount of trial and error.
I am also using a Lambda function to serve me the link to post with.
The Lambda function just calls getSignedUrl.
// Lambda Function
const AWS = require('aws-sdk')

AWS.config.update({
    accessKeyId: {bucket_access},
    secretAccessKey: {bucket_secret},
    signatureVersion: 'v4',
    region: {bucket_region}
})
const s3 = new AWS.S3()

exports.handler = async (event) => {
    const URL = s3.getSignedUrl('putObject', {
        Bucket: {bucket_name},
        // name of file being placed in the S3 bucket
        // event === metaData object
        Key: `${event.{key}}/photo00`
    })
    return URL
};

// React Native
const imagePreview = '{image_uri}'

const handleURL = async () => {
    // metaData object
    const obj = {
        key: "meta_data"
    }
    const response = await fetch({lambda_func_endpoint}, {
        method: 'POST',
        body: JSON.stringify(obj)
    })
    const json = await response.json();
    return json
}

const handleUpload = async () => {
    const URL = await handleURL()
    const imageExt = imagePreview.split('.').pop()

    // Fetching the local image URI resolves the file contents, and .blob()
    // gives a binary body that the signed PUT accepts. Non-obvious, but it
    // works - lots of trying, as I said.
    let image = await fetch(imagePreview)
    image = await image.blob()

    await fetch(URL, {
        method: 'PUT',
        body: image,
        headers: {
            Accept: `image/${imageExt}`,
            'Content-Type': `image/${imageExt}`
        }
    })
        .then((res) => console.log(JSON.parse(JSON.stringify(res)).status))
        .catch((err) => console.error(err))
}
Let me know what you guys think!