uploading large video files to s3 with express.js - amazon-s3

i'm uploading media files to s3 with express and multer-s3. it's working fine if i upload files less than 10 mb.
If I upload anything larger than about 7 MB (I tested several sizes; around 30 MB it always fails), it gives the error "Cannot determine length of string".
I'm uploading .mp4 file
Error Message
Exception has occurred: Error
Error: Cannot determine length of [object Object]
at byteLength (projectpath/node_modules/aws-sdk/lib/util.js:200:26)
at ManagedUpload.adjustTotalBytes (projectpath/node_modules/aws-sdk/lib/s3/managed_upload.js:299:25)
at ManagedUpload.configure (projectpath/node_modules/aws-sdk/lib/s3/managed_upload.js:122:10)
at new ManagedUpload (projectpath/node_modules/aws-sdk/lib/s3/managed_upload.js:93:10)
at features.constructor.upload (projectpath/node_modules/aws-sdk/lib/services/s3.js:1087:20)
at S3Storage.<anonymous> (projectpath/node_modules/multer-s3/index.js:182:26)
at projectpath/node_modules/multer-s3/index.js:68:10
at FileStream.<anonymous> (projectpath/node_modules/multer-s3/index.js:47:5)
at Object.onceWrapper (events.js:286:20)
at FileStream.emit (events.js:198:13)
at FileStream.EventEmitter.emit (domain.js:448:20)
at FileStream.Readable.read (_stream_readable.js:491:10)
at flow (_stream_readable.js:957:34)
at resume_ (_stream_readable.js:938:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
my code
Body parser config
app.use(bodyparser.urlencoded({ extended: true, limit: '1024mb' }));
app.use(bodyparser.json({}));
file route
router.route('/faqvideoupload')
.post(faqFileUpload.array('files', 1), uploadFileHandler)
faqFileUpload
// Multer instance that streams uploaded FAQ videos straight to S3 via multer-s3.
// FIX(review): the original key was the broken literal `faqs/videos/filename}`
// (the `${...}` interpolation was missing), so every upload was written to the
// same literal S3 key and overwrote the previous file. The key now interpolates
// a timestamp plus the original file name.
module.exports.faqFileUpload = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: process.env.BUCKET_NAME,
    contentDisposition: 'inline',
    // Let multer-s3 sniff the Content-Type from the file stream.
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      cb(null, { fieldName: file.fieldname });
    },
    key: function (req, file, cb) {
      // Timestamp prefix keeps keys unique across uploads of the same file name.
      cb(null, `faqs/videos/${Date.now()}-${file.originalname}`);
    }
  })
});
uploadFileHandler
/**
 * Express handler that reports the S3 metadata of files already stored by the
 * multer-s3 middleware (key, public location, and mime type of each file).
 * Always responds 200 with a JSON envelope; failures set `success: false`.
 */
async function uploadFileHandler(req, res) {
  try {
    const files = req.files.map(({ key, location, mimetype }) => ({
      key,
      location,
      type: mimetype,
    }));
    return res.json({ success: true, files });
  } catch (error) {
    // e.g. req.files is undefined when the upload middleware did not run.
    return res.json({ success: false, message: error.message });
  }
}
do i need to change any setting in s3 bucket itself?
I also read that I have to set contentLength, but that also did not work.
i'm using t2.micro ec2 with eb

Related

AWS SDK for JavaScript v3 PutObjectCommand error 'A header you provided implies functionality that is not implemented'

I'm trying to upload a file with node.js from my client app (electron) to an S3 bucket in this manner:
// FIX(review): the scoped package name is "@aws-sdk/client-s3" — "#aws-sdk" is
// not a valid npm specifier and the require would fail at startup.
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');

// WARNING(review): credentials are hard-coded here; load them from the
// environment or a credentials provider instead of committing secrets.
const s3Client = new S3Client({
  region: 'eu-central-1',
  credentials: {
    accessKeyId: 'access',
    secretAccessKey: 'secret',
  },
});
// Uploads a single file object `f` ({ name, data }) to S3 with SSE-AES256,
// then reports the outcome to the parent process via process.send.
const uploadFileToS3 = async (f) => {
  const bucketParams = {
    ACL: 'private',
    Bucket: 'bucket',
    Key: f.name,
    Body: f.data,
    ServerSideEncryption: 'AES256',
    ContentType: 'image/png',
  };
  try {
    const result = await s3Client.send(new PutObjectCommand(bucketParams));
    return process.send({
      type: 'success',
      fileName: f.name,
      result,
    });
  } catch (erro) {
    process.send({
      type: 'error',
      fileName: f.name,
      error: erro,
    });
  }
};
process.on('message', (file) => {
uploadFileToS3(file);
});
I get the following error, that I'm unable to understand:
error: {
name: 'NotImplemented',
'$fault': 'client',
'$metadata': {
httpStatusCode: 501,
requestId: 'PXEBV6H4MX3',
extendedRequestId: 'yyyyyy',
attempts: 1,
totalRetryDelay: 0
},
Code: 'NotImplemented',
Header: 'Transfer-Encoding',
RequestId: 'PXEBV6H4MX3',
HostId: 'yyyyyy',
message: 'A header you provided implies functionality that is not implemented'
}
The file is a buffer generated with:
fs.readFileSync(pth)
Any idea of what could caused this error ?
Seems like the buffer created with
fs.readFileSync(pth)
was rejected and I could only use a stream:
const readableStream = await createReadStream(Buffer.from(f));
Maybe I'm wrong but it is possible that the actual SDK version is unable to accept a buffer yet, this could be the reason for that "missing functionality" message.

using unlinkAsync file deleted and throws Error: ENOENT: no such file or directory, open 'E:\Mern Stack\udyog-asha\backend\public\cs.word.pdf'

app.use(express.static("public"));
// Disk storage for multer: files land in ./public (also served statically above).
// The saved name is fieldname + epoch-millis + original name, which both avoids
// multer's duplicate-file collisions and keeps names unique per request.
const storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, "public");
},
filename: (req, file, cb) => {
cb(null, file.fieldname + Date.now() + file.originalname);
},
});
// Accepts up to 18 files from the "file" multipart field.
const upload = multer({ storage }).array("file", 18);
// NOTE(review): fragment cut from a larger async handler — `req`, `drive`,
// `response_2`, and `unlinkAsync` are defined outside this view.
// Walks the (up to 18) uploaded files and pushes each to Google Drive.
var i = 0;
while (i < 18) {
if (req.files[i] != undefined) {
console.log("MY" + i);
const filePath_1 = path.join(
__dirname,
`../public/${req.files[i].filename}`
);
console.log(response_2);
// Uploads the local file into the Drive folder created earlier (response_2).
const response_file_1 = await drive.files.create({
requestBody: {
parents: [response_2.data.id],
name: req.files[i].filename, //This can be name of your choice
mimeType: "application/pdf",
},
media: {
mimeType: "application/pdf",
body: fs.createReadStream(filePath_1),
},
})
// NOTE(review): this promise is not awaited — the file can be deleted while a
// later read of the same path is still pending; consider `await unlinkAsync(...)`.
unlinkAsync(req.files[i].path);
}
i++;
}
I have done all the necessary setup for initializing multer and unlinkAsync. Everything works fine, but when it comes to deleting the file from multer storage, the file gets deleted yet it still throws the error that no such file or directory exists.
Error in deleting the file, I don't know what to do please somebody help.
here is the full error:
throw er; // Unhandled 'error' event
^
Error: ENOENT: no such file or directory, open 'E:\Mern Stack\udyog-asha\backend\public\cs.word.pdf'
Emitted 'error' event on ReadStream instance at:
at internal/fs/streams.js:132:12
at FSReqCallback.oncomplete (fs.js:154:23) {
errno: -4058,
code: 'ENOENT',
syscall: 'open',
path: 'E:\\Mern Stack\\udyog-asha\\backend\\public\\cs.word.pdf'
}
Actually, the reason the error is thrown is that multer never saves duplicate files, and I was uploading multiple duplicate files — the file gets deleted before it can be read and saved to Drive, which throws the error.
So the solution is change the name. for example add timestamp so that there will be no error.
const storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, "public");
},
filename: (req, file, cb) => {
cb(null, file.fieldname + Date.now() + file.originalname);
},
});
file.fieldname + Date.now() + file.originalname , this is the important line.

Multer-s3-transform with Sharp middleware in Express JS route, uploads correctly, but never calls next

I'm using multer-s3-transform to resize my image upload before uploading to S3, using Sharp for the resize. The transform does work fine — the image is resized and uploaded to the S3 bucket, and I can see the file in the bucket. However, the uploadToS3.single('profilePic') middleware never continues to the next handler, which is the final route.
What am I doing wrong?
exports.uploadToS3 = multer({
storage: multerS3({
s3: s3,
bucket: S3_BUCKET,
acl: 'public-read',
contentType: multerS3.AUTO_CONTENT_TYPE,
fileFilter: allowOnlyImages,
limits: {
fileSize: 1024 * 250 // We are allowing only 250K
},
shouldTransform: (req, file, cb) => {
// Is this an image file type?
cb(null, /^image/i.test(file.mimetype));
},
transforms: [{
id: 'thumbnail',
key: (req, file, cb) => {
const fn = `${S3_PROFILE_PICS}/${req.body.handle}/${createFilename(req, file)}`;
//console.log('transforms key fn: ', fn);
cb(null, fn);
},
transform: (req, file, cb) => {
//console.log('transform: ', file.originalname);
// Perform desired transformations
const width = parseInt(PROFILE_PIC_W);
const height = parseInt(PROFILE_PIC_H);
cb(null, sharp()
.resize(width, height)
.toFormat('jpeg')
);
}
}]
})
});
Route...
router.post('/register', uploadToS3.single('profilePic'), async (req, res) => {
...
...
});
It's something about the transform and/or Sharp. If I remove the Sharp transform and just upload the incoming image to S3, everything works fine.
Try this to see it work:
router.post('/register', uploadToS3.single('profilePic'), async (req, res, next) => {
...
...
});

Unexpected end of multipart data nodejs multer s3

I am trying to upload an image to S3; this is my code:
const upload = require('../services/file_upload');
const singleUpload = upload.single('image');

// Runs the single-file multer-s3 middleware manually so an upload error can be
// turned into a JSON error response rather than crashing the request.
module.exports.uploadImage = (req, res) => {
  singleUpload(req, res, (err) => {
    if (err) {
      console.log(err);
      return res.status(401).send({ errors: [{ title: 'File Upload Error', detail: err}] });
    }
    console.log(res);
    // multer-s3 exposes the uploaded object's public URL on req.file.location.
    return res.json({ 'imageUrl': req.file.location });
  });
};
FileUpload.js
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const s3 = new aws.S3();
// Multer fileFilter: accept only JPEG and PNG uploads; anything else is
// rejected with an Error passed to the callback.
const fileFilter = (req, file, cb) => {
  const allowed = ['image/jpeg', 'image/png'];
  if (allowed.includes(file.mimetype)) {
    cb(null, true);
  } else {
    cb(new Error('Invalid Mime Type, only JPEG and PNG'), false);
  }
};
// Multer instance streaming JPEG/PNG uploads to the "image-bucket" S3 bucket.
// FIX(review): the key previously hard-coded a ".png" suffix even when the
// fileFilter accepted a JPEG, producing misleading object names; the extension
// is now derived from the actual mime type.
const upload = multer({
  fileFilter,
  storage: multerS3({
    s3,
    bucket: 'image-bucket',
    acl: 'public-read',
    // Content-Type is detected from the stream, independent of the key suffix.
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      cb(null, {fieldName: 'TESTING_META_DATA!'});
    },
    key: function (req, file, cb) {
      const ext = file.mimetype === 'image/jpeg' ? '.jpg' : '.png';
      cb(null, "category_" + Date.now().toString() + ext);
    }
  })
});
module.exports = upload;
I tried to test the API with Postman in serverless local; it gives this error:
Error: Unexpected end of multipart data
at D:\Flutter\aws\mishpix_web\node_modules\dicer\lib\Dicer.js:62:28
at process._tickCallback (internal/process/next_tick.js:61:11) storageErrors: []
After deploying online, I tried the API. The file is uploaded to the server, but it is broken.
Are you using aws-serverless-express? aws-serverless-express forwards the original request body as a utf8-encoded Buffer, so multipart data gets corrupted or lost. I am not sure why.
So, I change aws-serverless-express to aws-serverless-express-binary and everything worked.
yarn add aws-serverless-express-binary
Hope this help!

Multer-s3 dynamic s3 instance

I'm trying to upload files to my s3 bucket, using multer and multer-s3 for Nodejs. The problem that I have now is that I want to set up my s3 instance dynamically because the s3 account and the bucket depend on my user settings.
I have the following code:
My uploader
// Uploader that attempts to build the S3 client and pick the bucket per
// request (from req.body / the user's settings).
// NOTE(review): multer-s3 does NOT accept a function for the `s3` option —
// only options like `bucket`, `key`, `acl`, `metadata` may be callbacks — so
// this storage engine fails at runtime; the S3 instance must be constructed
// before multer() is configured (which is exactly the asker's problem below).
var uploader = multer({
storage: multerS3({
// BUG(review): `s3` must be an AWS.S3 instance, not a callback.
s3: function(req, file, cb){
cb(null, new AWS.S3({
// my s3 settings from req.body or getting directly from my db
}))
},
// Per-request bucket selection (this one IS supported as a function).
bucket: function (req, file, cb){
cb(null, req.body.bucket)
},
metadata: function (req, file, cb) {
cb(null, {
fieldName: file.fieldname
});
},
// Object key = caller-supplied prefix + original file name.
key: function (req, file, cb) {
console.log(`Key is ${req.body.prefix + file.originalname}`);
cb(null, req.body.prefix + file.originalname)
}
})
}).single('file');
// Upload endpoint.
// FIX(review): the original handler referenced an undefined `err` variable
// (a ReferenceError on every request) — errors from multer middleware are not
// visible inside the route handler. Invoke the uploader manually so its error
// argument can be inspected and reported.
api.post('/upload', function (req, res, next) {
  uploader(req, res, function (err) {
    if (err)
      return res.status(500).json({ message: err });
    res.status(200).json({
      message: 'uploaded'
    });
  });
});
The problem is that multer-s3 doesn't allow a function for s3 option as parameter but an s3 object instead.
How can I approach this?