Using aws-sdk to upload to DigitalOcean Spaces - amazon-s3

I'm using aws-sdk to upload images to a DigitalOcean Spaces bucket. On localhost it works 100%, but in production the function appears to complete without an error while the file never shows up in the bucket.
I cannot figure out what is going on and can't think of a way to debug it. I also tried executing the POST request with Postman (multipart/form-data, with the file added to the request body): same result, localhost works and production does not.
My API endpoint:
import AWS from 'aws-sdk'
import formidable from "formidable"
import fs from 'fs'

const s3Client = new AWS.S3({
    endpoint: process.env.DO_SPACES_URL,
    region: 'fra1',
    credentials: {
        accessKeyId: process.env.DO_SPACES_KEY,
        secretAccessKey: process.env.DO_SPACES_SECRET
    }
})

export const config = {
    api: {
        bodyParser: false
    }
}

export default async function uploadFile(req, res) {
    const { method } = req
    const form = formidable()
    const now = new Date()
    const fileGenericName = `${now.getTime()}`
    const allowedFileTypes = ['jpg', 'jpeg', 'png', 'webp']

    switch (method) {
        case "POST":
            try {
                form.parse(req, async (err, fields, files) => {
                    const fileType = files.file?.originalFilename?.split('.').pop().toLowerCase()
                    if (!files.file) {
                        return res.status(400).json({
                            status: 400,
                            message: 'no files'
                        })
                    }
                    if (allowedFileTypes.indexOf(fileType) === -1) {
                        return res.status(400).json({
                            message: 'bad file type'
                        })
                    }
                    const fileName = `${fileGenericName}.${fileType}`
                    try {
                        s3Client.putObject({
                            Bucket: process.env.DO_SPACES_BUCKET,
                            Key: `${fileName}`,
                            Body: fs.createReadStream(files.file.filepath),
                            ACL: "public-read"
                        }, (err, data) => {
                            console.log(err)
                            console.log(data)
                        })
                        const url = `${process.env.FILE_URL}/${fileName}`
                        return res.status(200).json({ url })
                    } catch (error) {
                        console.log(error)
                        throw new Error('Error Occured While Uploading File')
                    }
                });
                return res.status(200)
            } catch (error) {
                console.log(error)
                return res.status(500).end()
            }
        default:
            return res.status(405).end('Method is not allowed')
    }
}
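For what it's worth, a likely culprit is that the response is returned before putObject's callback has fired; on serverless hosts the function can be frozen or torn down as soon as the response is sent, so an upload that still finishes in time locally never completes in production. A minimal sketch of the fix, awaiting the upload (same s3Client and fileName as above) before responding:

await s3Client
    .putObject({
        Bucket: process.env.DO_SPACES_BUCKET,
        Key: fileName,
        Body: fs.createReadStream(files.file.filepath),
        ACL: 'public-read'
    })
    .promise() // aws-sdk v2: convert the request into a promise so it can be awaited

const url = `${process.env.FILE_URL}/${fileName}`
return res.status(200).json({ url })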

How do I make express middleware in class?

I currently use multer middleware like below:
const storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, "public");
    },
    filename: function (req, file, cb) {
        cb(null, req.params.id + "_" + file.originalname);
    },
});

export const multerUploadSingle = (req: Request, res: Response, next: NextFunction) => {
    const upload = multer({ storage: storage }).single("file");
    upload(req, res, (error: unknown) => {
        if (error instanceof multer.MulterError) {
            const message = `file upload fail: ${error.message}`;
            next(new HttpException(message, HttpStatus.BadRequest));
        } else if (error instanceof Error) {
            const message = `file upload fail: ${error.message}`;
            next(new HttpException(message, HttpStatus.InternalServerError));
        } else {
            // upload success
            next();
        }
    });
}
and use it in the router like this:
FileRouter.post("/upload/:id", multerUploadSingle, (req, res) => {...});
However, I wanted to refactor this middleware into a class, and rewrote the code like this:
export class Multer {
    private readonly storage: multer.StorageEngine;

    constructor() {
        this.storage = multer.diskStorage({
            destination: function (req, file, cb) {
                cb(null, "public");
            },
            filename: function (req, file, cb) {
                cb(null, req.params.id + "_" + file.originalname);
            },
        });
    }

    uploadSingle(req: Request, res: Response, next: NextFunction) {
        const upload = multer({ storage: this.storage }).single("file");
        upload(req, res, (error: unknown) => {
            if (error instanceof multer.MulterError) {
                const message = `file upload fail: ${error.message}`;
                next(new HttpException(message, HttpStatus.BadRequest));
            } else if (error instanceof Error) {
                const message = `file upload fail: ${error.message}`;
                next(new HttpException(message, HttpStatus.InternalServerError));
            } else {
                // upload success
                next();
            }
        });
    }
}

const multer = new Multer();
FileRouter.post("/upload/:id", multer.uploadSingle, (req, res) => {...});
To my understanding, both cases should produce the same result, but the latter, class-based middleware doesn't work at all. It seems the method uploadSingle is never called, so multer never uploads the file.
Did I make a mistake with the class usage? Or can Express only use middleware defined as plain functions?
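One likely cause, sketched under the assumption that the class above is otherwise unchanged: passing multer.uploadSingle as a route handler detaches the method from its instance, so this.storage is undefined when it runs, and naming the instance multer also shadows the imported multer module inside the method. Binding the method (and renaming the instance) should restore the original behavior:

const uploader = new Multer(); // renamed so the imported multer module is not shadowed
// bind so `this.storage` resolves to the instance inside uploadSingle
FileRouter.post("/upload/:id", uploader.uploadSingle.bind(uploader), (req, res) => {...});

Declaring uploadSingle as an arrow-function class property would achieve the same thing without the explicit bind.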
Your code should follow the MVC pattern.
You can do stuff like this:
routerFile.js
const express = require("express");
const router = express.Router();
const upload = require("../../configs/multer");
const postController = require("../../controllers/postController");

const multiUploadEvent = upload.fields([
    { name: "images", maxCount: 2 },
    { name: "video", maxCount: 2 }
]);

router.post("/add-event-post", multiUploadEvent, postController.addEventPost);

module.exports = router;
multer.js
const multer = require('multer');

const multerFilter = (req, file, cb) => {
    console.log("Mime type :", file.mimetype.split('/')[0]);
    if (file.mimetype.split('/')[0] === 'image' || file.mimetype.split('/')[0] === 'video' || file.mimetype.split('/')[0] === 'audio') {
        cb(null, true);
    } else {
        cb(new Error('Please upload img, audio, or video file only.'), false);
    }
};

const storage = multer.memoryStorage();

const upload = multer({
    storage: storage,
    fileFilter: multerFilter,
    limits: {
        fileSize: 50 * 1024 * 1024 // 50 MB
    },
});

module.exports = upload;
postController.js
const addEventPost = async (request, response) => {
    try {
        let { title, ..... } = request.body;
        const images = request.files.images;
        const video = request.files.video;
        console.log(title);
        console.log(images);
        console.log(video);
        //upload to services like aws and save to database
        .
        .
        .
        return response
            .status(200)
            .json({
                message: "Event post added successfully"
            });
    } catch (error) {
        console.log(error);
        response.status(500).json({
            error: "Something went wrong",
        });
    }
}
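Since this setup uses memoryStorage, each entry in request.files carries its bytes in file.buffer, so the elided AWS step can be sketched like this (aws-sdk v2 style; the bucket env variable is an assumption):

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// Upload one in-memory file (e.g. request.files.images[0]) and return its public URL.
const uploadToS3 = async (file) => {
    const result = await s3
        .upload({
            Bucket: process.env.S3_BUCKET, // assumed bucket name
            Key: `${Date.now()}_${file.originalname}`,
            Body: file.buffer, // memoryStorage keeps the whole file in a Buffer
            ContentType: file.mimetype,
        })
        .promise();
    return result.Location; // the URL to save to the database
};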

S3 to IPFS from Pinata

I am trying to upload a lot of files from S3 to IPFS via Pinata, but I haven't found anything like that in the Pinata documentation.
This is my solution, using the form-data library. I haven't tested it yet (I will soon; I need to code a few things first).
Is this a correct approach? Has anyone done something similar?
async uploadImagesFolder(
    items: ItemDocument[],
    bucket?: string,
    path?: string,
) {
    try {
        const form = new FormData();
        for (const item of items) {
            const file = getObjectStream(item.tokenURI, bucket, path);
            form.append('file', file, {
                filename: item.tokenURI,
            });
        }
        console.log(`Uploading files to IPFS`);
        const pinataOptions: PinataOptions = {
            cidVersion: 1,
        };
        const result = await pinata.pinFileToIPFS(form, {
            pinataOptions,
        });
        console.log(`Piñata Response:`, JSON.stringify(result, null, 2));
        return result.IpfsHash;
    } catch (e) {
        console.error(e);
    }
}
I had the same problem, so I found this: https://medium.com/pinata/stream-files-from-aws-s3-to-ipfs-a0e23ffb7ae5
But if I'm not wrong, the article uses a version different from JavaScript AWS SDK v3 (nowadays the most recent: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html).
This is for the client side with TypeScript. If you have that version, this code snippet works for me:
import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'

export const getStreamObjectInAwsS3 = async (data: YourParamsType) => {
    try {
        const BUCKET = data.bucketTarget
        const KEY = data.key
        const client = new S3Client({
            region: 'your-region',
            credentials: {
                accessKeyId: 'your-access-key',
                secretAccessKey: 'secret-key'
            }
        })
        const resource = await client.send(new GetObjectCommand({
            Bucket: BUCKET,
            Key: KEY
        }))
        const response = resource.Body
        if (response) {
            return new Response(await response.transformToByteArray()).blob()
        }
        return null
    } catch (error) {
        return null
    }
}
With the previous code you can get a Blob object, pass it on with the following method, and get the resource URL using the Pinata API:
import axios from 'axios'

export const uploadFileToIPFS = async (file: Response) => {
    const url = `https://api.pinata.cloud/pinning/pinFileToIPFS`
    const data = new FormData()
    data.append('file', file)
    try {
        const response = await axios.post(url, data, {
            maxBodyLength: Infinity,
            headers: {
                pinata_api_key: 'your-api',
                pinata_secret_api_key: 'your-secret'
            }
        })
        return {
            success: true,
            pinataURL: `https://gateway.pinata.cloud/ipfs/${response.data.IpfsHash}`
        }
    } catch (error) {
        console.log(error)
        return null
    }
}
I found this solution in that nice article, and you can explore other implementations there (including the Node.js side).
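For the Node.js side, a rough sketch of streaming an S3 object straight into Pinata (assuming @aws-sdk/client-s3 v3 and @pinata/sdk; region, keys, bucket, and object key are placeholders):

import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'
import pinataSDK from '@pinata/sdk'

const s3 = new S3Client({ region: 'your-region' })
const pinata = new pinataSDK('your-api-key', 'your-secret-api-key')

async function pinS3ObjectToIPFS(bucket: string, key: string) {
    const { Body } = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }))
    // In Node.js, Body is a Readable stream; form-data (used internally by the
    // Pinata SDK) derives the multipart filename from a `path` property.
    const stream = Body as NodeJS.ReadableStream & { path?: string }
    stream.path = key
    const result = await pinata.pinFileToIPFS(stream, { pinataOptions: { cidVersion: 1 } })
    return result.IpfsHash
}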

How to upload file from URL to S3 bucket (aws-sdk-js v3)?

I've tried the code below with no luck; it shows this error message:
(node:88634) UnhandledPromiseRejectionWarning: NotImplemented: A header you provided implies functionality that is not implemented
import { fromIni } from '@aws-sdk/credential-provider-ini'
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'
import https from 'https'
import { Readable } from 'stream'
import awsConfig from './aws-exports'

const s3Client = new S3Client({
    credentials: fromIni({ profile: 'default' }),
    region: 'ap-northeast-1',
})

async function getFileFromUrl(url: string): Promise<Readable> {
    return new Promise((resolve) => {
        https.get(url, (response) => {
            resolve(response)
        })
    })
}

async function upload(file: Readable) {
    const uploadCommand = new PutObjectCommand({
        Bucket: awsConfig.aws_user_files_s3_bucket,
        Key: 'test.jpg',
        Body: file,
        ACL: 'public-read',
    })
    await s3Client.send(uploadCommand)
}

async function migrate() {
    const file = await getFileFromUrl(
        'https://example.com/logo.png'
    )
    await upload(file)
    console.log('done')
}

migrate()
I could confirm that it's ok if I change the Body to a string...
Does anyone know how to do this correctly?
Thanks!
The problem here is that your getFileFromUrl function hands AWS a raw https stream of unknown length, and PutObject can't handle a body without a known Content-Length (hence the NotImplemented error). Buffer the whole response instead, so the SDK receives a Buffer:
async function getFileFromUrl (url) {
    return new Promise((resolve, reject) => {
        https.get(url, (response) => {
            const chunks = []
            // collect every chunk and resolve once the download has finished
            response.on('data', (d) => chunks.push(d))
            response.on('end', () => resolve(Buffer.concat(chunks)))
            response.on('error', reject)
        })
    })
}
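Alternatively, if you would rather keep streaming instead of buffering, the Upload helper from @aws-sdk/lib-storage accepts bodies of unknown length by doing a multipart upload under the hood (a sketch, reusing the s3Client, bucket, and Readable from the question):

import { Upload } from '@aws-sdk/lib-storage'

const parallelUpload = new Upload({
    client: s3Client,
    params: {
        Bucket: awsConfig.aws_user_files_s3_bucket,
        Key: 'test.jpg',
        Body: file, // a Readable works here, no Content-Length required
        ACL: 'public-read',
    },
})
await parallelUpload.done()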

Uploading photo from React Native (Expo) camera to AWS S3 after resizing by Lambda

Progress until now:
I have created an app. An image captured by the app is an object like this:
{
    "height": 4000,
    "uri": "file:///data/user/0/host.exp.exponent/cache/ExperienceData/%2540anonymous%252Fimage-upload-4ceaf845-d42a-4a1d-b1be-5a3f9cfd10ba/Camera/e3d441b6-2664-4c30-a093-35f7f0f09488.jpg",
    "width": 3000,
}
I have made an endpoint in API Gateway, which connects to a Lambda, and I'm sending the above data to my backend like this:
axios
    .post(uploadEndPoint, capturedImage, {
        headers: {
            'Content-Type': 'application/json',
        },
    })
    .then((response) => {
        console.log(response);
    })
    .catch((error) => {
        console.log(error.response);
    });
I get the captured data in my Lambda. First I'm trying to save it to S3 without resizing, like this; if that works, I will go for resizing.
import Responses from '../utils/responses';
import * as fileType from 'file-type';
import { v4 as uuid } from 'uuid';
import * as AWS from 'aws-sdk';

const s3 = new AWS.S3();
const allowedMimes = ['image/jpeg', 'image/png', 'image/jpg'];

export const handler = async (event) => {
    console.log('event>>', JSON.parse(event.body));
    try {
        const body = JSON.parse(event.body);
        if (!body) {
            return Responses._400({ message: 'incorrect body on request' });
        }
        let imageData = body;
        console.log('imageData>>', imageData);
        const buffer = Buffer.from(imageData, 'base64');
        console.log('buffer>>', buffer);
        const fileInfo = await fileType.fromBuffer(buffer);
        console.log(fileInfo);
        const detectedExt = fileInfo.ext;
        const detectedMime = fileInfo.mime;
        console.log('detectedExt>>', detectedExt);
        console.log('detectedMime>>', detectedMime);
        if (!allowedMimes.includes(detectedMime)) {
            return Responses._400({ message: 'mime is not allowed ' });
        }
        const name = uuid();
        const key = `${name}.${detectedExt}`;
        console.log(`writing image to bucket called ${key}`);
        await s3
            .putObject({
                Body: buffer,
                Key: key,
                ContentType: detectedMime,
                Bucket: process.env.imageUploadBucket,
                ACL: 'public-read',
            })
            .promise();
        const url = `https://${process.env.imageUploadBucket}.s3-${process.env.region}.amazonaws.com/${key}`;
        return Responses._200({
            imageURL: url,
        });
    } catch (error) {
        console.log('error', error);
        return Responses._400({
            message: error.message || 'failed to upload image',
        });
    }
};
fileInfo shows undefined.
Things I want:
Resize the image and upload it to S3 with that Lambda.
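A likely reason fileInfo shows undefined: the Lambda only ever receives the Expo camera metadata ({ height, uri, width }), and the file:// URI points at the device's local storage, which AWS cannot read, so there are no image bytes to detect. A sketch of sending the actual content as base64 from the app and resizing it in the Lambda (expo-file-system on the app side and sharp on the Lambda side are assumptions):

// App side (Expo): read the captured file into base64 and POST the bytes themselves.
import * as FileSystem from 'expo-file-system';
import axios from 'axios';

async function uploadCapturedImage(uri, uploadEndPoint) {
    const image = await FileSystem.readAsStringAsync(uri, {
        encoding: FileSystem.EncodingType.Base64,
    });
    return axios.post(uploadEndPoint, { image }, {
        headers: { 'Content-Type': 'application/json' },
    });
}

// Lambda side: decode and resize before the putObject call shown above.
import sharp from 'sharp';

async function resizeForUpload(eventBody) {
    const buffer = Buffer.from(JSON.parse(eventBody).image, 'base64');
    return sharp(buffer).resize({ width: 1080 }).toBuffer(); // hand this Buffer to putObject
}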

MERN and Amazon-s3 for file upload

How do I post a file to Amazon S3 using Node and React, and save its path to MongoDB, with Mongoose and Formidable?
private async storeFile(file: { buffer: Buffer, fileId: string }): Promise<string> {
    const awsConfig = new AWS.Config(storageConfig);
    const s3 = new AWS.S3(awsConfig);
    const params = {
        Bucket: storageConfig.s3Bucket,
        Key: `${storageConfig.s3Prefix}${file.fileId}`,
        Body: file.buffer, // the file already arrives as a Buffer, so no fs.readFile is needed
    };
    // await the upload; otherwise the location would be returned before it exists
    const s3Data: AWS.S3.ManagedUpload.SendData = await s3.upload(params).promise();
    return s3Data.Location;
}
In your service file, where you want to call this function, update the record in the collection:
const storageLink = await this.storeFile({ buffer, fileId });
const file = await FileModel.updateOne({ _id: fileId }, { // FileModel: your Mongoose model
    status: fileStatus.UPLOADED, // just a flag
    fileId: storageLink,
});