Downloading images from AWS S3 via Lambda and API Gateway, using the fetch API - amazon-s3

I'm trying to use the JavaScript fetch API, AWS API Gateway, AWS Lambda, and AWS S3 to create a service that allows users to upload and download media. The server is running Node.js 8.10; the browser is Google Chrome Version 69.0.3497.92 (Official Build) (64-bit).
In the long term, allowable media would include audio, video, and images. For now, I'd be happy just to get images to work.
The problem I'm having: my browser-side client, implemented with fetch, uploads JPEGs to S3 via API Gateway and Lambda just fine. I can use curl or the S3 Console to download the JPEG from my bucket and view it in an image viewer without any trouble.
But if I try to download the image via the browser-side client and fetch, I get nothing I can display in the browser.
Here's the code from the browser-side client:
fetch(
  'path/to/resource',
  {
    method: 'post',
    mode: "cors",
    body: an_instance_of_file_from_an_html_file_input_tag,
    headers: {
      Authorization: user_credentials,
      'Content-Type': 'image/jpeg',
    },
  }
).then((response) => {
  return response.blob();
}).then((blob) => {
  const img = new Image();
  img.src = URL.createObjectURL(blob);
  document.body.appendChild(img);
}).catch((error) => {
  console.error('upload failed', error);
});
Here's the server-side code, using Claudia.js:
const AWS = require('aws-sdk');
const ApiBuilder = require('claudia-api-builder');
const api = new ApiBuilder();
api.corsOrigin(allowed_origin);
api.registerAuthorizer('my authorizer', {
  providerARNs: ['arn of my cognito user pool']
});
api.get(
  '/media',
  (request) => {
    'use strict';
    const s3 = new AWS.S3();
    const params = {
      Bucket: 'name of my bucket',
      Key: 'name of an object that is confirmed to exist in the bucket and to be properly encoded as and readable as a JPEG',
    };
    return s3.getObject(params).promise().then((response) => {
      return response.Body;
    });
  }
);
module.exports = api;
Here are the initial OPTIONS request and response headers in Chrome's Network panel:
Here are the subsequent GET request and response headers:
What's interesting to me is that the image size is reported as 699873 (no units given) in the S3 Console, but Chrome reports the body of the GET response as roughly 2.5 MB.
The resulting image is just a 16x16 broken-image placeholder. I get no errors or warnings whatsoever in the browser's console or in CloudWatch.
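A quick diagnostic, using the same request as above, is to log what actually comes back before trying to render it (a minimal sketch; the options object is the one shown earlier):
fetch('path/to/resource', { /* same options as above */ })
  .then((response) => {
    // Log the response metadata before turning it into a blob.
    console.log('status:', response.status, 'content-type:', response.headers.get('Content-Type'));
    return response.blob();
  })
  .then((blob) => {
    console.log('blob size:', blob.size, 'blob type:', blob.type);
  });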
I've tried a lot of things and would be interested to hear what anyone out there can come up with.
Thanks in advance.

Claudia requires that the client specify which MIME type it will accept on binary payloads. So, keep the 'Content-Type' entry in the headers object on the client side:
fetch(
  'path/to/resource',
  {
    method: 'post',
    mode: "cors",
    body: an_instance_of_file_from_an_html_file_input_tag,
    headers: {
      Authorization: user_credentials,
      'Content-Type': 'image/jpeg', // <-- This is important.
    },
  }
).then((response) => {
  return response.blob();
}).then((blob) => {
  const img = new Image();
  img.src = URL.createObjectURL(blob);
  document.body.appendChild(img);
}).catch((error) => {
  console.error('upload failed', error);
});
Then, on the server side, you need to tell Claudia that the response should be binary and which MIME type to use:
const AWS = require('aws-sdk');
const ApiBuilder = require('claudia-api-builder');
const api = new ApiBuilder();
api.corsOrigin(allowed_origin);
api.registerAuthorizer('my authorizer', {
  providerARNs: ['arn of my cognito user pool']
});
api.get(
  '/media',
  (request) => {
    'use strict';
    const s3 = new AWS.S3();
    const params = {
      Bucket: 'name of my bucket',
      Key: 'name of an object that is confirmed to exist in the bucket and to be properly encoded as and readable as a JPEG',
    };
    return s3.getObject(params).promise().then((response) => {
      return response.Body;
    });
  },
  /** Add this. **/
  {
    success: {
      contentType: 'image/jpeg',
      contentHandling: 'CONVERT_TO_BINARY',
    },
  }
);
module.exports = api;
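For comparison (not part of the Claudia setup above), this is roughly what an equivalent binary response looks like with a plain Lambda proxy integration; it can help when reasoning about what API Gateway needs in order to return the JPEG unmangled. This is only a sketch: the bucket and key are placeholders, and image/jpeg (or */*) still has to be registered as a binary media type on the API.
// Plain Lambda proxy handler sketch (no Claudia).
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

exports.handler = async () => {
  const data = await s3.getObject({
    Bucket: 'name of my bucket', // placeholder
    Key: 'some-object.jpg',      // placeholder
  }).promise();

  return {
    statusCode: 200,
    headers: { 'Content-Type': 'image/jpeg' },
    body: data.Body.toString('base64'), // proxy response bodies must be strings
    isBase64Encoded: true,              // API Gateway decodes this back to binary
  };
};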

Related

Downloading blob (zip) from one endpoint and uploading to a different endpoint

I'm trying to download a zip from one endpoint and upload it to another from a front-end VueJS app, but it ends up corrupted on upload. I can do it with fileSaver, but I was hoping to skip the intermediate step of dropping it onto the HDD. If I download it and POST it with Postman it works fine, so I suspect there's an issue with the responseType or blob type etc., but there are a lot of combinations and permutations. cURL works fine as well, but is obviously not applicable here.
This is the code so far; the fetch code/config is from Postman, but how the uploaded file is stored/represented in Postman is opaque. zipEndpointUp is the endpoint that consumes the file, but it returns 'invalid archive'. localhost:8080 is proxied to the actual server to avoid CORS issues.
axios.get("http://localhost:8080/zipDirDown/download.zip, {
headers: {
Authorization: "Basic xxx",
mode: "no-cors",
responseType: 'arraybuffer',
}
}).then(res => {
const blob = new Blob([res.data], {type: "octet/stream"});
let myHeaders = new Headers();
myHeaders.append("Authorization", "Basic xxx");
let formData = new FormData();
formData.append("file", blob, "newZipFile.zip");
formData.append("name", "newZipFile Name");
var requestOptions = {
method: 'POST',
headers: myHeaders,
body: formData,
redirect: 'follow'
};
fetch("http://localhost:8080/zipEndpointUp", requestOptions)
.then(response =>
response.text())
.then(result =>
console.log(result))
.catch(error =>
console.log('error', error));
})
So it turns out I needed to await the res promise (and change the Blob type):
fetch("http://localhost:8080/zipDirDown/download.zip, {
headers: {
Authorization: "Basic xxx",
responseType: 'arraybuffer',
}
}).then(res => {
const asyncBlob = await res.blob();
const blob = new Blob([asyncBlob], {type: "application/zip"});
})
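Putting both halves together, the corrected round trip might look roughly like this (a sketch only, using the same placeholder endpoints and credentials as in the question):
async function copyZip() {
  // Download the zip and keep it binary, labelled as a zip archive.
  const res = await fetch("http://localhost:8080/zipDirDown/download.zip", {
    headers: { Authorization: "Basic xxx" },
  });
  const blob = new Blob([await res.blob()], { type: "application/zip" });

  // Re-upload it as multipart form data; the browser sets the boundary itself.
  const formData = new FormData();
  formData.append("file", blob, "newZipFile.zip");
  formData.append("name", "newZipFile Name");

  const upload = await fetch("http://localhost:8080/zipEndpointUp", {
    method: "POST",
    headers: { Authorization: "Basic xxx" },
    body: formData,
  });
  console.log(await upload.text());
}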

Get uploaded object URL with Javascript 'aws-sdk' v3

Currently, we are using aws-sdk v2 and extracting the uploaded file's URL this way:
const res = await S3Client
  .upload({
    Body: body,
    Bucket: bucket,
    Key: key,
    ContentType: contentType,
  })
  .promise();
return res.Location;
Now we have to upgrade to aws-sdk v3, and the new way to upload files looks like this
const command = new PutObjectCommand({
  Body: body,
  Bucket: bucket,
  Key: key,
  ContentType: contentType,
});
const res = await S3Client.send(command);
Unfortunately, the res object no longer contains a Location property.
The getSignedUrl SDK function doesn't look suitable because it just generates a URL with an expiration date (which could probably be set to some extra-huge duration, but we still need to be able to analyze the URL path).
Building the URL manually does not look like a good idea or a stable solution to me.
Answering myself: I don't know whether a better solution exists, but here is how I do it
const command = new PutObjectCommand({
  Body: body,
  Bucket: bucket,
  Key: key,
  ContentType: contentType,
});
const [res, region] = await Promise.all([
  s3Client.send(command),
  s3Client.config.region(),
]);
const url = `https://${bucket}.s3.${region}.amazonaws.com/${key}`
You can use the Upload class from "@aws-sdk/lib-storage", with sample code as below.
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client } from "@aws-sdk/client-s3";

const target = { Bucket, Key, Body };
try {
  const parallelUploads3 = new Upload({
    client: new S3Client({}),
    tags: [...], // optional tags
    queueSize: 4, // optional concurrency configuration
    leavePartsOnError: false, // optional manually handle dropped parts
    params: target,
  });
  parallelUploads3.on("httpUploadProgress", (progress) => {
    console.log(progress);
  });
  await parallelUploads3.done();
} catch (e) {
  console.log(e);
}
Make sure you return the result of parallelUploads3.done(); the object it resolves with includes the Location of the uploaded object, as in the sketch below.
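For example (a minimal sketch using the parallelUploads3 instance from the snippet above):
// done() resolves with the upload output, which includes the Location
// of the uploaded object.
const result = await parallelUploads3.done();
console.log(result.Location); // e.g. https://<bucket>.s3.<region>.amazonaws.com/<key>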
Reference
https://stackoverflow.com/a/70159394/16729176

Uploading image - data appears like this "���"�!1A"Qaq��2��B�#" and image is blank - Next.js application upload to DigitalOcean Spaces / AWS S3

I am trying to let my users upload photos in a Next.js application.
I set up a remote database and I am writing to the database properly, but the images are appearing blank. I'm thinking it must be a problem with the format of the data coming in.
Here is my code on the front end in React:
async function handleProfileImageUpload(e) {
  const file = e.target.files[0];
  await fetch('/api/image/profileUpload', {
    method: 'POST',
    body: file,
    'Content-Type': 'image/jpg',
  })
    .then(res => {
      console.log('final:', res);
    });
};
return (
  <label htmlFor="file-upload">
    <div>
      <img src={profileImage} className="profile-image-lg dashboard-profile-image"/>
      <div id="dashboard-image-hover" >Upload Image</div>
    </div>
  </label>
  <input id="file-upload" type="file" onChange={handleProfileImageUpload}/>
)
The "file" I declare above (const file = e.target.files[0]) appears like this on console.log(file):
+ --------++-+-++-+------------+----++-+--7--7----7-���"�!1A"Qaq��2��B�#br���$34R����CSst���5����)!1"AQaq23B����
?�#��P�n�9?Y�
ޞ�p#��zE� Nk�2iH��l��]/P4��JJ!��(�#�r�Mң[ ���+���PD�HVǵ�f(*znP�>�HRT�!W��\J���$�p(Q�=JF6L�ܧZ�)�z,[�q��� *
�i�A\5*d!%6T���ͦ�#J{6�6��
k#��:JK�bꮘh�A�%=+E q\���H
q�Q��"�����B(��OЛL��B!Le6���(�� aY
�*zOV,8E�2��IC�H��*)#4է4.�ɬ(�<5��j!§eR27��
��s����IdR���V�u=�u2a��
... and so on. It's long.
I am uploading to Digital Ocean's Spaces object storage, which interfaces with AWS S3. Again, my application is written in Next.js and I am using a serverless environment.
Here is the API route I am sending it to ('/api/image/profileUpload.js'):
import AWS from 'aws-sdk';

export default async function handler(req, res) {
  // get the image data
  let image = req.body;
  // create S3 instance with credentials
  const s3 = new AWS.S3({
    endpoint: new AWS.Endpoint('nyc3.digitaloceanspaces.com'),
    accessKeyId: process.env.SPACES_KEY,
    secretAccessKey: process.env.SPACES_SECRET,
    region: 'nyc3',
  });
  // create parameters for upload
  const uploadParams = {
    Bucket: 'oscarexpert',
    Key: 'asdff',
    Body: image,
    ContentType: "image/jpeg",
    ACL: "public-read",
  };
  // execute upload
  s3.upload(uploadParams, (err, data) => {
    if (err) return console.log('reject', err)
    else return console.log('resolve', data)
  })
  // returning arbitrary object for now
  return res.json({});
};
When I console.log(image), it shows the same garbled string I posted above, so I know it's receiving the exact same data. Maybe it needs to be parsed further?
The code above is directly from a DigitalOcean tutorial, adapted to my environment. Note the "Body" parameter, which is where the garbled string is being passed in.
What I've tried:
Stringifying the "image" before passing it to the Body param
Using multer-s3 to process the request on the backend
Requesting through Postman (the image comes in with the exact same garbled format)
I've spent days on this issue. Any guidance would be much appreciated.
Figured it out. I wasn't encoding the image properly in my Next.js serverless backend.
First, on the front end, I made my fetch request like this. It's important to send the file as FormData for the next step in the backend:
async function handleProfileImageUpload(e) {
  const file = e.target.files[0];
  const formData = new FormData();
  formData.append('file', file);
  // CHECK THAT THE FILE IS PROPER FORMAT (size, type, etc)
  let url = false;
  await fetch(`/api/image/profileUpload`, {
    method: 'POST',
    body: formData,
    'Content-Type': 'image/jpg',
  })
}
There were several components that helped me finally do this on the backend, so I am just going to post the code I ended up with. Here's the API route:
import AWS from 'aws-sdk';
import formidable from 'formidable-serverless';
import fs from 'fs';

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  // create S3 instance with credentials
  const s3 = new AWS.S3({
    endpoint: new AWS.Endpoint('nyc3.digitaloceanspaces.com'),
    accessKeyId: process.env.SPACES_KEY,
    secretAccessKey: process.env.SPACES_SECRET,
    region: 'nyc3',
  });
  // parse request to readable form
  const form = new formidable.IncomingForm();
  form.parse(req, async (err, fields, files) => {
    // Account for parsing errors
    if (err) return res.status(500);
    // Read file
    const file = fs.readFileSync(files.file.path);
    // Upload the file
    s3.upload({
      // params
      Bucket: process.env.SPACES_BUCKET,
      ACL: "public-read",
      Key: 'something',
      Body: file,
      ContentType: "image/jpeg",
    })
      .send((err, data) => {
        if (err) {
          console.log('err', err)
          return res.status(500);
        };
        if (data) {
          console.log('data', data)
          return res.json({
            url: data.Location,
          });
        };
      });
  });
};
If you have any questions feel free to leave a comment.

How to upload local device image using Axios to S3 bucket

I need to upload an image directly to an S3 bucket. I am using React Native with react-native-image-picker to select a photo, which returns a local image URI. Here is my code right now:
ImagePicker.showImagePicker(options, response => {
  var bodyFormData = new FormData(); // If I don't use FormData I end up
                                     // uploading the json not an image
  bodyFormData.append('image', {
    uri: response.uri, // uri rather than data to avoid loading into memory
    type: 'image/jpeg'
  });
  const uploadImageRequest = {
    method: 'PUT',
    url: presignedS3Url,
    body: bodyFormData,
    headers: {
      'Content-Type': 'multipart/form-data'
    }
  };
  axios(uploadImageRequest);
});
This almost works. When I check my S3 bucket, I have a file that's nearly an image. It has the following format:
--Y_kogEdJ16jhDUS9qhn.KjyYACKZGEw0gO-8vPw3BcdOMIrqVtmXsdJOLPl6nKFDJmLpvj^M
content-disposition: form-data; name="image"^M
content-type: image/jpeg^M
^M
<Image data>
If I manually go in and delete the header, then I have my image! However, I need to be uploading an image directly to S3, which clients will be grabbing and expecting to already be in a proper image format.
I can make this work using response.data and decoding to a string and uploading that directly, but for the sake of memory I'd rather not do this.
Upload image to S3 from client using AJAX with presigned URL
It's been a while since you posted your question so I guess you already found a solution, but anyway... I was trying to do the same, i.e. upload an image to S3 using axios, but I just wasn't able to make it work properly. Fortunately, I found out that we can easily do the trick with plain AJAX:
const xhr = new XMLHttpRequest();
xhr.open('PUT', presignedS3Url);
xhr.onreadystatechange = function() {
  if (xhr.readyState === 4) {
    if (xhr.status === 200) {
      console.log('Image successfully uploaded to S3');
    } else {
      console.log('Error while sending the image to S3.\nStatus:', xhr.status, "\nError text: ", xhr.responseText);
    }
  }
}
xhr.setRequestHeader('Content-Type', 'image/jpeg');
xhr.send({ uri: imageUri, type: 'image/jpeg', name: fileName });
This code is taken from this really useful article which borrows from this blog.
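If you'd rather stay with axios, the same idea applies: skip FormData entirely so no multipart boundary gets written into the stored object. A rough sketch, assuming a React Native version whose fetch can turn the local URI into a Blob (presignedS3Url and response.uri are the same placeholders used above):
// Read the picked image into a Blob (raw JPEG bytes, no form wrapper)...
const localImage = await fetch(response.uri);
const blob = await localImage.blob();

// ...and PUT it straight to the presigned URL. The Content-Type should match
// whatever type the URL was signed with, if one was included in the signature.
await axios.put(presignedS3Url, blob, {
  headers: { 'Content-Type': 'image/jpeg' },
});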

Direct Upload to S3 from the browser with Authorization Signature Ver 4

I need to upload a file to S3 directly from the browser. In the beginning I created a script that works, but to authorize it I have to include my credentials (accessKeyId and secretAccessKey), which is not secure.
I figured out that I can use the "Authorization Signature" for authorization.
It seems great, but I can't find where to put this authorization header on the request in the upload() method.
An example of my authorization header:
Authorization: AWS4-HMAC-SHA256
Credential=/20151016//s3/aws4_request,
SignedHeaders=content-type;host;x-amz-date,
Signature=4eee344a71a58623febc4079024a27cb62f3d26546695422244fcefe50d0168d
Thanks for your advice.
I have found a solution for this issue. My solution is based on an example from this site.
In the final solution I don't use the JavaScript SDK; instead I use a POST form whose authorization fields are sent as POST parameters.
You can enclose a signed policy document with your POST request in order to authenticate securely, with AWS Signature Version 4.
If you're on Node, you can use the aws-s3-form package on the server to generate the necessary form data your client requires in order to send a successful request to S3.
You might want to read my blog post on the subject for full insight.
Example Server Side Code (Node)
let AwsS3Form = require('aws-s3-form')
[...]
// A hapi.js server route
server.route({
  method: ['GET',],
  path: '/api/s3Settings',
  config: {
    auth: 'session',
    handler: (request, reply) => {
      let {key,} = request.query
      let keyPrefix = `u/${request.auth.credentials.username}/`
      let region = process.env.S3_REGION
      let s3Form = new AwsS3Form({
        accessKeyId: process.env.AWS_ACCESS_KEY,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        region,
        bucket,
        keyPrefix,
        successActionStatus: 200,
      })
      let url = `https://s3.${region}.amazonaws.com/${bucket}/${keyPrefix}${key}`
      let formData = s3Form.create(key)
      reply({
        bucket,
        region,
        url,
        fields: formData.fields,
      })
    },
  },
})
Example Client Side Code
let R = require('ramda')
let ajax = require('./ajax')

class S3Uploader {
  constructor({folder,}) {
    this.folder = folder
  }

  send(file) {
    let key = `${this.folder}/${file.name}`
    return ajax.getJson(`s3Settings`, {key,})
      .then((s3Settings) => {
        let formData = new FormData()
        R.forEach(([key, value,]) => {
          formData.append(key, value)
        }, R.toPairs(s3Settings.fields))
        formData.append('file', file)
        return new Promise((resolve, reject) => {
          let request = new XMLHttpRequest()
          request.onreadystatechange = () => {
            if (request.readyState === XMLHttpRequest.DONE) {
              if (request.status === 200) {
                resolve(s3Settings.url)
              } else {
                reject(request.responseText)
              }
            }
          }
          let url = `https://s3.${s3Settings.region}.amazonaws.com/${s3Settings.bucket}`
          request.open('POST', url, true)
          request.send(formData)
        })
      }, (error) => {
        throw new Error(`Failed to receive S3 settings from server`)
      })
  }
}
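A hypothetical usage of the uploader above (the folder name and file variable are placeholders; file would typically come from an input element's files list):
// Upload a file and log the resulting S3 URL on success.
let uploader = new S3Uploader({folder: 'user-uploads',})
uploader.send(file)
  .then((url) => {
    console.log(`Uploaded to ${url}`)
  })
  .catch((error) => {
    console.error(error)
  })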