Image upload using react-admin

I am new to react-admin. I am using react-admin to upload a file, following the steps mentioned in the tutorial.
But after I submit the request, the HTTP trace looks like the following. I see a blob link instead of a Base64 image payload.
{
"pictures": {
"rawFile": {
"preview": "blob:http://127.0.0.1:3000/fedcd180-cdc4-44df-b8c9-5c7196788dc6"
},
"src": "blob:http://127.0.0.1:3000/fedcd180-cdc4-44df-b8c9-5c7196788dc6",
"title": "Android_robot.png"
}
}
Can someone please advise how to get the Base64 image payload instead of the blob link?

Check whether you have this handler from the tutorial; most likely you did not change the resource name from posts to your own:
const addUploadCapabilities = requestHandler => (type, resource, params) => {
    if (type === 'UPDATE' && resource === 'posts') {
        // ... convert params.data.pictures to Base64 here

Create a custom dataProvider to convert the picture to Base64:
import restServerProvider from 'ra-data-json-server';
const servicesHost = 'http://localhost:8080/api';
const dataProvider = restServerProvider(servicesHost);
const myDataProvider = {
    ...dataProvider,
    create: (resource, params) => {
        if (resource !== 'your-route' || !params.data.pictures) {
            // fallback to the default implementation
            return dataProvider.create(resource, params);
        }
        const myFile = params.data.pictures;
        if (!(myFile.rawFile instanceof File)) {
            return Promise.reject(new Error('Error: Not a file...')); // Didn't test this...
        }
        return convertFileToBase64(myFile)
            .then(picture64 => ({
                src: picture64,
                title: `${myFile.title}`
            }))
            .then(transformedMyFile => dataProvider.create(resource, {
                ...params,
                data: {
                    ...params.data,
                    myFile: transformedMyFile
                }
            }));
    }
};
const convertFileToBase64 = file => new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file.rawFile);
    reader.onload = () => resolve(reader.result);
    reader.onerror = reject;
});

export default myDataProvider;
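For completeness, here is a minimal wiring sketch (assuming a react-admin app; the your-route resource and its create view are placeholders) of passing the custom dataProvider to the Admin component:
// App.js - minimal sketch, assuming react-admin and a hypothetical YourRouteCreate view
import * as React from 'react';
import { Admin, Resource } from 'react-admin';
import myDataProvider from './myDataProvider';
import { YourRouteCreate } from './yourRoute'; // hypothetical create view containing an <ImageInput source="pictures">

const App = () => (
    <Admin dataProvider={myDataProvider}>
        <Resource name="your-route" create={YourRouteCreate} />
    </Admin>
);

export default App;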
Then read the image data in your server API:
exports.create = (req, res) => {
    if (req.body.myFile) {
        const file = req.body.myFile;
        const fs = require('fs');
        const data = file.src.replace(/^data:image\/\w+;base64,/, "");
        const buf = Buffer.from(data, 'base64');
        fs.writeFile(`upload/${file.title}`, buf, err => {
            if (err) throw err;
            console.log('Saved!');
        });
    }
};
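As a usage sketch for the server side (assuming an Express app; the route path and file layout are assumptions), the controller above could be mounted like this:
// server.js - minimal sketch, assuming Express and the controller above exported from ./controllers/upload.js
const express = require('express');
const uploadController = require('./controllers/upload'); // hypothetical path

const app = express();
// Base64-encoded images easily exceed the default JSON body limit, so raise it
app.use(express.json({ limit: '10mb' }));

app.post('/api/your-route', uploadController.create);

app.listen(8080, () => console.log('API listening on http://localhost:8080/api'));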

Related

stream s3 to dynamodb with fast-csv : not all data inserted

When a CSV file is uploaded to my S3 bucket, my Lambda is triggered to insert the data into DynamoDB.
I need a stream because the file is too large to be downloaded as a full object.
const batchWrite = async (clientDynamoDB, itemsToProcess) => {
const ri = {};
ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
const params = { RequestItems: ri };
await clientDynamoDB.batchWriteItem(params).promise();
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
return new Promise((resolve, reject) => {
const sizeChunk = 25;
let itemsToProcess = [];
stream
.pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
.on("data", (row) => {
stream.pause();
itemsToProcess.push(row);
if (itemsToProcess.length === sizeChunk) {
batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
stream.resume();
});
itemsToProcess = [];
}
})
.on("error", (err) => {
console.log(err);
reject("Error");
})
.on("end", () => {
stream.pause();
console.log("end");
batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
resolve("OK");
});
});
});
}
module.exports.main = async (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
const AWS = require('aws-sdk');
const https = require('https'); // needed for the keep-alive agent below
const s3 = new AWS.S3();
const object = event.Records[0].s3;
const bucket = object.bucket.name;
const file = object.object.key;
const agent = new https.Agent({
keepAlive: true
});
const client = new AWS.DynamoDB({
httpOptions: {
agent
}
});
try {
//get Stream csv data
const stream = s3
.getObject({
Bucket: bucket,
Key: file
})
.createReadStream()
.on('error', (e) => {
console.log(e);
});
await runStreamPromiseAsync(stream, client);
} catch (e) {
console.log(e);
}
};
When my file has 1,000 lines everything is inserted, but when it has 5,000 lines my function inserts only around 3,000, and this number is random... sometimes more, sometimes less.
So I'd like to understand what I am missing here.
I also read this article, but to be honest, even if you pause the second stream, the first one is still running. So if someone has any ideas on how to do this, it would be greatly appreciated!
Thanks
I found out why it was not fully processed: the callback of batchWriteItem can return UnprocessedItems. So I changed the batchWrite function and also runStreamPromiseAsync a little bit, because I might not have had all the items from itemsToProcess processed.
Anyway here is the full code :
const batchWrite = (client, itemsToProcess) => {
const ri = {};
ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
const items = { RequestItems: ri };
const processItemsCallback = function(err, data) {
return new Promise((resolve, reject) => {
if(!data || data.length === 0){
return resolve();
}
if(err){
return reject(err);
}
let params = {};
params.RequestItems = data.UnprocessedItems;
return client.batchWriteItem(params, processItemsCallback);
});
};
return client.batchWriteItem(items, processItemsCallback );
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
return new Promise((resolve, reject) => {
const sizeChunk = 25;
let itemsToProcess = [];
let arrayPromise = [];
stream
.pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
.on("error", (err) => {
console.log(err);
reject("Error");
})
.on('data', data => {
itemsToProcess.push(data);
if(itemsToProcess.length === sizeChunk){
arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
itemsToProcess = [];
}
})
.on('end', () => {
if(itemsToProcess.length !== 0){
arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
}
resolve(Promise.all(arrayPromise).catch(e => {
reject(e)
}));
});
});
}
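For reference, here is a minimal alternative sketch (assuming the AWS SDK v2 .promise() API and the same TABLE_DYNAMO / toPutRequest helpers as above) that keeps retrying UnprocessedItems with async/await, so each batch is fully written before its promise resolves:
// Alternative sketch: resubmit UnprocessedItems until DynamoDB accepts everything
const batchWriteWithRetry = async (client, itemsToProcess) => {
    let requestItems = {
        [TABLE_DYNAMO]: itemsToProcess.map((itm) => toPutRequest(itm)),
    };
    while (requestItems && Object.keys(requestItems).length > 0) {
        const data = await client.batchWriteItem({ RequestItems: requestItems }).promise();
        requestItems = data.UnprocessedItems;
        if (requestItems && Object.keys(requestItems).length > 0) {
            // small back-off before retrying throttled items
            await new Promise((r) => setTimeout(r, 100));
        }
    }
};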

Cypress - check if the file is downloaded

I have a little problem trying to check whether a file is downloaded.
A button click generates a PDF file and starts its download.
I need to check that this works.
Can Cypress do this?
I would suggest you have a look at the HTTP response body.
You can register the request with cy.server().route('GET', 'url').as('download') (check the Cypress documentation if you don't know these methods)
and then catch the response to verify the body is not empty:
cy.wait('@download')
.then((xhr) => {
assert.isNotNull(xhr.response.body, 'Body not empty')
})
Or, if a popup announces success when the download completes, you can verify the existence of that popup instead:
cy.get('...').find('.my-pop-up-success').should('be.visible')
Best,
EDIT
Please note that cy.server() and cy.route() are deprecated:
cy.server() and cy.route() are deprecated in Cypress 6.0.0. In a future release, support for cy.server() and cy.route() will be removed. Consider using cy.intercept() instead.
According to the migration guide, this is the equivalent: cy.intercept('GET', 'url').as('download')
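Put together, a minimal sketch of the same body check with the newer API (the URL pattern is a placeholder):
// Sketch using cy.intercept() (Cypress 6+)
cy.intercept('GET', 'url').as('download')

cy.get('...').click() // trigger the download

cy.wait('@download').then(({ response }) => {
  assert.isNotNull(response.body, 'Body not empty')
})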
cypress/plugins/index.js
const path = require('path');
const fs = require('fs');
const downloadDirectory = path.join(__dirname, '..', 'downloads');
const findPDF = (PDFfilename) => {
const PDFFileName = `${downloadDirectory}/${PDFfilename}`;
const contents = fs.existsSync(PDFFileName);
return contents;
};
const hasPDF = (PDFfilename, ms) => {
const delay = 10;
return new Promise((resolve, reject) => {
if (ms < 0) {
return reject(
new Error(`Could not find PDF ${downloadDirectory}/${PDFfilename}`)
);
}
const found = findPDF(PDFfilename);
if (found) {
return resolve(true);
}
setTimeout(() => {
hasPDF(PDFfilename, ms - delay).then(resolve, reject);
}, delay);
});
};
module.exports = (on, config) => {
require('@cypress/code-coverage/task')(on, config);
on('before:browser:launch', (browser, options) => {
if (browser.family === 'chromium') {
options.preferences.default['download'] = {
default_directory: downloadDirectory,
};
return options;
}
if (browser.family === 'firefox') {
options.preferences['browser.download.dir'] = downloadDirectory;
options.preferences['browser.download.folderList'] = 2;
options.preferences['browser.helperApps.neverAsk.saveToDisk'] =
'text/csv';
return options;
}
});
on('task', {
isExistPDF(PDFfilename, ms = 4000) {
console.log(
`looking for PDF file in ${downloadDirectory}`,
PDFfilename,
ms
);
return hasPDF(PDFfilename, ms);
},
});
return config;
};
integration/pdfExport.spec.js
before('Clear downloads folder', () => {
cy.exec('rm cypress/downloads/*', { log: true, failOnNonZeroExit: false });
});
it('Should download my PDF file and verify its present', () => {
cy.get('ExportPdfButton').click();
cy.task('isExistPDF', 'MyPDF.pdf').should('equal', true);
});
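As an alternative sketch (assuming Cypress 6+ and the default cypress/downloads folder), cy.readFile retries until the file exists, so a similar check works without a custom task:
// Alternative sketch: cy.readFile retries until the file appears or the timeout is hit
it('Should download my PDF file and verify it is present', () => {
  cy.get('ExportPdfButton').click();
  cy.readFile('cypress/downloads/MyPDF.pdf', null, { timeout: 15000 })
    .should((buffer) => expect(buffer.length).to.be.greaterThan(0));
});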

Cannot upload image to S3 using serverless framework but it works offline (buffer issue)

I'm trying to deploy a lambda function allowing me to upload a picture to S3.
The lambda works well offline, but when I deploy it to AWS, the function doesn't work.
The first error I encountered was this one:
ERROR (node:7) [DEP0005] DeprecationWarning: Buffer() is deprecated due to security and usability issues. Please use the Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() methods instead.
So I followed the recommendation and used the Buffer.from() method instead, but that doesn't work either: the lambda runs until the timeout.
Can someone tell me where I went wrong, or suggest another solution?
Below my lambda function :
const AWS = require("aws-sdk");
const Busboy = require("busboy");
const uuidv4 = require("uuid/v4");
require("dotenv").config();
AWS.config.update({
accessKeyId: process.env.ACCESS_KEY_ID,
secretAccessKey: process.env.SECRET_ACCESS_KEY,
subregion: process.env.SUB_REGION
});
const s3 = new AWS.S3();
const getContentType = event => {
// see the second block of codes
};
const parser = event => {
// see the third block of codes
};
module.exports.main = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
const uuid = uuidv4();
const uploadFile = async (image, uuid) =>
new Promise(() => {
// const bitmap = new Buffer(image, "base64"); // <====== deprecated
const bitmap = Buffer.from(image, "base64"); // <======== problem here
const params = {
Bucket: "my_bucket",
Key: `${uuid}.jpeg`,
ACL: "public-read",
Body: bitmap,
ContentType: "image/jpeg"
};
s3.putObject(params, function(err, data) {
if (err) {
return callback(null, "ERROR");
}
return callback(null, "SUCCESS");
});
});
parser(event).then(() => {
uploadFile(event.body.file, uuid);
});
};
getContentType() :
const getContentType = event => {
const contentType = event.headers["content-type"];
if (!contentType) {
return event.headers["Content-Type"];
}
return contentType;
};
parser()
const parser = event =>
new Promise((resolve, reject) => {
const busboy = new Busboy({
headers: {
"content-type": getContentType(event)
}
});
const result = {};
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
file.on("data", data => {
result.file = data;
});
file.on("end", () => {
result.filename = filename;
result.contentType = mimetype;
});
});
busboy.on("field", (fieldname, value) => {
result[fieldname] = value;
});
busboy.on("error", error => reject(error));
busboy.on("finish", () => {
event.body = result;
resolve(event);
});
busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
busboy.end();
});
new Buffer(number) // Old
Buffer.alloc(number) // New
new Buffer(string) // Old
Buffer.from(string) // New
new Buffer(string, encoding) // Old
Buffer.from(string, encoding) // New
new Buffer(...arguments) // Old
Buffer.from(...arguments) // New
You are using callbackWaitsForEmptyEventLoop, which basically lets the lambda function think that the work is not over yet. Also, you are wrapping the upload in a promise but never resolving it. You can simplify this logic using the built-in .promise() helper of the aws-sdk:
module.exports.main = async event => {
const uuid = uuidv4();
await parser(event); // not sure if this needs to be async or not. check
const bitmap = Buffer.from(event.body.file, "base64"); // <======== problem here
const params = {
Bucket: "my_bucket",
Key: `${uuid}.jpeg`,
ACL: "public-read",
Body: bitmap,
ContentType: "image/jpeg"
};
const response = await s3.putObject(params).promise();
return response;
};

How to upload image to firebase using react native

I need a way to upload an image to Firebase.
I tried to use the react-native-fetch-blob library,
but I think there is something wrong with how I installed it.
No need to use react-native-fetch-blob. Here is how I do it in my project.
Install both react-native-firebase and react-native-image-picker. Follow the installation steps from their documentation guides.
Then implement two small functions: one to pick the image and one to upload it to Firebase. Here is the sample code.
// 1. Import required library
import firebase from 'react-native-firebase';
import ImagePicker from 'react-native-image-picker';
// 2. Create a function to pick the image
const pickImage = () => {
return new Promise((resolve, reject) => {
ImagePicker.showImagePicker(pickerOptions, response => {
if (response.didCancel) return;
if (response.error) {
const message = `An error was occurred: ${response.error}`;
reject(new Error(message));
return;
}
const { path: uri } = response;
resolve(uri);
});
});
};
// 3. Create a function to upload to firebase
const uploadImage = (fileName, uri) => {
  return new Promise((resolve, reject) => {
    firebase
      .storage()
      .ref(`uploads/${fileName}`)
      .putFile(uri)
      .then(resolve)
      .catch(reject);
  });
};
Then simply fire both functions as you need; here is a sample that picks an image and immediately uploads it.
const pickImageAndUpload = async () => {
const uri = await pickImage();
const fileName = 'someImage.jpg';
const { state, downloadURL } = await uploadImage(fileName, uri);
}
async function uploadImageAsync(itemImage, passedParameter, ItemName, ItemDesc, ItemPrice, ItemWeight) {
  const response = await fetch(itemImage);
  const blob = await response.blob();
  console.log("uri of the element is", blob);
  var storageRef = firebase.storage().ref();
  var file = blob;
  var metadata = {
    contentType: 'image/jpeg',
  };
  const timeStamp = Date.now();
  var uploadTask = storageRef
    .child('CategoryDescription' + "/" + `${passedParameter}` + "/" + `${ItemName}`)
    .put(file, metadata);
}
//For image pick
pickImage = async () => {
  const { CAMERA, CAMERA_ROLL } = Permissions;
  const permissions = {
    [CAMERA]: await Permissions.askAsync(CAMERA),
    [CAMERA_ROLL]: await Permissions.askAsync(CAMERA_ROLL),
  };
  if (permissions[CAMERA].status === 'granted' && permissions[CAMERA_ROLL].status === 'granted') {
    let result = await ImagePicker.launchImageLibraryAsync({
      allowsEditing: false,
      aspect: [4, 3],
      quality: 0.5,
    });
    // console.log(result);
    if (!result.cancelled) {
      this.setState({ itemImage: result.uri });
    }
  }
};
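Note that uploadImageAsync above never waits for uploadTask to finish. Inside that function, a minimal sketch (assuming the Firebase JS SDK's UploadTask API) of waiting for the upload and reading the resulting download URL would be:
// Sketch: wait for the upload to complete, then fetch a public download URL
uploadTask
  .then((snapshot) => snapshot.ref.getDownloadURL())
  .then((downloadURL) => {
    console.log('File available at', downloadURL);
  })
  .catch((error) => {
    console.error('Upload failed', error);
  });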

upload an image to amazon s3 in react-native

I am trying to upload an image to Amazon S3. If possible, can anyone provide links/docs on how to upload to Amazon S3? Any help is much appreciated.
S3 options:
// this.state.s3options in YourComponent
{
"url": "https://yourapp.s3.eu-central-1.amazonaws.com",
"fields": {
"key": "cache/22d65141b48c5c44eaf93a0f6b0abc30.jpeg",
"policy": "eyJleHBpcm...1VDE0Mzc1OVoifV19",
"x-amz-credential": "AK...25/eu-central-1/s3/aws4_request",
"x-amz-algorithm": "AWS4-HMAC-SHA256",
"x-amz-date": "20161125T143759Z",
"x-amz-signature": "87863c360...b9b304bfe650"
}
}
Component:
class YourComponent extends Component {
// ...
// fileSource looks like: {uri: "content://media/external/images/media/13", isStatic: true}
async uploadFileToS3(fileSource) {
try {
var formData = new FormData();
// Prepare the formData by the S3 options
Object.keys(this.state.s3options.fields).forEach((key) => {
formData.append(key, this.state.s3options.fields[key]);
});
formData.append('file', {
uri: fileSource.uri,
type: 'image/jpeg',
});
formData.append('Content-Type', 'image/jpeg')
var request = new XMLHttpRequest();
request.onload = function(e) {
if (e.target.status === 204) {
// Result in e.target.responseHeaders.Location
this.setState({avatarSourceRemote: {uri: e.target.responseHeaders.Location}})
}
}.bind(this)
request.open('POST', this.state.s3options.url, true);
request.setRequestHeader('Content-type', 'multipart/form-data');
request.send(formData);
} catch(error) {
console.error(error);
}
}
// Example display the uploaded image
render() {
if (this.state.avatarSourceRemote) {
return (
<Image source={this.state.avatarSourceRemote} style={{width: 100, height: 100}} />
);
} else {
return (
<Text>No Image</Text>
);
}
}
}
This works for me
import fs from 'react-native-fs';
import {decode} from 'base64-arraybuffer';
import AWS from 'aws-sdk';
export const uploadFileToS3 = async (file) => {
const BUCKET_NAME = 'XXXXXXXXXX';
const IAM_USER_KEY = 'XXXXXXXXXX';
const IAM_USER_SECRET = 'XXXXXXXXXXXXXXX';
const s3bucket = new AWS.S3({
accessKeyId: IAM_USER_KEY,
secretAccessKey: IAM_USER_SECRET,
Bucket: BUCKET_NAME,
signatureVersion: 'v4',
});
const contentType = file.type;
const contentDeposition = `inline;filename="${file.name}"`;
const fPath = file.uri;
const base64 = await fs.readFile(fPath, 'base64');
const arrayBuffer = decode(base64);
return new Promise((resolve, reject) => {
s3bucket.createBucket(() => {
const params = {
Bucket: BUCKET_NAME,
Key: file.name,
Body: arrayBuffer,
ContentDisposition: contentDeposition,
ContentType: contentType,
};
s3bucket.upload(params, (error, data) => {
utils.stopLoader();
if (error) {
reject(getApiError(error));
} else {
console.log(JSON.stringify(data));
resolve(data);
}
});
});
});
};
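As a usage sketch (assuming a picked file object with uri, type, and fileName such as react-native-image-picker returns):
// Usage sketch: upload a picked image and log its S3 location
const onImagePicked = async (asset) => {
  try {
    const data = await uploadFileToS3({
      uri: asset.uri,
      type: asset.type || 'image/jpeg',
      name: asset.fileName || `photo-${Date.now()}.jpg`,
    });
    console.log('Uploaded to', data.Location);
  } catch (e) {
    console.warn(e);
  }
};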
This worked for me after a significant amount of trying over and over again...
I am also using a lambda function to serve me the presigned URL to PUT to.
The lambda function just uses getSignedUrl.
// Lambda Function
const AWS = require('aws-sdk')
AWS.config.update({
accessKeyId: {bucket_access},
secretAccessKey: {bucket_secret},
signatureVersion: 'v4',
region: {bucket_region}
})
const s3 = new AWS.S3()
exports.handler = async (event) => {
const URL = s3.getSignedUrl('putObject', {Bucket: {bucket_name},
// name of file name being placed in S3 Bucket
// event === metaData object
Key: `${event.{key}}/photo00`})
return URL
};
// React Native
const imagePreview = '{image_uri}'
const handleURL = async () => {
// metaData object
const obj = {
key: "meta_data"
}
const response = await fetch({lambda_func_endpoint}, {
method: 'POST',
body: JSON.stringify(obj)
})
const json = await response.json();
return json
}
const handleUpload = async () => {
const URL = await handleURL()
const imageExt = imagePreview.split('.').pop()
// fetch() on the local image URI reads the file contents,
// and .blob() turns them into a binary body that S3 accepts for the PUT.
// (Took a lot of trying to figure this out, as I said.)
let image = await fetch(imagePreview)
image = await image.blob()
await fetch(URL, {
method: 'PUT',
body: image,
headers: {
Accept: `image/${imageExt}`,
'Content-Type': `image/${imageExt}`
}
})
.then((res) => console.log(JSON.parse(JSON.stringify(res)).status))
.catch((err) => console.error(err))
}
Let me know what you guys think!