Automatically import S3 bucket data into DynamoDB - amazon-s3

How to automatically import JSON data from an S3 bucket into DynamoDB using Node.js, DynamoDB, and AWS Lambda.

import type { AWS } from '@serverless/typescript';

const serverlessConfiguration: AWS = {
  service: 'raj',
  frameworkVersion: '2',
  custom: {
    webpack: {
      webpackConfig: './webpack.config.js',
      includeModules: true,
    },
  },
  plugins: ['serverless-webpack'],
  provider: {
    name: 'aws',
    runtime: 'nodejs14.x',
    profile: 'server',
    apiGateway: {
      minimumCompressionSize: 1024,
      shouldStartNameWithService: true,
    },
    environment: {
      AWS_NODEJS_CONNECTION_REUSE_ENABLED: '1',
    },
    lambdaHashingVersion: '20201221',
  },
  // import the function via paths
  functions: {
    messageAdd: {
      handler: 'src/now.handler',
      events: [
        {
          http: {
            path: 'addData',
            method: 'POST',
            cors: true,
          },
        },
      ],
    },
  },
};

module.exports = serverlessConfiguration;
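
Note that the config above only wires up an HTTP endpoint, while the handler below reads S3 event records. For the import to run automatically when a JSON file lands in the bucket, the function also needs an S3 event. A minimal sketch (the function name, bucket name, and suffix rule are placeholders, not from the original config):

functions: {
  s3Import: {
    handler: 'src/now.handler',
    events: [
      {
        s3: {
          bucket: 'your-bucket-name',   // placeholder
          event: 's3:ObjectCreated:*',  // fire on new objects
          rules: [{ suffix: '.json' }], // only JSON files
        },
      },
    ],
  },
},

The Lambda handler and the DynamoDB loader: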

const AWS = require('aws-sdk');

AWS.config.update({
  region: 'us-east-1', // use the appropriate region
  // Do not hardcode credentials here; in Lambda they are supplied
  // by the function's execution role.
});

const docClient = new AWS.DynamoDB.DocumentClient();
const s3 = new AWS.S3();
const ddbTable = 's3todyb'; // module scope, so ddbLoader below can see it

// The Lambda handler
exports.handler = async (event) => {
  console.log(JSON.stringify(event, null, 2));
  console.log('Using DDB table: ', ddbTable);

  await Promise.all(
    event.Records.map(async (record) => {
      try {
        console.log('Incoming record: ', record);

        // Get original text from the object in the incoming event.
        // Use the current record (not event.Records[0]) so every record
        // in the batch is processed; S3 event keys are URL-encoded.
        const originalText = await s3.getObject({
          Bucket: record.s3.bucket.name,
          Key: decodeURIComponent(record.s3.object.key.replace(/\+/g, ' ')),
        }).promise();

        // Upload JSON to DynamoDB
        const jsonData = JSON.parse(originalText.Body.toString('utf-8'));
        await ddbLoader(jsonData);
      } catch (err) {
        console.error(err);
      }
    })
  );
};
// Load JSON data to DynamoDB table
const ddbLoader = async (data) => {
  // Separate into batches for upload
  let batches = [];
  const BATCH_SIZE = 25;

  while (data.length > 0) {
    batches.push(data.splice(0, BATCH_SIZE));
  }

  console.log(`Total batches: ${batches.length}`);
  let batchCount = 0;

  // Save each batch
  await Promise.all(
    batches.map(async (item_data) => {
      // Set up the params object for the DDB call
      const params = {
        RequestItems: {},
      };
      params.RequestItems[ddbTable] = [];

      item_data.forEach((item) => {
        for (let key of Object.keys(item)) {
          // An AttributeValue may not contain an empty string
          if (item[key] === '') delete item[key];
        }

        // Build params
        params.RequestItems[ddbTable].push({
          PutRequest: {
            Item: {
              ...item,
            },
          },
        });
      });

      // Push to DynamoDB in batches
      try {
        batchCount++;
        console.log('Trying batch: ', batchCount);
        const result = await docClient.batchWrite(params).promise();
        console.log('Success: ', result);
      } catch (err) {
        console.error('Error: ', err);
      }
    })
  );
};
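
One caveat with batchWrite: when write throughput is exceeded, DynamoDB returns the leftover requests in UnprocessedItems rather than failing, and the code above would silently drop them. A minimal retry sketch, reusing the docClient from above (the retry count and delays are arbitrary assumptions):

const batchWriteWithRetry = async (params, attempts = 3) => {
  let request = params;
  for (let i = 0; i < attempts; i++) {
    const result = await docClient.batchWrite(request).promise();
    const unprocessed = result.UnprocessedItems || {};
    if (Object.keys(unprocessed).length === 0) return;
    // Back off briefly, then resubmit only the leftover requests
    await new Promise((r) => setTimeout(r, 100 * (i + 1)));
    request = { RequestItems: unprocessed };
  }
  throw new Error('Unprocessed items remained after retries');
};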

Related

Material ui dropzone S3 uploader

Pardon me if this has been asked before. I am new to React and I'm developing a feature to upload files to an S3 bucket using https://yuvaleros.github.io/material-ui-dropzone/.
Can someone help me out with how this should be done?
I am calling my getPresignedUrl method from the onDrop callback provided by the library, but I am unable to figure out how to upload the actual file to S3.
export default function UploadFiles(props) {
  const { formData, handleChange } = props;
  const classes = useStyles();

  const uploadFiles = (fileName) => {
    api.uploadFiles(fileName).then((res) => {
      const { statusCode } = res.data;
      if (statusCode === 200) {
        // do something
        // setSnackbar({
        //   ...snackbar,
        //   ...{
        //     show: true,
        //     message: `Success`,
        //     type: "success",
        //   },
        // });
      } else {
        console.log("this errored out");
        // do something
      }
    });
  };

  return (
    <React.Fragment>
      <div className={classes.dropzonePreviewHeader}>
        <DropzoneArea
          showPreviews={true}
          showPreviewsInDropzone={false}
          useChipsForPreview
          previewGridProps={{ container: { spacing: 1, direction: 'row' } }}
          previewChipProps={{ classes: { root: classes.previewChip } }}
          previewText="Selected files"
          onDrop={(e) => {
            e.forEach((item) => uploadFiles(item.name));
          }}
        />
      </div>
    </React.Fragment>
  );
}
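
If the presigned URL endpoint already works, one common approach is to PUT the File object from onDrop directly to that URL. A sketch, assuming api.uploadFiles returns the presigned URL in res.data.url (that field name is an assumption; adjust to your API's actual response shape):

const uploadToS3 = (file) => {
  api.uploadFiles(file.name).then((res) => {
    const url = res.data.url; // assumed field holding the presigned PUT URL
    return fetch(url, {
      method: 'PUT',
      headers: { 'Content-Type': file.type },
      body: file, // the File object from onDrop
    });
  });
};
// In the component: onDrop={(files) => files.forEach(uploadToS3)}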
There's an existing library which does this: aws-s3.
Sample code from its README:
import S3 from 'aws-s3';

const config = {
  bucketName: 'myBucket',
  dirName: 'photos', /* optional */
  region: 'eu-west-1',
  accessKeyId: 'ANEIFNENI4324N2NIEXAMPLE',
  secretAccessKey: 'cms21uMxçduyUxYjeg20+DEkgDxe6veFosBT7eUgEXAMPLE',
  s3Url: 'https://my-s3-url.com/', /* optional */
};

const S3Client = new S3(config);

/* Notice that if you don't provide a dirName, the file will be automatically uploaded to the root of your bucket */
/* This is optional */
const newFileName = 'my-awesome-file';

S3Client
  .uploadFile(file, newFileName)
  .then(data => console.log(data))
  .catch(err => console.error(err));

/**
 * {
 *   Response: {
 *     bucket: "your-bucket-name",
 *     key: "photos/image.jpg",
 *     location: "https://your-bucket.s3.amazonaws.com/photos/image.jpg"
 *   }
 * }
 */

Getting Error 500 (general-error) while uploading files in feathersjs

I am getting this error in Postman when I send a request to upload files on Feathersjs:

{
  "name": "GeneralError",
  "message": "ENOENT: no such file or directory, open 'public/uploads/pic'",
  "code": 500,
  "className": "general-error",
  "data": {},
  "errors": {}
}
My uploads.service.js:

const { Uploads } = require('./uploads.class');
const createModel = require('../../models/uploads.model');
const hooks = require('./uploads.hooks');
const multer = require('multer');

const storage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, 'public/uploads'), // where the files are being stored
  filename: (_req, file, cb) => {
    console.log(_req.body);
    cb(null, `${_req.body.name}`); // getting the file name
  },
});

const uploads = multer({
  storage,
  limits: {
    fieldSize: 1e8,
    fileSize: 1e7,
  },
});

module.exports = function (app) {
  const options = {
    Model: createModel(app),
    paginate: app.get('paginate'),
    multi: true,
  };

  // Initialize our service with any options it requires
  app.use(
    '/uploads',
    uploads.array('files'),
    (req, _res, next) => {
      const { method } = req;
      if (method === 'POST' || method === 'PATCH') {
        console.log(req.files);
        console.log(req.body);
        req.feathers.files = req.body.files;
        const body = [];
        for (const file of req.files)
          body.push({
            name: req.body.name,
            newNameWithPath: file.path,
          });
        req.body = method === 'POST' ? body : body[0];
      }
      next();
    },
    new Uploads(options, app),
  );

  // Get our initialized service so that we can register hooks
  const service = app.service('uploads');
  service.hooks(hooks);
};
This is my uploads.model.js:

module.exports = function (app) {
  const modelName = 'uploads';
  const mongooseClient = app.get('mongooseClient');
  const { Schema } = mongooseClient;
  const schema = new Schema(
    {
      name: { type: String, required: true },
    },
    {
      timestamps: true,
    },
  );

  // This is necessary to avoid model compilation errors in watch mode
  // see https://mongoosejs.com/docs/api/connection.html#connection_Connection-deleteModel
  if (mongooseClient.modelNames().includes(modelName)) {
    mongooseClient.deleteModel(modelName);
  }
  return mongooseClient.model(modelName, schema);
};
I really cannot figure out where exactly the problem is. I assumed the folder would be created automatically when I upload a file.
I would really appreciate some help. Thank you in advance.
It was my own mistake. I had to create the uploads folder inside the public folder myself, and now it's working.
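
Per multer's README, the destination directory is created for you only when destination is a plain string; with a destination function you are responsible for creating it. Instead of making the folder by hand, a sketch that creates it on demand (same 'public/uploads' path as above):

const fs = require('fs');

const storage = multer.diskStorage({
  destination: (_req, _file, cb) => {
    fs.mkdirSync('public/uploads', { recursive: true }); // no-op if it already exists
    cb(null, 'public/uploads');
  },
  filename: (_req, file, cb) => cb(null, `${_req.body.name}`),
});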

Request body is empty when submitting data using "form data"

When I update using raw JSON it works, but when I use form-data the document is not updated and the request body is an empty object. Why is this happening?
Here's my update code:
exports.updateProgram = catchAsync(async (req, res, next) => {
  console.log('req body', req.body);
  let doc = await Program.findByIdAndUpdate(req.params.id, req.body, { runValidators: true, new: true });
  if (!doc) {
    return next(new AppError('No document found with that ID', 404));
  }
  res.status(200).json({
    status: 'success!',
    data: { doc },
  });
});
In Postman I send the fields as form-data (screenshot omitted).
I am using multer and I actually pass the photo in req.body. Here's the code:
let multerStorage = multer.memoryStorage();

let multerFilter = (req, file, cb) => {
  if (file.mimetype.split('/')[0] == 'image') {
    cb(null, true);
  } else {
    cb(new AppError('Not an image!', 400), false);
  }
};

let upload = multer({
  storage: multerStorage,
  fileFilter: multerFilter,
});

exports.uploadPhotos = upload.fields([
  { name: 'abcd', maxCount: 10 },
  { name: 'photos', maxCount: 10 },
  { name: 'photos3', maxCount: 10 },
]);

exports.resizePhotos = catchAsync(async (req, res, next) => {
  // if (!req.files.photos || !req.files.abcd) return next()
  if (req.files.abcd) {
    req.body.abcd = [];
    await Promise.all(req.files.abcd.map(async (file, i) => {
      let filename = `tour-${Date.now()}-${i + 1}.jpeg`;
      await sharp(file.buffer)
        .resize(500, 500)
        .toFormat('jpeg')
        .jpeg({ quality: 90 })
        .toFile(`public/img/arpit/${filename}`);
      req.body.abcd.push(filename);
    }));
  } else if (req.files.photos3) {
    req.body.photos3 = [];
    await Promise.all(req.files.photos3.map(async (file, i) => {
      let filename = `tour-${Date.now()}-${i + 1}.jpeg`;
      await sharp(file.buffer)
        .resize(500, 500)
        .toFormat('jpeg')
        .jpeg({ quality: 90 })
        .toFile(`public/img/arpit/${filename}`);
      req.body.photos3.push(filename);
    }));
  } else if (req.files.photos) {
    req.body.photos = [];
    console.log('req.files>>>', req.files);
    await Promise.all(req.files.photos.map(async (file, i) => {
      let filename = `tour-${Date.now()}-${i + 1}.jpeg`;
      await sharp(file.buffer)
        .resize(500, 500)
        .toFormat('jpeg')
        .jpeg({ quality: 90 })
        .toFile(`public/img/programs/${filename}`);
      req.body.photos.push(filename);
    }));
  }
  return next();
});
I'm importing these middlewares in the routes file.
Express (bodyParser) can't handle multipart form-data, and that's why your code isn't working.
Take a look at multer, an Express package. It is a middleware that provides the functionality you're looking for.
var cpUpload = upload.fields([{ name: 'avatar', maxCount: 1 }, { name: 'gallery', maxCount: 8 }]);
app.post('/cool-profile', cpUpload, function (req, res, next) {
  // req.files is an object (String -> Array) where fieldname is the key, and the value is array of files
  //
  // e.g.
  //  req.files['avatar'][0] -> File
  //  req.files['gallery'] -> Array
  //
  // req.body will contain the text fields, if there were any
});
This might help you. Quoted from https://www.npmjs.com/package/multer#readme
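
For the asker's setup, both multer middlewares have to run on the route before the controller, otherwise req.body stays empty for multipart requests. A sketch of the routes file (the handler names are taken from the exports above; the controller path and route shape are assumptions):

const express = require('express');
const router = express.Router();
const programController = require('../controllers/programController'); // assumed path

router.patch(
  '/:id',
  programController.uploadPhotos,  // multer parses the multipart body first
  programController.resizePhotos,  // sharp resizing fills req.body with filenames
  programController.updateProgram  // req.body is now populated
);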

{"errors":[{"message":"Must provide query string."}]}

I want to upload an image using the graphql-upload package.
This is my schema file:
const { GraphQLSchema, GraphQLObjectType, GraphQLBoolean } = require("graphql");
const { GraphQLUpload } = require("graphql-upload");

const schema = new GraphQLSchema({
  mutation: new GraphQLObjectType({
    name: "Mutation",
    fields() {
      return {
        uploadImage: {
          description: "Uploads an image.",
          type: GraphQLBoolean,
          args: {
            image: {
              description: "Image file.",
              type: GraphQLUpload,
            },
          },
          async resolve(parent, { image }) {
            const { filename, mimetype, createReadStream } = await image;
            const stream = createReadStream();
            // Promisify the stream and store the file, then…
            return true;
          },
        },
      };
    },
  }),
});

module.exports = schema; // the server file below requires this
This is my server file. When I run it and open the endpoint in the browser, it shows this error: {"errors":[{"message":"Must provide query string."}]}
const express = require("express");
const graphqlHTTP = require("express-graphql");
const { graphqlUploadExpress } = require("graphql-upload");
const schema = require("./schema");

express()
  .use(
    "/graphql",
    graphqlUploadExpress({ maxFileSize: 10000000, maxFiles: 10 }),
    graphqlHTTP({ schema })
  )
  .listen(3000);
What am I doing wrong?
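
That error usually just means the /graphql endpoint was hit without a query, e.g. by opening it in the browser as a plain GET. To exercise the upload mutation itself, the request has to follow the GraphQL multipart request spec that graphql-upload implements. A test sketch using Node 18+ globals (fetch, FormData, Blob; the file path is a placeholder):

const fs = require('fs');

async function testUpload() {
  const form = new FormData();
  form.append('operations', JSON.stringify({
    query: 'mutation ($image: Upload!) { uploadImage(image: $image) }',
    variables: { image: null },
  }));
  form.append('map', JSON.stringify({ 0: ['variables.image'] }));
  form.append('0', new Blob([fs.readFileSync('photo.jpg')]), 'photo.jpg'); // placeholder file
  const res = await fetch('http://localhost:3000/graphql', { method: 'POST', body: form });
  console.log(await res.json());
}

testUpload();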

Upload an image to Amazon S3 in react-native

I am trying to upload an image to Amazon S3. If possible, can anyone provide links/docs for how to upload to Amazon S3? Any help is much appreciated.
S3 options:
// this.state.s3options in YourComponent
{
  "url": "https://yourapp.s3.eu-central-1.amazonaws.com",
  "fields": {
    "key": "cache/22d65141b48c5c44eaf93a0f6b0abc30.jpeg",
    "policy": "eyJleHBpcm...1VDE0Mzc1OVoifV19",
    "x-amz-credential": "AK...25/eu-central-1/s3/aws4_request",
    "x-amz-algorithm": "AWS4-HMAC-SHA256",
    "x-amz-date": "20161125T143759Z",
    "x-amz-signature": "87863c360...b9b304bfe650"
  }
}
Component:
class YourComponent extends Component {
  // ...

  // fileSource looks like: {uri: "content://media/external/images/media/13", isStatic: true}
  async uploadFileToS3(fileSource) {
    try {
      var formData = new FormData();

      // Prepare the formData by the S3 options
      Object.keys(this.state.s3options.fields).forEach((key) => {
        formData.append(key, this.state.s3options.fields[key]);
      });
      formData.append('file', {
        uri: fileSource.uri,
        type: 'image/jpeg',
      });
      formData.append('Content-Type', 'image/jpeg');

      var request = new XMLHttpRequest();
      request.onload = function (e) {
        if (e.target.status === 204) {
          // Result in e.target.responseHeaders.Location
          this.setState({ avatarSourceRemote: { uri: e.target.responseHeaders.Location } });
        }
      }.bind(this);
      request.open('POST', this.state.s3options.url, true);
      request.setRequestHeader('Content-type', 'multipart/form-data');
      request.send(formData);
    } catch (error) {
      console.error(error);
    }
  }

  // Example: display the uploaded image
  render() {
    if (this.state.avatarSourceRemote) {
      return (
        <Image source={this.state.avatarSourceRemote} style={{ width: 100, height: 100 }} />
      );
    } else {
      return (
        <Text>No Image</Text>
      );
    }
  }
}
This works for me:

import fs from 'react-native-fs';
import { decode } from 'base64-arraybuffer';
import AWS from 'aws-sdk';

export const uploadFileToS3 = async (file) => {
  const BUCKET_NAME = 'XXXXXXXXXX';
  const IAM_USER_KEY = 'XXXXXXXXXX';
  const IAM_USER_SECRET = 'XXXXXXXXXXXXXXX';

  const s3bucket = new AWS.S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME,
    signatureVersion: 'v4',
  });

  const contentType = file.type;
  const contentDeposition = `inline;filename="${file.name}"`;
  const fPath = file.uri;
  const base64 = await fs.readFile(fPath, 'base64');
  const arrayBuffer = decode(base64);

  return new Promise((resolve, reject) => {
    s3bucket.createBucket(() => {
      const params = {
        Bucket: BUCKET_NAME,
        Key: file.name,
        Body: arrayBuffer,
        ContentDisposition: contentDeposition,
        ContentType: contentType,
      };
      s3bucket.upload(params, (error, data) => {
        utils.stopLoader(); // poster's own loading-spinner helper
        if (error) {
          reject(getApiError(error)); // poster's own error wrapper
        } else {
          console.log(JSON.stringify(data));
          resolve(data);
        }
      });
    });
  });
};
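
For reference, a usage sketch; the file shape (uri/name/type) matches what common React Native image pickers return, and the path is a placeholder:

const file = {
  uri: 'file:///path/to/photo.jpg', // local path from the picker (placeholder)
  name: 'photo.jpg',
  type: 'image/jpeg',
};

uploadFileToS3(file)
  .then((data) => console.log('Uploaded to:', data.Location))
  .catch((err) => console.error(err));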
This worked for me after a significant amount of trying over and over again...
I am also using a Lambda function to serve me the link to upload with.
The Lambda function just uses getSignedUrl.
// Lambda Function
const AWS = require('aws-sdk');

AWS.config.update({
  accessKeyId: {bucket_access},
  secretAccessKey: {bucket_secret},
  signatureVersion: 'v4',
  region: {bucket_region},
});

const s3 = new AWS.S3();

exports.handler = async (event) => {
  const URL = s3.getSignedUrl('putObject', {
    Bucket: {bucket_name},
    // name of the file being placed in the S3 bucket
    // event === metaData object
    Key: `${event.{key}}/photo00`,
  });
  return URL;
};
// React Native
const imagePreview = '{image_uri}';

const handleURL = async () => {
  // metaData object
  const obj = {
    key: "meta_data",
  };
  const response = await fetch({lambda_func_endpoint}, {
    method: 'POST',
    body: JSON.stringify(obj),
  });
  const json = await response.json();
  return json;
};

const handleUpload = async () => {
  const URL = await handleURL();
  const imageExt = imagePreview.split('.').pop();

  // Fetch the local image URI first to read the file, then convert it
  // to a blob so the presigned PUT receives the raw binary body.
  let image = await fetch(imagePreview);
  image = await image.blob();

  await fetch(URL, {
    method: 'PUT',
    body: image,
    headers: {
      Accept: `image/${imageExt}`,
      'Content-Type': `image/${imageExt}`,
    },
  })
    .then((res) => console.log(JSON.parse(JSON.stringify(res)).status))
    .catch((err) => console.error(err));
};
Let me know what you guys think!