In a Lambda function triggered by the s3:ObjectCreated:* event, calling headObject on the newly created object returns a NotFound error.
module.exports.handler = async function(event, context, callback) {
  try {
    const Bucket = event.Records[0].s3.bucket.name;
    const Key = event.Records[0].s3.object.key;
    console.log('Bucket', Bucket);
    console.log('Key', Key);
    const objectHead = await s3.headObject({ Bucket, Key }).promise();
    console.log('Alas! I will never discover that the objectHead is:', objectHead);
    callback();
  } catch (err) {
    console.error('Error', err);
    callback(err);
  }
}
And this is the error I get:
{ NotFound: null,
  message: null,
  code: 'NotFound',
  region: null,
  time: 2018-02-19T11:06:35.894Z,
  requestId: 'XXXXXXXXXXX',
  extendedRequestId: 'XXX.....XXX',
  cfId: undefined,
  statusCode: 404,
  retryable: false,
  retryDelay: 77.24564264820208 }
I've noticed that the error says region: null. I suspect this is irrelevant, as I'm 99% sure I'm setting it correctly:
const s3 = new AWS.S3({
region: 'us-east-1'
});
Here's the serverless.yml function declaration in case anybody's curious:
obj_head:
  handler: obj_head.handler
  events:
    - s3:
        bucket: ${self:provider.environment.BUCKET_NAME}
        event: s3:ObjectCreated:*
  role: arn:aws:iam::XXXXXXXXX:role/RoleWithAllS3PermissionsEver
And here is a sample of a received event:
{
  "Records": [
    {
      "eventVersion": "2.0",
      "eventSource": "aws:s3",
      "awsRegion": "us-east-1",
      "eventTime": "2018-02-19T11:03:46.761Z",
      "eventName": "ObjectCreated:Put",
      "userIdentity": {
        "principalId": "AWS:XXX"
      },
      "requestParameters": {
        "sourceIPAddress": "X.X.X.X"
      },
      "responseElements": {
        "x-amz-request-id": "X",
        "x-amz-id-2": "X/X/X"
      },
      "s3": {
        "s3SchemaVersion": "1.0",
        "configurationId": "14122133-28e8-4cd9-907c-af328334c56b",
        "bucket": {
          "name": "BUCKET_NAME",
          "ownerIdentity": {
            "principalId": "X"
          },
          "arn": "arn:aws:s3:::BUCKET_NAME"
        },
        "object": {
          "key": "input.key",
          "size": X,
          "eTag": "X",
          "sequencer": "X"
        }
      }
    }
  ]
}
It's puzzling that the object head isn't found, even though the very event that triggered the function is the object's creation.
Am I doing something wrong?
Any thoughts on where to look?
The object keyname value is URL encoded, and this was causing the issue. This behaviour is documented here:
The s3 key provides information about the bucket and object involved
in the event. Note that the object keyname value is URL encoded. For
example "red flower.jpg" becomes "red+flower.jpg".
When dealing with filenames that contain Unicode characters, please see this answer from Alastair McCormack:
You need to convert the URL encoded Unicode string to a bytes str
before un-urlparsing it and decoding as UTF-8.
I'm using a Kinesis delivery stream to send data from EventBridge to an S3 bucket, but I can't seem to find which class has the option to configure dynamic partitioning.
This is my code for the delivery stream:
new CfnDeliveryStream(this, `Export-delivery-stream`, {
  s3DestinationConfiguration: {
    bucketArn: bucket.bucketArn,
    roleArn: kinesisFirehoseRole.roleArn,
    prefix: `test/!{timestamp:yyyy/MM/dd}/`
  }
});
I have been working on the same issue for a few days and have finally gotten something to work. Here is an example of how it can be implemented in CDK. In short, the partitioning has to be enabled as you have done, but you also need to set the key and the jq expression in the so-called processingConfiguration.
Our incoming JSON data looks something like this:
{
  "data": {
    "timestamp": 1633521266990,
    "defaultTopic": "Topic",
    "data": {
      "OUT1": "Inactive",
      "Current_mA": 3.92
    }
  }
}
The CDK code looks as follows:
const DeliveryStream = new CfnDeliveryStream(this, 'deliverystream', {
  deliveryStreamName: 'deliverystream',
  extendedS3DestinationConfiguration: {
    cloudWatchLoggingOptions: {
      enabled: true,
    },
    bucketArn: Bucket.bucketArn,
    roleArn: deliveryStreamRole.roleArn,
    // partitionKeyFromQuery refers to the key extracted by the
    // MetadataExtraction processor below
    prefix: 'defaultTopic=!{partitionKeyFromQuery:defaultTopic}/!{timestamp:yyyy/MM/dd}/',
    errorOutputPrefix: 'error/!{firehose:error-output-type}/',
    bufferingHints: {
      intervalInSeconds: 60,
    },
    // This only enables dynamic partitioning; the partition key itself
    // is defined in processingConfiguration
    dynamicPartitioningConfiguration: {
      enabled: true,
    },
    processingConfiguration: {
      enabled: true,
      processors: [
        {
          // Extract the partition key from each record with a jq query
          type: 'MetadataExtraction',
          parameters: [
            {
              parameterName: 'MetadataExtractionQuery',
              parameterValue: '{defaultTopic: .data.defaultTopic}',
            },
            {
              parameterName: 'JsonParsingEngine',
              parameterValue: 'JQ-1.6',
            },
          ],
        },
        {
          // Append a newline to each record so the S3 objects stay line-delimited
          type: 'AppendDelimiterToRecord',
          parameters: [
            {
              parameterName: 'Delimiter',
              parameterValue: '\\n',
            },
          ],
        },
      ],
    },
  },
})
I have a problem sending a picture to a server. This is basically the default approach, but it does not seem to work.
var source = '/Users/alexx/Library/Developer/CoreSimulator/Devices/44F0FA92-4898-4CFB-862E-4E5EC4C8AB28/data/Containers/Bundle/Application/34BCE695-4B4F-472F-AB5C-F2336AC45273/DoorLock.app/123.jpg';
const form = new FormData();
form.append('image', {
  uri: source,
  type: 'image/jpg',
  name: '123.jpg',
});
const data = () => {
  fetch(api, {
    method: 'POST',
    body: form,
  });
};
That's the response I get from the server:
{
  "_bodyBlob": {
    "_data": {
      "__collector": [Object],
      "blobId": "78B18938-15BF-4F18-B3C8-1EB30A24D9F8",
      "name": "test.html",
      "offset": 0,
      "size": 192,
      "type": "text/html"
    }
  },
  "_bodyInit": {
    "_data": {
      "__collector": [Object],
      "blobId": "78B18938-15BF-4F18-B3C8-1EB30A24D9F8",
      "name": "test.html",
      "offset": 0,
      "size": 192,
      "type": "text/html"
    }
  },
  "bodyUsed": false,
  "headers": {
    "map": {
      "connection": "keep-alive",
      "content-length": "192",
      "content-type": "text/html",
      "date": "Mon, 02 Nov 2020 22:57:21 GMT",
      "server": "PythonAnywhere"
    }
  },
  "ok": false,
  "status": 400,
  "statusText": undefined,
  "type": "default",
  "url": api
}
Although this Python code works perfectly and gets a correct response:
from requests import post
img = {'file': ('123.png', open('the path to the pic/123.png', 'rb'), 'image/png')}
post(api, files=img)
Is there any way to get this working, or is it a server-side problem that can't receive the correct arguments?
Adding "file://" to the beginning of the source string fixed the problem.
So the source looks like:
var source = 'file:///Users/alexx/Library/Developer/CoreSimulator/Devices/44F0FA92-4898-4CFB-862E-4E5EC4C8AB28/data/Containers/Bundle/Application/34BCE695-4B4F-472F-AB5C-F2336AC45273/DoorLock.app/123.jpg';
Then it fetches perfectly. Hope it helps anybody who tries to send a local image using FormData. The summary looks like this now:
const form = new FormData();
form.append('file', {
  uri: source,
  name: '123.jpg',
  fileName: 'file', // optional
});
fetch(uri, {
  method: 'post',
  body: form,
})
  .then(response => {
    console.log('image uploaded');
    console.log(response);
  })
  .catch(console.log);
In FormData, when you pass files, you need to pass 3 parameters:
1. the key expected by the backend (in your case image),
2. an object with three properties named name, type, and uri, where type is the MIME type (e.g. image/jpeg),
3. the name of the file.
E.g.:
data.append("FilePath", {
  name: "image.png",
  type: "image/png",
  uri: "content://com.camera/image.png"
}, "image.png")
I'm using the ImageInput component inside an iterator to upload images in my create form, and it generates a structure like this:
"data": {
"items": [
{
"id": 1,
"title": "test",
"subTitle": "test",
"additionalAttributes": {
"price": "3452345"
},
"images": [
{
"src": {
"rawFile": {
"path": "test.jpg"
},
"src": "blob:https://localhost:44323/82c04494-244a-49eb-9d0e-6bca5a3469f7",
"title": "test.jpg"
},
"title": "d"
}
]
}
],
"contact": {
"firstName": "test",
"lastName": "test",
"jobTitle": "test",
"emailAddress": "test#test.com",
"phoneNumber": "23234"
},
"theme_id": 1,
"endDate": "2020-06-19T22:27:00.000Z",
"status": "2"
}
}
What I'm trying to do is send the image to an API that saves it in a folder. A blob URL is an internal object in the browser, so it can't be used by the API; I therefore tried to convert the blob URL into binary and send that to the API.
Following the tutorial, I cannot get the expected result. Here is my code:
I created a new dataProvider like this:
export const PrivateEventProvider = {
  create: (resource: string, params: any) => {
    const convertFileToBase64 = (file: { rawFile: Blob }) =>
      new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(reader.result);
        reader.onerror = reject;
        reader.readAsDataURL(file.rawFile);
      });

    convertFileToBase64(params.data.items[0].images[0].src.src).then(
      transformedPicture => {
        console.log(`transformedPicture: ${transformedPicture}`);
      }
    );
    // ...
  },
};
And I get this error:
Unhandled Rejection (TypeError): Failed to execute 'readAsDataURL' on
'FileReader': parameter 1 is not of type 'Blob'.
So my question is, which is the correct way of uploading images to a folder using react-admin?
I've been having trouble with Elasticsearch (ES) dynamic mappings. Seems like I'm in a catch-22. https://www.elastic.co/guide/en/elasticsearch/guide/current/custom-dynamic-mapping.html
The main goal is to store everything as a string that comes into ES.
What I've tried:
In ES you can't create a dynamic mapping until the index has been created. Okay, makes sense.
I can't create an empty index, so if the first item sent into the index is not a string, I can't re-assign it... I won't know what type of object will be the first item in the index; it could be any type, due to how the app accepts a variety of objects/events.
So if I can't create the mapping ahead of time, I can't insert an empty index to create the mapping, and I can't change the mapping after the fact, how do I deal with the first item if it's NOT a string?
Here's what I'm currently doing (using the JavaScript client).
const createESIndex = function (esClient) {
  esClient.index({
    index: 'timeline-2015-11-21',
    type: 'event',
    body: event
  }, function (error, response) {
    if (error) {
      logger.log(logger.SEVERITY.ERROR, 'acceptEvent elasticsearch create failed with: ' + error + ' req:' + JSON.stringify(event));
      console.log(logger.SEVERITY.ERROR, 'acceptEvent elasticsearch create failed with: ' + error + ' req:' + JSON.stringify(event));
      res.status(500).send('Error saving document');
    } else {
      res.status(200).send('Accepted');
    }
  });
}
esClientLookup.getClient(function (esClient) {
  esClient.indices.putTemplate({
    name: "timeline-mapping-template",
    body: {
      "template": "timeline-*",
      "mappings": {
        "event": {
          "dynamic_templates": [
            { "timestamp-only": {
              "match": "@timestamp",
              "match_mapping_type": "date",
              "mapping": {
                "type": "date"
              }
            }},
            { "all-others": {
              "match": "*",
              "match_mapping_type": "string",
              "mapping": {
                "type": "string"
              }
            }}
          ]
        }
      }
    }
  }).then(function (res) {
    console.log("put template response: " + JSON.stringify(res));
    createESIndex(esClient);
  }, function (error) {
    console.log(error);
    res.status(500).send('Error saving document');
  });
});
Index templates to the rescue!! That's exactly what you need. The idea is to create a template of your index, and as soon as you wish to store a document in that index, ES will create it for you with the mapping you gave (even dynamic ones):
curl -XPUT localhost:9200/_template/my_template -d '{
  "template": "index_name_*",
  "settings": {
    "number_of_shards": 1
  },
  "mappings": {
    "type_name": {
      "dynamic_templates": [
        {
          "strings": {
            "match": "*",
            "match_mapping_type": "*",
            "mapping": {
              "type": "string"
            }
          }
        }
      ],
      "properties": {}
    }
  }
}'
Then when you index anything in an index whose name matches index_name_*, the index will be created with the dynamic mapping above.
For instance:
curl -XPUT localhost:9200/index_name_1/type_name/1 -d '{
  "one": 1,
  "two": "two",
  "three": true
}'
That will create a new index called index_name_1 with a mapping type for type_name where all properties are strings. You can verify that with:
curl -XGET localhost:9200/index_name_1/_mapping/type_name
Response:
{
  "index_name_1" : {
    "mappings" : {
      "type_name" : {
        "dynamic_templates" : [ {
          "strings" : {
            "mapping" : {
              "type" : "string"
            },
            "match" : "*",
            "match_mapping_type" : "*"
          }
        } ],
        "properties" : {
          "one" : {
            "type" : "string"
          },
          "three" : {
            "type" : "string"
          },
          "two" : {
            "type" : "string"
          }
        }
      }
    }
  }
}
Note that if you're willing to do this via the JavaScript API, you can use the indices.putTemplate call.
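For instance, here is a minimal sketch with the legacy elasticsearch JavaScript client (client setup is assumed; the template body mirrors the curl example above):
const elasticsearch = require('elasticsearch');
const esClient = new elasticsearch.Client({ host: 'localhost:9200' });

// Same dynamic template as above: map every incoming property as a string.
esClient.indices.putTemplate({
  name: 'my_template',
  body: {
    template: 'index_name_*',
    settings: { number_of_shards: 1 },
    mappings: {
      type_name: {
        dynamic_templates: [{
          strings: {
            match: '*',
            match_mapping_type: '*',
            mapping: { type: 'string' }
          }
        }],
        properties: {}
      }
    }
  }
}).then(
  res => console.log('put template response:', JSON.stringify(res)),
  err => console.error(err)
);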
export const user = {
  email: {
    type: 'text',
  },
};

export const activity = {
  date: {
    type: 'text',
  },
};

export const common = {
  name: {
    type: 'text',
  },
};
import { Client } from '@elastic/elasticsearch';
import { user } from './user';
import { activity } from './activity';
import { common } from './common';
export class UserDataFactory {
  private schema = {
    ...user,
    ...activity,
    ...common,
    relation_type: {
      type: 'join',
      eager_global_ordinals: true,
      relations: {
        parent: ['activity'],
      },
    },
  };

  constructor(private client: Client) {
    Object.setPrototypeOf(this, UserDataFactory.prototype);
  }

  async create() {
    const settings = {
      settings: {
        analysis: {
          normalizer: {
            useLowercase: {
              filter: ['lowercase'],
            },
          },
        },
      },
      mappings: {
        properties: this.schema,
      },
    };

    // Create the index only if it doesn't exist yet
    const { body } = await this.client.indices.exists({
      index: ElasticIndex.UserDataFactory,
    });
    if (!body) {
      await this.client.indices.create({
        index: ElasticIndex.UserDataFactory,
      });
    }

    // The index must be closed while updating analysis settings
    await this.client.indices.close({ index: ElasticIndex.UserDataFactory });
    await this.client.indices.putSettings({
      index: ElasticIndex.UserDataFactory,
      body: settings,
    });
    await this.client.indices.open({
      index: ElasticIndex.UserDataFactory,
    });
    await this.client.indices.putMapping({
      index: ElasticIndex.UserDataFactory,
      body: {
        dynamic: 'strict',
        properties: {
          ...this.schema,
        },
      },
    });
  }
}
wrapper.ts
class ElasticWrapper {
  private _client: Client = new Client({
    node: process.env.elasticsearch_node,
    auth: {
      username: 'elastic',
      password: process.env.elasticsearch_password || 'changeme',
    },
    ssl: {
      ca: process.env.elasticsearch_certificate,
      rejectUnauthorized: false,
    },
  });

  get client() {
    return this._client;
  }
}

export const elasticWrapper = new ElasticWrapper();
index.ts
new UserDataFactory(elasticWrapper.client).create();
Here is my code:
function runQuery() {
  var json = JSON.stringify({"string": "eight", "number": "8", "id": "8"});
  var body = {
    "kind": "bigquery#tableDataInsertAllRequest",
    "rows": [
      {
        "insertId": "8",
        "json": json
      }
    ]
  };
  var request = gapi.client.bigquery.tabledata.insertAll({
    'projectId': project_id,
    'datasetId': 'newTest',
    'tableId': 'newTable',
    'content': body
  });
  request.execute(function(response) {
    console.log(response);
  });
}
But this code returns the error: No records present in table data append request. (400)
Can somebody help me?
Problem solved: the rows array has to be passed at the top level of the request (not nested under a content key), and json has to be a plain object rather than a stringified JSON:
var request = gapi.client.bigquery.tabledata.insertAll({
  'projectId': projectId,
  'datasetId': datasetId,
  'tableId': tableId,
  "kind": "bigquery#tableDataInsertAllRequest",
  "rows": [
    {
      "insertId": "10",
      "json": {"id": "10", "string": "ten", "number": "10"}
    }
  ]
});
Spent a lot of time on the same issue.
dbepcepk's answer resulted in the error:
"Error: Invalid JSON payload received. Unknown name "rows": Cannot bind query parameter. 'rows' is a message type. Parameters can only be bound to primitive types."
Here is how I got around it:
Note the JSON key "resource" defining the TableDataInsertAllRequest object:
const request = {
  "projectId": projectId,
  "datasetId": datasetId,
  "tableId": tableId,
  "resource": {
    "kind": "bigquery#tableDataInsertAllRequest",
    "skipInvalidRows": false,
    "ignoreUnknownValues": false,
    "templateSuffix": "",
    "rows": [{"json": {"test": true}}]
  }
}
// Assumes this callback runs inside a `new Promise((resolve, reject) => ...)` executor
client.tabledata.insertAll(
  request,
  (err, data) => {
    if (err) {
      reject(err);
    } else {
      resolve(data);
    }
  });
You can't append data to the sample data set. Create your own to perform your inserts.
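As a starting point, here is a hedged sketch of creating your own dataset, mirroring the gapi request style that worked above (project_id and the 'newTest' dataset name are the question's placeholders; exact body handling can vary by client version):
// Create a dataset you own, then insert into tables under it.
// Assumes gapi.client.bigquery is loaded and the user is authorized.
var request = gapi.client.bigquery.datasets.insert({
  'projectId': project_id,
  'datasetReference': {
    'projectId': project_id,
    'datasetId': 'newTest'
  }
});
request.execute(function (response) {
  console.log(response);
});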