Upload larger files to S3 with a single PUT instead of multipart upload - file-upload

I want to upload and encode a video file using S3, Elastic Transcoder and a Lambda function.
The Lambda function and settings work fine, but when I upload a video file (mp4), it is automatically uploaded as a 'multipart' upload (because it's larger than 5 MB).
Possibly because of that, the job on Elastic Transcoder runs three times, so I end up running two unnecessary jobs on each upload (the first job encodes the video file successfully).
Is there any way to avoid this, such as forcing larger files (about 60 MB) to be uploaded with a single 'put' rather than 'multipart'? I'd also prefer that non-engineers can do this from the browser console.
This is the Lambda function that submits the job to Elastic Transcoder. When I upload video files, the 'Put' event doesn't fire but the 'multipart' event does. The 'Put' event does fire for smaller files such as images or txt files, which I used just for testing.
console.log('Loading event');
var aws = require('aws-sdk');
var s3 = new aws.S3({apiVersion: '2006-03-01'});
var ets = new aws.ElasticTranscoder({apiVersion: '2012-09-25', region: 'us-west-2'});

exports.handler = function(event, context) {
    console.log('Received event:');
    console.log(JSON.stringify(event, null, ' '));

    var bucket = event.Records[0].s3.bucket.name;
    var key = event.Records[0].s3.object.key;
    var fileName = key.split('.')[0];

    // Verify the object exists before submitting the transcoding job.
    s3.getObject({Bucket: bucket, Key: key}, function(err, data) {
        if (err) {
            console.log('error getting object ' + key + ' from bucket ' + bucket +
                '. Make sure they exist and your bucket is in the same region as this function.');
            context.done('error', 'error getting file' + err);
        } else {
            console.log("### JOB KEY ### " + key);
            ets.createJob({
                PipelineId: '***',
                Input: {
                    Key: key,
                    FrameRate: 'auto',
                    Resolution: 'auto',
                    AspectRatio: 'auto',
                    Interlaced: 'auto',
                    Container: 'auto'
                },
                Output: {
                    Key: fileName + '.m3u8',
                    ThumbnailPattern: fileName + '-thumbs-{count}',
                    PresetId: '1351620000001-200035',
                    Rotate: 'auto'
                }
            }, function(error, data) {
                if (error) {
                    console.log(error);
                } else {
                    console.log('Job submitted');
                }
            });
        }
    });
};
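For reference, one way to guarantee a single 'Put' event is to upload via the SDK's putObject instead of the console or the managed uploader: putObject always issues one PUT request (for objects up to 5 GB), so only one s3:ObjectCreated:Put notification fires. It is also worth checking the bucket's notification configuration for overlapping ObjectCreated rules, which commonly cause duplicate Lambda invocations. Below is a minimal sketch under assumptions, not from the original post; the bucket name and file path are placeholders.
var aws = require('aws-sdk');
var fs = require('fs');
var s3 = new aws.S3({apiVersion: '2006-03-01'});

// Placeholder bucket and key: substitute your own. putObject never splits
// the upload into parts, so S3 emits a single s3:ObjectCreated:Put event.
s3.putObject({
    Bucket: 'my-input-bucket',
    Key: 'videos/sample.mp4',
    Body: fs.createReadStream('sample.mp4'),
    ContentType: 'video/mp4'
}, function(err, data) {
    if (err) console.log('upload failed', err);
    else console.log('uploaded in a single PUT, ETag: ' + data.ETag);
});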

Related

React Native: can't unzip the file I get with rn-fetch-blob

I'm trying to download a zip file with rn-fetch-blob; once I have the file, I unzip it with react-native-zip-archive.
It often works well, but sometimes the unzipFile() function I've created can't unzip the file, as if it were corrupted.
Has anyone run into this problem?
Here is my code:
downloadZipFile(res => {
    unzipFile(res.path(), (boolean, path) => {
        if (boolean !== false) {
            db = SQLite.openDatabase({
                name: "addb.sqlite",
                location: "default",
                createFromLocation: path
            }).then(DB => {
                db = DB;
                db.transaction(tx => {
                    tx.executeSql(
                        "SELECT * FROM sqlite_master",
                        [],
                        (tx, results) => {
                            console.log("Logs sqlite_master");
                            const rows = results.rows;
                            for (let i = 0; i < rows.length; i++) {
                                console.log(_getCurrentDate());
                                datas.push({
                                    ...rows.item(i)
                                });
                            }
                            console.log(datas);
                            callback(true);
                        },
                        (tx, err) => {
                            console.log(err);
                        }
                    );
                });
            });
        } else {
            console.log("Can't create database");
            callback(false);
        }
    });
});
And here are the functions I used:
export function downloadZipFile(callback) {
    RNFetchBlob.config({
        fileCache: true
    })
        .fetch(
            "GET",
            "MY LINK"
        )
        .then(res => {
            console.log("The file saved to ", res.path());
            callback(res);
        })
        .catch((errorMessage, statusCode) => {
            // error handling
            console.log(
                "error: " + errorMessage + " and status code: " + statusCode
            );
        });
}

export function unzipFile(sourcePath, callback) {
    const charset = "UTF-8";
    const targetPath = "/data/user/0/com.myapp/databases/";
    unzip(sourcePath, targetPath, charset)
        .then(path => {
            console.log(`unzip completed at ${path}`);
            callback(true, path);
        })
        .catch(error => {
            console.log("there is an error: " + error);
            callback(false, null);
        });
}
Other information:
The file is a database that I have to put in the application's "databases" folder. I tried putting a console.log(path) everywhere in the unzipFile() function to see whether the file is really created by the time I try to unzip it, and it seems it is there… And when the file is impossible to unzip, it has the same size as the ones that work.
rn-fetch-blob calls an API which copies an existing remote database and zips it as an axd file. Is there any problem with this format? Could the API be the problem?
The axd file created by the API is used by an existing application and seems to work correctly there. Moreover, when we download the file without rn-fetch-blob (by pasting the link into my browser), it works correctly every time I have tried.
I tried downloading the file directly; the API always sent me the same file (a zip file or an axd file), and it worked without problems (20 tries). Could the problem be the time it takes to download the file? Through the API it takes 5 or 6 seconds; without it, 2 seconds. But I think my unzipFile() function only starts once the file is downloaded, no? And as I said, when I put a console.log(path) in the unzipFile() function, the file is there, with the same size as the others...
I don't know how to make it work every time; I hope someone can help me :)
Thanks!
I tried putting a for (let i = 1; i < 101; i++) loop around the rn-fetch-blob call to run it 100 times:
it worked 97 times out of 100, then 96 out of 100...
Then I added a timer, to be sure the file had finished downloading: it worked 3 times out of 100...
Then I deleted the timer, and now it almost never works, maybe 5 times out of 100...
I really don't understand what the problem is :(
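One thing that may help rule out a race between the download and the unzip: a minimal sketch (an assumption on my part, not a confirmed fix) of the same flow rewritten with async/await, so the unzip can only start after the fetch promise has resolved. The URL and target path are the placeholders from the code above.
import RNFetchBlob from "rn-fetch-blob";
import { unzip } from "react-native-zip-archive";

async function downloadAndUnzip() {
    // fileCache: true writes the response to a temp file, and fetch()
    // resolves only once the download has completed.
    const res = await RNFetchBlob.config({ fileCache: true })
        .fetch("GET", "MY LINK"); // placeholder URL, as in the post
    const sourcePath = res.path();
    const targetPath = "/data/user/0/com.myapp/databases/";
    // unzip resolves with the path it extracted to.
    const path = await unzip(sourcePath, targetPath, "UTF-8");
    console.log(`unzip completed at ${path}`);
    return path;
}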

CKEditor 5 Image Upload Issues

I'm using CKEditor 5 in my project. I have to support image upload, so I searched and followed this Stack Overflow article.
I have created an UploadAdapter, which is:
class UploadAdapter {
    constructor( loader, url, t ) {
        this.loader = loader;
        this.url = url;
        this.t = t;
    }

    upload() {
        return new Promise( ( resolve, reject ) => {
            this._initRequest();
            this._initListeners( resolve, reject );
            this._sendRequest();
        } );
    }

    abort() {
        if ( this.xhr ) {
            this.xhr.abort();
        }
    }

    _initRequest() {
        const xhr = this.xhr = new XMLHttpRequest();
        xhr.open( 'POST', this.url, true );
        xhr.responseType = 'json';
    }

    _initListeners( resolve, reject ) {
        const xhr = this.xhr;
        const loader = this.loader;
        const t = this.t;
        const genericError = t( 'Cannot upload file:' ) + ` ${ loader.file.name }.`;

        xhr.addEventListener( 'error', () => reject( genericError ) );
        xhr.addEventListener( 'abort', () => reject() );
        xhr.addEventListener( 'load', () => {
            const response = xhr.response;

            if ( !response || !response.uploaded ) {
                return reject( response && response.error && response.error.message ? response.error.message : genericError );
            }

            resolve( {
                default: response.url
            } );
        } );

        if ( xhr.upload ) {
            xhr.upload.addEventListener( 'progress', evt => {
                if ( evt.lengthComputable ) {
                    loader.uploadTotal = evt.total;
                    loader.uploaded = evt.loaded;
                }
            } );
        }
    }

    _sendRequest() {
        const data = new FormData();
        data.append( 'upload', this.loader.file );
        this.xhr.send( data );
    }
}
import Plugin from '@ckeditor/ckeditor5-core/src/plugin';
import FileRepository from '@ckeditor/ckeditor5-upload/src/filerepository';

export default class GappUploadAdapter extends Plugin {
    static get requires() {
        return [ FileRepository ];
    }

    static get pluginName() {
        return 'GappUploadAdapter';
    }

    init() {
        const url = this.editor.config.get( 'gapp.uploadUrl' );

        if ( !url ) {
            return;
        }

        this.editor.plugins.get( FileRepository ).createUploadAdapter = loader => new UploadAdapter( loader, url, this.editor.t );
    }
}
Now that this is explained, I have two issues.
Once uploaded (my upload on the server side works fine and returns a valid URL in the format {default: url}), why is my image content inserted as a data URI and not as a URL, as in the Easy Image demo here? I want my image to be URL-based.
I would like to listen for some kind of successful-image-upload event (with the image id retrieved from the upload server call) so that I can insert some content in my page. How should I proceed?
Thanks for the help.
PS: I'm building CKEditor with the command 'npm run build' from the repo cloned from https://github.com/ckeditor/ckeditor5-build-classic
EDIT:
Thanks to the accepted answer, I saw that I was wrong about the returned data. I was not returning any URL from my upload backend, which is what caused the editor image to stay in the data-URI form. Once a valid URL was returned, it was parsed automatically and my editor image contained a valid URL.
If the data URI is still used after a successful upload, I would assume that the server response was not processed correctly and the received URL could not be retrieved. I have tested the adapter code you provided and it works fine (with CKFinder on the server side). I would check how the upload server's response looks and whether it can be parsed correctly.
When using CKFinder you will see the upload request and a parsed JSON response in the browser's network panel (screenshots omitted).
You can check whether the response is processed correctly in your adapter, in:
xhr.addEventListener( 'load', () => {
    const response = xhr.response;
    ...
} );
Listening for a successful image upload may be tricky, as there is no event directly related to it. Depending on what exactly you are trying to achieve, you could extend your custom loader so that when a successful response is received (and resolve() is called) you execute some code, as in the sketch below. However, at that point the image element is still not updated (in the model, view and DOM) with the new URL, and the UploadAdapter lacks direct access to the editor instance, so it may be hard to do anything complex.
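For illustration only, a hedged sketch of that approach built on the adapter above: the 'load' listener fires a custom DOM event before resolving. The response.id field is hypothetical and assumes your server returns the image id alongside the URL.
xhr.addEventListener( 'load', () => {
    const response = xhr.response;

    if ( !response || !response.uploaded ) {
        return reject( response && response.error && response.error.message ? response.error.message : genericError );
    }

    // Hypothetical hook: notify the rest of the app before resolving.
    // response.id is an assumed field, not part of any CKEditor contract.
    document.dispatchEvent( new CustomEvent( 'gapp-image-uploaded', {
        detail: { id: response.id, url: response.url }
    } ) );

    resolve( {
        default: response.url
    } );
} );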
A better way may be to listen to model changes, similar to the way it is done in the ImageUploadEditing plugin (see the code here), checking the image's uploadStatus attribute change:
editor.model.document.on( 'change', () => {
    const changes = editor.model.document.differ.getChanges();

    for ( const entry of changes ) {
        const uploaded = entry.type === 'attribute' && entry.attributeNewValue === 'complete' && entry.attributeOldValue === 'uploading';
        console.log( entry );
    }
} );
If it changes from uploading to complete, it means the image was successfully uploaded.
You may also take a look at another answer, which shows how to hook into the FileRepository API to track the entire upload process: https://github.com/ckeditor/ckeditor5-image/issues/243#issuecomment-442393578.

Videojs duration displaying as 0 for entire video

I'm using Video.js. For some reason the duration of videos is displaying as 0, even when the video is fully loaded.
At line 2487 of the video.js file I've made sure this section...
ControlBar.prototype.options_ = {
children: ['playToggle', 'volumeMenuButton', 'currentTimeDisplay', 'timeDivider', 'durationDisplay', 'progressControl', 'liveDisplay', 'remainingTimeDisplay', 'customControlSpacer', 'playbackRateMenuButton', 'chaptersButton', 'descriptionsButton', 'subtitlesButton', 'captionsButton', 'audioTrackButton', 'fullscreenToggle']
};
...includes the 'durationDisplay' property, so does anyone know why the duration is displaying as 0?
The videos are mp4 and are loaded inside an AngularJS directive:
app.directive('engVideo', ['$timeout', '$http', function($timeout, $http) {
    return {
        restrict: 'A',
        priority: 100,
        replace: true,
        templateUrl: 'components/video.html',
        link: function(scope, element, attrs) {
            ....
            function VideoJSPlayerInit(window, videojs) {
                var player = videojs(scope.component.video.id, {
                    html5: {
                        nativeTextTracks: false
                    }
                });
                player.pause();
            }
From a suggestion in the comments, I've also tried listening for the 'loadedmetadata' event when the videojs element is created, like this:
function VideoJSPlayerInit(window, videojs) {
    var player = videojs(scope.component.video.id, {
        html5: {
            nativeTextTracks: false
        }
    }, function() {
        this.on('loadedmetadata', function() {
            console.log("video metadata loaded");
        });
    });
}
But nothing gets output to the console - so I'm guessing no metadata gets loaded(?). I have also changed it to listen for the 'loadeddata' event, and that DOES get logged to the console.
Could this be a video encoding issue? I've been looking into how to export from Premiere with the duration metadata included, but as far as I can tell, it's already there.
Any clues, much appreciated.
OK, I've finally figured it out: it was nothing to do with metadata. The version of video.js we're using was, for some reason, hardcoding the duration value as '0:00'. If it's useful to anyone else, here's what I added (to the video.js file, from line 5241) to get the duration to display correctly:
DurationDisplay.prototype.createEl = function createEl() {
    var el = _Component.prototype.createEl.call(this, 'div', {
        className: 'vjs-duration vjs-time-control vjs-control'
    });

    // following lines are new...
    var intSeconds = parseInt(this.player_.duration());
    var intMinutes = parseInt(intSeconds / 60);
    intSeconds = intSeconds - (60 * intMinutes);
    // zero-pad the seconds so e.g. 125s renders as 2:05, not 2:5
    var strSeconds = (intSeconds < 10 ? '0' : '') + intSeconds;

    this.contentEl_ = Dom.createEl('div', {
        className: 'vjs-duration-display',
        // label the duration time for screen reader users
        //innerHTML: '<span class="vjs-control-text">' + this.localize('Duration Time') + '</span> 0:00' // - old line
        innerHTML: '<span class="vjs-control-text">' + this.localize('Duration Time') + '</span>' + intMinutes + ':' + strSeconds
    }, {
        // tell screen readers not to automatically read the time as it changes
        'aria-live': 'off'
    });

    el.appendChild(this.contentEl_);
    return el;
};
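A lighter alternative, offered as a sketch rather than a drop-in fix: instead of patching video.js internals, the duration can usually be read once the player reports it, via the standard 'durationchange' media event. This assumes a stock Video.js player; the element id is a placeholder.
var player = videojs('my-video'); // placeholder element id

// 'durationchange' fires whenever the reported duration becomes known or
// changes, so the value is only read once it is actually available.
player.on('durationchange', function() {
    console.log('duration is now', player.duration());
});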

PDF Blob is not showing content, Angular 2

I have a problem very similar to PDF Blob - Pop up window not showing content, but I am using Angular 2. The answer to that question was to set the responseType to arrayBuffer, but that doesn't work in Angular 2; the error is that responseType does not exist on type RequestOptionsArgs. I also tried to extend it via BrowserXhr, but it still doesn't work (https://github.com/angular/http/issues/83).
My code is:
createPDF(customerServiceId: string) {
    console.log("Sending GET on " + this.getPDFUrl + "/" + customerServiceId);
    this._http.get(this.getPDFUrl + '/' + customerServiceId).subscribe(
        (data) => {
            this.handleResponse(data);
        });
}
And the handleResponse method:
handleResponse(data: any) {
    console.log("[Receipt service] GET PDF byte array " + JSON.stringify(data));
    var file = new Blob([data._body], { type: 'application/pdf' });
    var fileURL = URL.createObjectURL(file);
    window.open(fileURL);
}
I also tried the saveAs method from FileSaver.js, but it's the same problem: the pdf opens, but the content is not displayed. Thanks.
I had a lot of problems with downloading and showing the content of a PDF; I probably wasted a day or two on it, so I'll post a working example of how to successfully download a PDF or open it in a new tab:
myService.ts
downloadPDF(): any {
    return this._http.get(url, { responseType: ResponseContentType.Blob }).map(
        (res) => {
            return new Blob([res.blob()], { type: 'application/pdf' });
        }
    );
}
myComponent.ts
this.myService.downloadPDF().subscribe(
    (res) => {
        saveAs(res, "myPDF.pdf"); // if you want to save it - you need file-saver for this: https://www.npmjs.com/package/file-saver
        var fileURL = URL.createObjectURL(res);
        window.open(fileURL); // if you want to open it in a new tab
    }
);
NOTE
It is also worth mentioning that if you are extending the Http class to add headers to all your requests or something like that, it can also break PDF downloads, because you will override RequestOptions, which is where we add responseType: ResponseContentType.Blob; this will get you the error "The request body isn't either a blob or an array buffer". A sketch of a safer override follows.
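To illustrate that note, here is a hedged sketch (my own assumption, not code from this thread) of an Http subclass that appends a header per request instead of replacing the default RequestOptions, so a per-call responseType such as ResponseContentType.Blob survives:
import { Http, ConnectionBackend, Headers, RequestOptions, RequestOptionsArgs, Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';

export class AuthHttp extends Http {
    constructor(backend: ConnectionBackend, defaultOptions: RequestOptions) {
        super(backend, defaultOptions);
    }

    get(url: string, options?: RequestOptionsArgs): Observable<Response> {
        const opts = options || {};
        opts.headers = opts.headers || new Headers();
        opts.headers.append('Authorization', 'Basic ' + 'YOUR_TOKEN'); // placeholder token
        // The caller's responseType (e.g. ResponseContentType.Blob) is
        // passed through untouched instead of being overridden.
        return super.get(url, opts);
    }
}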
ANGULAR 5
I had the same problem and lost a few days to it.
My answer may help others; this is what got the PDF to render.
For me, even though I specified responseType: 'arraybuffer', it was unable to take it.
For that you need to specify responseType: 'arraybuffer' as 'json'. (Reference)
Working code:
downloadPDF(): any {
    return this._http.get(url, { responseType: 'blob' as 'json' }).subscribe(
        (res) => {
            var file = new Blob([res], { type: 'application/pdf' });
            var fileURL = URL.createObjectURL(file);
            window.open(fileURL);
        }
    );
}
Referred from the link below:
https://github.com/angular/angular/issues/18586
Amit,
You can change the filename by appending a variable to the end of the string,
so saveAs(res, "myPDF.pdf");
becomes
saveAs(res, "myPDF_" + someVariable + ".pdf");
where someVariable might be a counter or, my personal favorite, a date-time string.
This worked for me
var req = this.getPreviewPDFRequest(fd);
this.postData(environment.previewPDFRFR, req).then(res => {
    res.blob().then(blob => {
        console.clear();
        console.log(req);
        console.log(JSON.stringify(req));
        const fileURL = URL.createObjectURL(blob);
        window.open(fileURL, '', 'height=650,width=840');
    })
});
Server side (Java/Jetty): a REST service that returns a File response.
The File response itself will automatically be turned into a PDF blob by Jetty (because of the @Produces("application/pdf") annotation), in order to be sent to and read by the web client.
@GET
@Path("/download-pdf/{id}")
@Produces("application/pdf")
public Response downloadPDF(@ApiParam(value = "Id of the report record")
                            @PathParam("id") Long id) {
    ResponseBuilder response = null;
    try {
        PDFReportService service = new PDFReportService();
        File reportFile = service.getPDFReportFile(id);
        response = Response.ok((Object) reportFile);
        response.header("Content-Disposition", "attachment; filename=" + reportFile.getName());
        return response.build();
    } catch (DomainException e) {
        response = Response.serverError().entity("server.error");
    }
    return response.build();
}
Client side code (Angular 2): grab the blob and open it in a new browser tab.
The key is to ensure that you read the request response as a blob (as the server returned a blob, in my case).
Now, I tried hard, but I finally concluded that Angular 2 had not implemented any function to handle blob responses (neither res['_body'] nor res.blob() worked for me).
So I found no workaround other than using jQuery's ajax to perform that blob file request, as follows:
public downloadPDFFile() {
    let fileURL = serverURL + "/download-pdf/" + id;
    let userToken: string = your_token;
    showWaitingLoader();
    $.ajax({
        url: fileURL,
        cache: false,
        headers: {
            "Content-Type": "application/json",
            "Authorization": "Basic " + userToken
        },
        xhrFields: {
            responseType: 'blob' // Most important: configure the response type as a blob
        },
        success: function(blobFile) {
            const url = window.URL.createObjectURL(blobFile);
            window.open(url);
            stopWaitingLoader();
        },
        error: function(e) {
            console.log("DOWNLOAD ERROR :", e);
        }
    });
}

How can I limit the file size for a direct Cloudinary upload (client side, jQuery)

I can't find in the documentation how to limit the size of the file when using direct upload. I am generating the file upload input field on the server side using:
cloudinary.uploader.image_upload_tag
After that, the file is uploaded from the client side.
Also, $cloudinary.config seems to be designed for specifying the API key and bucket name, not for other configuration.
I'd appreciate any light on this!
If you are using direct upload, you can set the maximum uploadable file size in the direct-upload JavaScript. Below is a code sample for that.
$(function () {
    $('#direct_upload input[type="file"]')
        .fileupload({
            dropZone: '#direct_upload',
            disableImageResize: true,
            imageMaxWidth: 600,
            imageMaxHeight: 600,
            maxFileSize: 2097152, // 2 MB (this option is measured in bytes)
            acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,
            start: function () {
                $("#direct_upload .progress").attr("style", "display:block");
            },
            progress: function (e, data) {
                var com = Math.round((data.loaded * 100.0) / data.total);
                $("#direct_upload .progress .progress-bar").attr("style", "width:" + com + "%");
                $("#direct_upload .progress .progress-bar").text(com + "% Complete");
            }
        })
        .on('cloudinarydone', function (e, data) {
            $('#submit-btn').attr('style', '');
            $("#direct_upload .progress").attr("style", "display:none");
            $("#direct_upload .progress .progress-bar").attr("style", "width:0%");
            $("#direct_upload .progress .progress-bar").text("0% Complete");
            $.post(this.form.action, $(this.form).serialize()).always(function (result, status, jqxhr) {
                $('.status_value').text(result.errors ? JSON.stringify(result.errors) : status);
            });
            var image_url = $.cloudinary.image(data.result.public_id, {
                format: data.result.format, width: 75, height: 75, style: "display:none", crop: "fill"
            });
            $('.Submitted').append(image_url);
        });
});
Hope this works for you.
maxFileSize: 2097152
This caps your maximum file size at 2 MB (the option is measured in bytes, so 2 MB is 2 * 1024 * 1024 = 2097152).
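If you'd rather not rely on the fileupload validation options, here is a minimal sketch (my own assumption, not from the Cloudinary docs) that checks the selected file's size by hand before any upload starts:
$('#direct_upload input[type="file"]').on('change', function () {
    var file = this.files[0];
    var maxBytes = 2 * 1024 * 1024; // 2 MB

    if (file && file.size > maxBytes) {
        alert('File is too large (max 2 MB)');
        this.value = ''; // clear the selection so nothing is uploaded
    }
});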