pion/laravel-chunk-upload Laravel not working with large files - laravel-8

I am using resumable.js and pion/laravel-chunk-upload for uploading large files. It works for small files but fails for files larger than 500 MB. The file is split into chunks and the chunk parts arrive on the server, but they are never re-assembled into the final file in the target directory.
Namespaces
use Illuminate\Http\Request;
use Illuminate\Http\UploadedFile;
use Pion\Laravel\ChunkUpload\Exceptions\UploadMissingFileException;
use Pion\Laravel\ChunkUpload\Handler\AbstractHandler;
use Pion\Laravel\ChunkUpload\Handler\HandlerFactory;
use Pion\Laravel\ChunkUpload\Receiver\FileReceiver;
use Illuminate\Support\Facades\Storage;
Controller
$receiver = new FileReceiver('file', $request, HandlerFactory::classFromRequest($request));
if (!$receiver->isUploaded()) {
    // file not uploaded
    throw new UploadMissingFileException();
}
$fileReceived = $receiver->receive(); // receive file
if ($fileReceived->isFinished()) { // file uploading is complete / all chunks are uploaded
    $file = $fileReceived->getFile(); // get file
    $extension = $file->getClientOriginalExtension();
    $fileName = str_replace('.'.$extension, '', $file->getClientOriginalName()); // file name without extension
    $fileName .= '_' . md5(time()) . '.' . $extension; // a unique file name

    $disk = Storage::disk('new');
    $path = $disk->putFileAs('resources/techpacks', $file, $fileName);

    // delete chunked file
    unlink($file->getPathname());

    // return [
    //     'path' => asset('storage/' . $path),
    //     'filename' => $fileName
    // ];
}
Resumable
let browseFile = $('#browseFile');
let resumable = new Resumable({
    target: "{{ url('admin/techpack/insert') }}",
    query: { _token: '{{ csrf_token() }}' }, // CSRF token
    fileType: ['zip'],
    chunkSize: 10 * 1024 * 1024, // default is 1*1024*1024; this should be less than your maximum limit in php.ini
    headers: {
        'Accept': 'application/json'
    },
    testChunks: false,
    throttleProgressCallbacks: 1,
});
resumable.assignBrowse(browseFile[0]);

resumable.on('fileAdded', function (file) { // triggered when a file is picked
    showProgress();
    resumable.upload(); // to actually start uploading
});
resumable.on('fileProgress', function (file) { // triggered when file progress updates
    updateProgress(Math.floor(file.progress() * 100));
});
resumable.on('fileSuccess', function (file, response) { // triggered when the file upload completes
    // response = JSON.parse(response)
    console.log(response);
});
resumable.on('fileError', function (file, response) { // triggered when there is any error
    alert('file uploading error.');
});

let progress = $('.progress');
function showProgress() {
    progress.find('.progress-bar').css('width', '0%');
    progress.find('.progress-bar').html('0%');
    progress.find('.progress-bar').removeClass('bg-success');
    progress.show();
}
function updateProgress(value) {
    progress.find('.progress-bar').css('width', `${value}%`);
    progress.find('.progress-bar').html(`${value}%`);
}
function hideProgress() {
    progress.hide();
}
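If the controller's commented-out return block is re-enabled so the endpoint responds with JSON, the fileSuccess handler above can parse that response. A minimal sketch, assuming the path/filename fields from that commented-out return (not a confirmed API):

resumable.on('fileSuccess', function (file, response) { // sketch: replaces the handler above
    // Assumes the controller returns ['path' => ..., 'filename' => ...] as JSON
    var result = JSON.parse(response);
    console.log('Stored at ' + result.path + ' as ' + result.filename);
    hideProgress();
});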
(Screenshot: chunk folder)
(Screenshot: after re-building)
(Screenshot: added to target folder)
But I can't open the final file. Please guide me.
(Screenshot: Windows error)
(Screenshot: Resumable error)

Related

Error when merging multiple pdf files into one file in Google Drive

I used the following code (taken from PDF.co) to merge multiple PDF files in Google Drive:
/**
* Initial Declaration and References
*/
// Get the active spreadsheet and the active sheet
ss = SpreadsheetApp.getActiveSpreadsheet();
ssid = ss.getId();
// Look in the same folder the sheet exists in. For example, if this template is in
// My Drive, it will return all of the files in My Drive.
var ssparents = DriveApp.getFileById(ssid).getParents();
// Loop through all the files and add the values to the spreadsheet.
var folder = ssparents.next();
/**
* Add PDF.co Menus in Google Spreadsheet
*/
function onOpen() {
var menuItems = [
{name: 'Get All PDF From Current Folder', functionName: 'getPDFFilesFromCurFolder'},
{name: 'Merge PDF URLs Listed In Cell', functionName: 'mergePDFDocuments'}
];
ss.addMenu('PDF.co', menuItems);
}
/**
* Get all PDF files from current folder
*/
function getPDFFilesFromCurFolder() {
var files = folder.getFiles();
var pdfUrlCell = ss.getRange("A4");
var allFileUrls = [];
while (files.hasNext()) {
var file = files.next();
var fileName = file.getName();
if(fileName.endsWith(".pdf")){
// Make the file publicly accessible via URL so that it can be accessed by the external API
var resource = {role: "reader", type: "anyone"};
Drive.Permissions.insert(resource, file.getId());
// Add Url
allFileUrls.push(file.getDownloadUrl());
}
pdfUrlCell.setValue(allFileUrls.join(","));
}
}
function getPDFcoApiKey(){
// Get PDF.co API Key Cell
let pdfCoAPIKeyCell = ss.getRange("B1");
return pdfCoAPIKeyCell.getValue();
}
/**
* Function which merges documents using PDF.co
*/
function mergePDFDocuments() {
// Get Cells for Input/Output
let pdfUrlCell = ss.getRange("A4");
let resultUrlCell = ss.getRange("B4");
let pdfUrl = pdfUrlCell.getValue();
// Prepare Payload
const data = {
"async": true, // As we have large volumn of PDF files, Enabling async mode
"name": "result",
"url": pdfUrl
};
// Prepare Request Options
const options = {
'method' : 'post',
'contentType': 'application/json',
'headers': {
"x-api-key": getPDFcoApiKey()
},
// Convert the JavaScript object to a JSON string.
'payload' : JSON.stringify(data)
};
// Get Response
// https://developers.google.com/apps-script/reference/url-fetch
const resp = UrlFetchApp.fetch('https://api.pdf.co/v1/pdf/merge', options);
// Response Json
const respJson = JSON.parse(resp.getContentText());
if(respJson.error){
console.error(respJson.message);
}
else{
// Job Success Callback
const successCallbackFn = function(){
// Upload file to Google Drive
uploadFile(respJson.url);
// Update Cell with result URL
resultUrlCell.setValue(respJson.url);
}
// Check PDF.co Job Status
checkPDFcoJobStatus(respJson.jobId, successCallbackFn);
}
}
/**
* Checks PDF.co Job Status
*/
function checkPDFcoJobStatus(jobId, successCallbackFn){
// Prepare Payload
const data = {
"jobid": jobId
};
// Prepare Request Options
const options = {
'method' : 'post',
'contentType': 'application/json',
'headers': {
"x-api-key": getPDFcoApiKey()
},
// Convert the JavaScript object to a JSON string.
'payload' : JSON.stringify(data)
};
// Get Response
// https://developers.google.com/apps-script/reference/url-fetch
const resp = UrlFetchApp.fetch('https://api.pdf.co/v1/job/check', options);
// Response Json
const respJson = JSON.parse(resp.getContentText());
if(respJson.status === "working"){
// Pause for 3 seconds
Utilities.sleep(3 * 1000);
// And check Job again
checkPDFcoJobStatus(jobId, successCallbackFn);
}
else if(respJson.status == "success"){
// Invoke Success Callback Function
successCallbackFn();
}
else {
console.error(`Job Failed with status ${respJson.status}`);
}
}
/**
* Save file URL to specific location
*/
function uploadFile(fileUrl) {
var fileContent = UrlFetchApp.fetch(fileUrl).getBlob();
folder.createFile(fileContent);
}
It runs perfectly the first time, but then gives an error:
Exception: Request failed for https://api.pdf.co returned code 402. Truncated server response: {"status":"error","errorCode":402,"error":true,"message":"Not enough credits, subscription expired or metered use is not allowed. Please review cre... (use muteHttpExceptions option to examine full response).
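The truncated message itself points at the next debugging step: pass muteHttpExceptions so the full 402 body can be logged instead of UrlFetchApp throwing. A minimal sketch that reuses the endpoint and key lookup from the code above:

// Debugging sketch only: same merge request, but with muteHttpExceptions enabled so a
// 4xx/5xx response is returned (and can be inspected) instead of raising an exception.
function debugMergeRequest(pdfUrl) {
  const options = {
    method: 'post',
    contentType: 'application/json',
    headers: { 'x-api-key': getPDFcoApiKey() },
    payload: JSON.stringify({ async: true, name: 'result', url: pdfUrl }),
    muteHttpExceptions: true
  };
  const resp = UrlFetchApp.fetch('https://api.pdf.co/v1/pdf/merge', options);
  console.log('HTTP ' + resp.getResponseCode());
  console.log(resp.getContentText()); // full PDF.co error payload, e.g. the credits message
}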

Return csv file - Deno

I'd like to return a CSV file from an API built with Deno and the oak library. I've generated my CSV file, but I don't know how to return it from my GET endpoint for download.
let fileName: string = `./temp/activities`
fileName += start ? `-(${start})` : ``
fileName += end ? `-(${end})` : `-(${moment().format('YYYY-MMM-DD')})`
fileName += `.csv`
const file = await Deno.open(fileName, { write: true, create: true, truncate: true });
const header = ["company", "project", "url_project", "activity", "url_activity", "date"];
await writeCSVObjects(file, activities, { header });
file.close();
ctx.response.body = await Deno.readFile(fileName);
ctx.response.headers.set("Content-Type", "application/force-download")
ctx.response.headers.set('Content-disposition', 'attachment; filename=activities.csv');
After reading the Deno docs, I found the correct method:
await Deno.readFile(fileName);
(https://deno.land/typedoc/index.html#readfile)
and it's important to add the response header:
ctx.response.headers.set('Content-disposition', 'attachment; filename=activities.csv');
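Putting the pieces together, here is a minimal sketch of the GET endpoint (the router variable is assumed to be an oak Router, the route path and file name are placeholders, and text/csv is used in place of application/force-download):

// Sketch only: read the generated CSV and send it as a download attachment.
router.get("/activities/export", async (ctx) => {
  const fileName = "./temp/activities.csv"; // placeholder; build the real name as shown above
  ctx.response.headers.set("Content-Type", "text/csv");
  ctx.response.headers.set("Content-Disposition", "attachment; filename=activities.csv");
  ctx.response.body = await Deno.readFile(fileName); // Uint8Array body
});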

Sending audio file created with RecordRTC to my server

I am new to working with JavaScript, PHP, and servers generally. I am working on a web page that will record audio from the user and save it to my server, using RecordRTC. I'm a bit confused about the XMLHttpRequest portion: how do I alter the following code to send to my server instead of the webrtc server?
function uploadToServer(recordRTC, callback) {
var blob = recordRTC instanceof Blob ? recordRTC : recordRTC.blob;
var fileType = blob.type.split('/')[0] || 'audio';
var fileName = (Math.random() * 1000).toString().replace('.', '');
if (fileType === 'audio') {
fileName += '.' + (!!navigator.mozGetUserMedia ? 'ogg' : 'wav');
} else {
fileName += '.webm';
}
// create FormData
var formData = new FormData();
formData.append(fileType + '-filename', fileName);
formData.append(fileType + '-blob', blob);
callback('Uploading ' + fileType + ' recording to server.');
makeXMLHttpRequest('https://webrtcweb.com/RecordRTC/', formData, function(progress) {
if (progress !== 'upload-ended') {
callback(progress);
return;
}
var initialURL = 'https://webrtcweb.com/RecordRTC/uploads/';
callback('ended', initialURL + fileName);
listOfFilesUploaded.push(initialURL + fileName);
});
}
Via my web hosting provider, I'm using an Apache server, phpMyAdmin, and a MySQL database. Do I just replace
makeXMLHttpRequest('https://webrtcweb.com/RecordRTC/'
with "https://mywebsite.com" and replace
var initialURL = 'https://webrtcweb.com/RecordRTC/uploads/';
with the path to the folder I created to hold these audio files (https://mywebsite.com/uploads)? Then set permissions on that folder to allow public write access (this seems unsafe; is there a better method)?
This is the makeXMLHttpRequest function:
function makeXMLHttpRequest(url, data, callback) {
var request = new XMLHttpRequest();
request.onreadystatechange = function() {
if (request.readyState == 4 && request.status == 200) {
callback('upload-ended');
}
};
request.upload.onloadstart = function() {
callback('Upload started...');
};
request.upload.onprogress = function(event) {
callback('Upload Progress ' + Math.round(event.loaded / event.total * 100) + "%");
};
request.upload.onload = function() {
callback('progress-about-to-end');
};
request.upload.onloadend = function() { // was a second onload handler, which would overwrite the one above
callback('progress-ended');
};
request.upload.onerror = function(error) {
callback('Failed to upload to server');
console.error('XMLHttpRequest failed', error);
};
request.upload.onabort = function(error) {
callback('Upload aborted.');
console.error('XMLHttpRequest aborted', error);
};
request.open('POST', url);
request.send(data);
}
Please make sure that your PHP server is running over SSL (HTTPS).
Create a directory and name it uploadFiles
Create a sub-directory and name it uploads
Structure of the directories:
https://server.com/uploadFiles -> to upload files
https://server.com/uploadFiles/uploads -> to store files
index.php
Now create or upload the following index.php file at this path: https://server.com/uploadFiles
<?php
// File Name: "index.php"
// via https://github.com/muaz-khan/RecordRTC/tree/master/RecordRTC-to-PHP
foreach(array('video', 'audio') as $type) {
if (isset($_FILES["${type}-blob"])) {
echo 'uploads/';
$fileName = $_POST["${type}-filename"];
$uploadDirectory = 'uploads/'.$fileName;
if (!move_uploaded_file($_FILES["${type}-blob"]["tmp_name"], $uploadDirectory)) {
echo(" problem moving uploaded file");
}
echo($fileName);
}
}
?>
Why sub-directory?
The nested uploads directory will be used to store your uploaded files. You will get URLs similar to this:
https://server.com/uploadFiles/uploads/filename.webm
Longer file upload issues:
https://github.com/muaz-khan/RecordRTC/wiki/PHP-Upload-Issues
upload_max_filesize MUST be 500MB or greater.
max_execution_time MUST be at least 10800 (three hours) or greater.
It is recommended to modify php.ini; otherwise, set these values in a .htaccess file.
How to link my own server?
Simply replace https://webrtcweb.com/RecordRTC/ with your own URL, e.g. https://server.com/uploadFiles/.
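In code, that replacement looks roughly like this inside uploadToServer from the question (the server.com URLs are placeholders for your own domain and the directory layout described above):

// Sketch only: point the existing helper at your own index.php instead of webrtcweb.com.
var uploadEndpoint   = 'https://server.com/uploadFiles/';          // directory containing index.php
var uploadedFilesUrl = 'https://server.com/uploadFiles/uploads/';  // where index.php moves the files

makeXMLHttpRequest(uploadEndpoint, formData, function(progress) {
    if (progress !== 'upload-ended') {
        callback(progress);
        return;
    }
    callback('ended', uploadedFilesUrl + fileName);
    listOfFilesUploaded.push(uploadedFilesUrl + fileName);
});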

Example code to use GridFS using mongoskin to upload file from form

I am using mongoskin to connect to MongoDB in my project. Now I have a requirement to use GridFS to upload images, audio, etc. I have an HTML form to upload these files.
I tried to find example code for uploading a file using mongoskin, but couldn't find a good one.
Please help.
After spending many hours, I am able to use mongoskin to upload a file to GridFS. I'm not sure this is perfect code, but I'm sharing it here because I couldn't find a single working example on Google :-)
https://github.com/dilipkumar2k6/gridfs-mongoskin
var DBModule = require('./DBModule.js');
var Grid = require('gridfs-stream');
var mongoskin = require('mongoskin');
//Upload file to server and also update the database
exports.uploadContent = function (req, res) {
console.log('Calling uploadFile inside FileUploadService');
req.pipe(req.busboy);
req.busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
console.log('uploadFile after busboy fieldname: ' + fieldname + ", file : " + file + ", filename : " + filename);
// make sure the db instance is open before passing into `Grid`
var gfs = Grid(DBModule.db, mongoskin);
// Get metadata
var host = req.headers['host'];
var metadata = {contentType: mimetype};
var writestream = gfs.createWriteStream({filename: filename, metadata: metadata});
file.pipe(writestream);
writestream.on('close', function (file) {
// return URL to access the uploaded content
var path = "contents/" + file._id;
res.json({"path": path});
});
writestream.on('error', function (err) {
log.error({err: err}, 'Failed to upload file to database');
res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
res.json({error: err});
});
});
};
//view file from database
exports.previewContent = function (req, res) {
var contentId = new DBModule.BSON.ObjectID(req.params.contentid);
console.log('Calling previewFile inside FileUploadService for content id ' + contentId);
var gs = DBModule.db.gridStore(contentId, 'r');
gs.read(function (err, data) {
if (!err) {
//res.setHeader('Content-Type', metadata.contentType);
res.end(data);
} else {
log.error({err: err}, 'Failed to read the content for id ' + contentId);
res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
res.json({error: err});
}
});
};
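uploadContent reads req.busboy, so it assumes busboy middleware has already been attached to the request. A minimal wiring sketch under that assumption (the module path and routes are hypothetical):

// Sketch only: Express wiring for the two handlers above.
var express = require('express');
var busboy = require('connect-busboy');                 // exposes the req.busboy used in uploadContent
var FileUploadService = require('./FileUploadService'); // hypothetical module exporting the handlers above

var app = express();
app.use(busboy());

app.post('/contents', FileUploadService.uploadContent);            // streams the upload into GridFS
app.get('/contents/:contentid', FileUploadService.previewContent); // reads it back by ObjectID

app.listen(3000);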
Try this to store the data using GridFS (by default it uses mongoskin). It worked for me.
var ObjectID = require('mongodb').ObjectID,
GridStore = require('mongodb').GridStore;
exports.saveMedia = function(db, media, next) {
console.log(media)
db.open(function (err, db) {
// Create a file and open it
var gridStore = new GridStore(db, new ObjectID(), "w");
gridStore.open(function (err, gridStore) {
// Write some content to the file
gridStore.write(new Buffer(media), function (err, gridStore) {
// Flush the file to db
gridStore.close(function (err, fileData) {
// returns the stored file's data
next(null, fileData);
});
});
});
});
}

Uploading large files to Amazon S3

I've managed to get the following script to work with smaller files. But when I try to upload files around 10 MB or more, it says the upload completed, yet the file does not show up in my S3 bucket.
Any ideas why it uploads smaller files but not files of 10 MB or greater?
<?php
//System path for our website folder
define('DOCROOT', realpath(dirname(__FILE__)).DIRECTORY_SEPARATOR);
//URL for our website
define('WEBROOT', htmlentities(
substr($_SERVER['REQUEST_URI'], 0, strcspn($_SERVER['REQUEST_URI'], "\n\r")),
ENT_QUOTES
));
//Which bucket are we placing our files into
$bucket = 'bucket.mysite.com';
// This will place uploads into the '20100920-234138' folder in the $bucket bucket
$folder = date('Ymd-His').'/'; //Include trailing /
//Include required S3 functions
require_once DOCROOT."includes/s3.php";
//Generate policy and signature
list($policy, $signature) = S3::get_policy_and_signature(array(
'bucket' => $bucket,
'folder' => $folder,
));
?>
<script type="text/javascript">
$(document).ready(function() {
$("#file_upload").uploadify({
'uploader' : '<?= WEBROOT ?>files/uploadify/uploadify.swf',
'buttonText' : 'Browse',
'cancelImg' : '<?= WEBROOT ?>files/uploadify/cancel.png',
'script' : 'http://s3.amazonaws.com/<?= $bucket ?>',
'scriptAccess' : 'always',
'method' : 'post',
'scriptData' : {
"AWSAccessKeyId" : "<?= S3::$AWS_ACCESS_KEY ?>",
"key" : "${filename}",
"acl" : "authenticated-read",
"policy" : "<?= $policy ?>",
"signature" : "<?= $signature ?>",
"success_action_status" : "201",
"key" : encodeURIComponent(encodeURIComponent("<?= $folder ?>${filename}")),
"fileext" : encodeURIComponent(encodeURIComponent("")),
"Filename" : encodeURIComponent(encodeURIComponent(""))
},
'fileExt' : '*.*',
'fileDataName' : 'file',
'simUploadLimit' : 2,
'multi' : true,
'auto' : true,
'onError' : function(errorObj, q, f, err) { console.log(err); },
'onComplete' : function(event, ID, file, response, data) { console.log(file); }
});
});
</script>
<?php
class S3 {
public static $AWS_ACCESS_KEY = '< Your access key >';
public static $AWS_SECRET_ACCESS_KEY = '< Your secret key >';
/*
* Purpose:
* Actionscript encodes '+' characters in the signature incorrectly - it makes
* them a space instead of %2B the way PHP does. This causes uploadify to error
* out on upload. This function recursively generates a new policy and signature
* until a signature without a + character is created.
* Accepts: array $data
* Returns: policy and signature
*/
public static function get_policy_and_signature( array $data )
{
$policy = self::get_policy_doc( $data );
$signature = self::get_signature( $policy );
if ( strpos($signature, '+') !== FALSE )
{
$data['timestamp'] = intval(@$data['timestamp']) + 1;
return self::get_policy_and_signature( $data );
}
return array($policy, $signature);
}
public static function get_policy_doc(array $data)
{
return base64_encode(
'{'.
'"expiration": "'.gmdate('Y-m-d\TH:i:s\Z', time()+60*60*24+intval(#$data['timestamp'])).'",'.
'"conditions": '.
'['.
'{"bucket": "'.$data['bucket'].'"},'.
'["starts-with", "$key", ""],'.
'{"acl": "authenticated-read"},'.
//'{"success_action_redirect": "'.$SWFSuccess_Redirect.'"},'.
'{"success_action_status": "201"},'.
'["starts-with","$key","'.str_replace('/', '\/', $data['folder'] ).'"],'.
'["starts-with","$Filename",""],'.
'["starts-with","$folder",""],'.
'["starts-with","$fileext",""],'.
'["content-length-range",0,5242880]'.
']'.
'}'
);
}
public static function get_signature( $policy_doc ) {
return base64_encode(hash_hmac(
'sha1', $policy_doc, self::$AWS_SECRET_ACCESS_KEY, true
));
}
}
Problem solved. The issue was with this line:
'["content-length-range",0,5242880]'
That condition caps uploads at 5,242,880 bytes (5 MB), which is exactly why files around 10 MB were rejected. I commented it out and it works as it should, with no limit on size (raising the upper bound would also work if you want to keep a cap).