React-native-fs : How to use readDir recursively using .map()? - react-native

I tried to get all the files and directories available in a folder using react-native-fs.
I created a function to get all the files and directories in a folder recursively. I call this function this way:
const data = await scanDir(path);
I first tried using the .map() function, but my function returns only some of the elements:
async function scanDir(pathOfDirToScan, data = {directory: [], files: []}) {
const readedFilesAndDir = await FS.readDir(pathOfDirToScan);
Object.keys(readedFilesAndDir).map(async key => {
if (readedFilesAndDir[key].isDirectory()) {
const directoryPath = pathOfDirToScan + '/' + readedFilesAndDir[key].name;
data.directory.push(directoryPath);
data = await scanDir(directoryPath, data);
} else {
data.files.push(pathOfDirToScan + '/' + readedFilesAndDir[key].name);
}
});
return data;
}
It seems my function returns the data as soon as map has been called, but the async callbacks keep running after that.
I then tried with a for loop and it works as intended:
async function scanDir(pathOfDirToScan, data = {directory: [], files: []}) {
const readedFilesAndDir = await FS.readDir(pathOfDirToScan);
for (let i = 0; i < readedFilesAndDir.length; i++) {
if (readedFilesAndDir[i].isDirectory()) {
const directoryPath = pathOfDirToScan + '/' + readedFilesAndDir[i].name;
data.directory.push(directoryPath);
data = await scanDir(directoryPath, data);
} else {
data.files.push(pathOfDirToScan + '/' + readedFilesAndDir[i].name);
}
}
return data;
}
What should I do to make the function work properly using .map()?

FS.readDir(dirpath) returns an array of objects, as per the docs, so Object.keys(obj) is not required for iteration; readedFilesAndDir.map() will do the task. Because the map callback is async, you also need to await the promises it produces (for example with Promise.all) before returning data.
Here is your own code with those corrections. Hope it helps:
async function scanDir(pathOfDirToScan, data = {directory: [], files: []}) {
const readedFilesAndDir = await FS.readDir(pathOfDirToScan);
// wait for all the async callbacks to finish before returning data
await Promise.all(readedFilesAndDir.map(async eachItem => {
if (eachItem.isDirectory()) {
const directoryPath = pathOfDirToScan + '/' + eachItem.name;
data.directory.push(directoryPath);
data = await scanDir(directoryPath, data);
} else {
data.files.push(pathOfDirToScan + '/' + eachItem.name);
}
}));
return data;
}
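Note that the async callback passed to .map() returns a promise for every entry; awaiting them all (for example with Promise.all, as above) is what guarantees data is fully populated before the function returns. A minimal usage sketch, assuming FS.DocumentDirectoryPath as the starting folder:
const data = await scanDir(FS.DocumentDirectoryPath);
console.log(data.directory.length + ' directories, ' + data.files.length + ' files');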

Related

Cloudflare ESI worker / TypeError: Body has already been used

I'm trying to use a Cloudflare Worker to manage my backend ESI fragments, but I get an error:
Uncaught (in promise) TypeError: Body has already been used. It can only be used once. Use tee() first if you need to read it twice.
Uncaught (in response) TypeError: Body has already been used. It can only be used once. Use tee() first if you need to read it twice.
I can't find where the body has already been used.
The process is:
get a response with the parts
transform the body by replacing the parts fragments with sub backend calls (the streamTransformBody function)
return the response
addEventListener("fetch", event => {
event.respondWith(handleRequest(event.request))
});
const esiHeaders = {
"user-agent": "cloudflare"
}
async function handleRequest(request) {
// get cookies from the request
if(cookie = request.headers.get("Cookie")) {
esiHeaders["Cookie"] = cookie
console.log(cookie)
}
// Clone the request so that it's no longer immutable
newRequest = new Request(request)
// remove cookie from request
newRequest.headers.delete('Cookie')
// Add header to get <esi>
newRequest.headers.set("Surrogate-Capability", "abc=ESI/1.0")
console.log(newRequest.url);
const response = await fetch(newRequest);
let contentType = response.headers.get('content-type')
if (!contentType || !contentType.startsWith("text/")) {
return response
}
// Clone the response so that it's no longer immutable
const newResponse = new Response(response.body, response);
let { readable, writable } = new TransformStream()
streamTransformBody(newResponse.body, writable)
newResponse.headers.append('x-workers-hello', 'Hello from Cloudflare Workers');
return newResponse;
}
async function streamTransformBody(readable, writable) {
const startTag = "<".charCodeAt(0);
const endTag = ">".charCodeAt(0);
let reader = readable.getReader();
let writer = writable.getWriter();
let templateChunks = null;
while (true) {
let { done, value } = await reader.read();
if (done) break;
while (value.byteLength > 0) {
if (templateChunks) {
let end = value.indexOf(endTag);
if (end === -1) {
templateChunks.push(value);
break;
} else {
templateChunks.push(value.subarray(0, end));
await writer.write(await translate(templateChunks));
templateChunks = null;
value = value.subarray(end + 1);
}
}
let start = value.indexOf(startTag);
if (start === -1) {
await writer.write(value);
break;
} else {
await writer.write(value.subarray(0, start));
value = value.subarray(start + 1);
templateChunks = [];
}
}
}
await writer.close();
}
async function translate(chunks) {
const decoder = new TextDecoder();
let templateKey = chunks.reduce(
(accumulator, chunk) =>
accumulator + decoder.decode(chunk, { stream: true }),
""
);
templateKey += decoder.decode();
return handleTemplate(new TextEncoder(), templateKey);
}
async function handleTemplate(encoder, templateKey) {
const linkRegex = /(esi:include.*src="(.*?)".*\/)/gm
let result = linkRegex.exec(templateKey);
let esi
if (!result) {
return encoder.encode(`<${templateKey}>`);
}
if (result[2]) {
esi = await subRequests(result[2]);
}
return encoder.encode(
`${esi}`
);
}
async function subRequests(target){
target = esiHost + target
const init = {
method: 'GET',
headers: esiHeaders
}
let response = await fetch(target, init)
if (!response.ok) {
return ''
}
let text = await response.text()
return '<!--esi-->' + text + '<!--/esi-->'
}
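In this code, newResponse is built directly on response.body, and that same body is then both consumed by streamTransformBody and returned to the client, which is typically what triggers this error. The usual Workers pattern is to return a response built from the readable end of the TransformStream instead - a minimal sketch (not a confirmed fix for this exact setup), keeping the rest of the code unchanged:
let { readable, writable } = new TransformStream()
streamTransformBody(response.body, writable)
const newResponse = new Response(readable, response)
newResponse.headers.append('x-workers-hello', 'Hello from Cloudflare Workers')
return newResponse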

How to wait until all async calls are finished

I've got a NestJS application which interacts with the YouTube API and loads videos from it.
One particular method is important, and it's loadVideos below. The method itself has multiple asyncs inside, and I need to work with the videoIdMap property once everything is finished:
private loadVideos(
playListId: string,
channel: Channel,
nextPageToken: string,
stopLoadingOnVideoId: string,
) {
const baseUrl = YoutubeService.VIDEO_URL_SNIPPET_BY_ID + playListId;
const response = this.httpService
.get(nextPageToken ? baseUrl + '&pageToken=' + nextPageToken : baseUrl)
.pipe(map((response) => response.data));
response.subscribe((data) => {
data.items.forEach((item) => {
if (stopLoadingOnVideoId && item.snippet.resourceId.videoId === stopLoadingOnVideoId) {
return;
}
this.prepareVideoEntity(item.snippet, channel).then((partialVideo) =>
this.videoService.create(partialVideo).then((video) => {
this.videoIdMap[video.youtubeId] = video.id;
}),
);
});
if (data.nextPageToken) {
this.loadVideos(
playListId,
channel,
data.nextPageToken,
stopLoadingOnVideoId,
);
}
});
}
The ideal solution for me would be to make loadVideos async somehow so I can later do:
public async methodWhichCallLoadVideos(): Promise<void> {
await this.loadVideos(playListId, channel, null, stopLoadingOnVideoId)
// My code which have to be executed right after videos are loaded
}
Every solution I tried ended up with this.videoIdMap being an empty object, or with a compilation issue, so any idea is more than welcome.
You could switch to promises instead of Observables, thus turning the method into an async one that recurs as long as data has a nextPageToken:
private async loadVideos(
playListId: string,
channel: Channel,
nextPageToken: string,
stopLoadingOnVideoId: string,
) {
const baseUrl = YoutubeService.VIDEO_URL_SNIPPET_BY_ID + playListId;
const response = await this.httpService
.get(nextPageToken ? baseUrl + '&pageToken=' + nextPageToken : baseUrl).toPromise();
const { data } = response;
for (const item of data.items) {
if (stopLoadingOnVideoId && item.snippet.resourceId.videoId === stopLoadingOnVideoId) {
continue;
}
const partialVideo = await this.prepareVideoEntity(item.snippet, channel);
const video = await this.videoService.create(partialVideo)
this.videoIdMap[video.youtubeId] = video.id;
}
if (data.nextPageToken) {
await this.loadVideos(
playListId,
channel,
data.nextPageToken,
stopLoadingOnVideoId,
);
}
}
In your caller you can then simply await loadVideos(...):
private async initVideoIdMap(...) {
await this.loadVideos(...);
// this.videoIdMap should be correctly populated at this point
}
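As a side note, toPromise() is deprecated in RxJS 7+; if you are on a newer version, firstValueFrom (imported from 'rxjs') does the same job. A minimal sketch of that variant, assuming the same httpService call:
const { data } = await firstValueFrom(
this.httpService.get(nextPageToken ? baseUrl + '&pageToken=' + nextPageToken : baseUrl)
);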

rxjs, call next on generator only after previous operation completes

I'm uploading a file by creating slices of the file with Blob.slice() in a generator function:
export function* chunkFile(file: File, chunkSize: number) {
let chunkStart = 0;
const _chunkEnd = chunkStart + chunkSize;
let chunkEnd = _chunkEnd > file.size ? file.size : _chunkEnd;
while (chunkStart < file.size) {
yield <ChunkType>{
chunk: file.slice(chunkStart, chunkEnd),
start: chunkStart,
end: chunkEnd
};
chunkStart = chunkEnd;
const _chunkEndIn = chunkStart + chunkSize;
chunkEnd = _chunkEndIn > file.size ? file.size : _chunkEndIn;
}
}
and I'm uploading the file like this:
Observable.from(chunkFile(file, chunkSize)).concatMap(uploadRoutine).subscribe();
But all the chunks are created at the same time.
What I need is to create a new chunk (i.e. call next on the generator) only when the current chunk's upload completes.
Found the solution on my own:
export function rxIterable<T, R>(source: Iterator<T>, consumer: (value: T) => Observable<R>) {
const first = source.next();
if (first.done) {
return empty<R>();
}
return consumer(first.value).pipe(
expand(() => {
const next = source.next();
if (next.done) {
return empty<R>();
}
return consumer(next.value);
}),
finalize(() => source.return())
);
}
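For completeness, a usage sketch of this helper with the chunkFile generator from the question, assuming uploadRoutine takes a ChunkType and returns an Observable:
rxIterable(chunkFile(file, chunkSize), uploadRoutine).subscribe({
complete: () => console.log('all chunks uploaded')
});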

Express Deprecated

I have a photo app that uploads photos to AWS. When testing the photo upload feature on my localhost, my terminal throws the following error:
express deprecated res.send(status, body): Use
res.status(status).send(body) instead aws/aws.js:50:18
My photos DO save to AWS; I'm just wondering what this error is and how to fix it. Below is my AWS code that the error refers to.
'use strict';
var AWS = require('aws-sdk'),
crypto = require('crypto'),
config = require('./aws.json'),
createS3Policy,
getExpiryTime;
getExpiryTime = function () {
var _date = new Date();
return '' + (_date.getFullYear()) + '-' + (_date.getMonth() + 1) + '-' +
(_date.getDate() + 1) + 'T' + (_date.getHours() + 3) + ':' + '00:00.000Z';
};
createS3Policy = function(contentType, callback) {
var date = new Date();
var s3Policy = {
'expiration': getExpiryTime(),
'conditions': [
['starts-with', '$key', 'images/'],
{'bucket': config.bucket},
{'acl': 'public-read'},
['starts-with', '$Content-Type', contentType],
{'success_action_status' : '201'}
]
};
// stringify and encode the policy
var stringPolicy = JSON.stringify(s3Policy);
var base64Policy = new Buffer(stringPolicy, 'utf-8').toString('base64');
// sign the base64 encoded policy
var signature = crypto.createHmac('sha1', config.secretAccessKey)
.update(new Buffer(base64Policy, 'utf-8')).digest('base64');
// build the results object
var s3Credentials = {
s3Policy: base64Policy,
s3Signature: signature,
AWSAccessKeyId: config.accessKeyId
};
// send it back
callback(s3Credentials);
};
exports.getS3Policy = function(req, res) {
createS3Policy(req.query.mimeType, function (creds, err) {
if (!err) {
return res.send(200, creds);
} else {
return res.send(500, err);
}
});
};
Replace res.send(statusCode, "something") with res.status(statusCode).send("something")
This should do it for your code:
exports.getS3Policy = function(req, res) {
createS3Policy(req.query.mimeType, function (creds, err) {
if (!err) {
return res.send(creds); //200 is not needed here, express will default to this
} else {
return res.status(500).send(err);
}
});
};
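As a side note, when you only need to send a status code with no body of your own, Express also offers res.sendStatus, which sets the status and uses its standard message as the body:
return res.sendStatus(500); // same as res.status(500).send('Internal Server Error')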

Large file upload and download ASP .NET MVC4

Can anybody give me an example of code for uploading and downloading different types of files using a custom control in ASP.NET MVC4 with the PlUpload plugin? I want to save files for my tasks and messages with unique IDs in a database, and I want to retrieve them too. Here is the code I tried for uploading.
server side
public ActionResult UploadFiles(string id)
{
for (int i = 0; i < Request.Files.Count; i++)
{
var file = Request.Files[i];
file.SaveAs(AppDomain.CurrentDomain.BaseDirectory + "Uploads/" + file.FileName);
}
return Json(new { success = true }, JsonRequestBehavior.AllowGet);
}
and here is the client-side plupload code for uploading files:
$("#file_attachments").pluploadQueue(
{
// General settings
runtimes: 'html5,flash,silverlight',
url: '/SideMenuBar/UploadFiles',
max_file_size: '100mb',
chunk_size: '1mb',
unique_names: true,
multipart: true,
// Specify what files to browse for
filters: [
{ title: "Image files", extensions: "jpg,gif,png" },
{ title: "Zip files", extensions: "zip" },
{ title: "Rar files", extensions: "rar" },
{ title: "Document files", extensions: "docx,doc,xlx,xlxs,ppt" },
],
// Flash settings
flash_swf_url: 'Script/lib/plupload/js/plupload.flash.swf',
// Silverlight settings
silverlight_xap_url: 'Script/lib/plupload/js/plupload.silverlight.xap',
// PreInit events, bound before any internal events
preinit: {
Init: function (up, info) {
//alert('[Init]'+ info+ 'Features:'+ up.features);
},
UploadFile: function (up, file) {
// alert('[UploadFile]', file);
// You can override settings before the file is uploaded
up.settings.url = '/SideMenuBar/UploadFiles?id=' + file.id;
//up.settings.multipart_params = {param1: 'value1', param2: 'value2'};
}
},
// Post init events, bound after the internal events
init: {
UploadComplete: function (up, files) {
// destroy the uploader and init a new one
up.destroy();
}
}
});
var uploader = $('#file_attachments').pluploadQueue();
uploader.bind('FileUploaded', function (upldr, file, object) {
if (uploader.files.length == (uploader.total.uploaded + uploader.total.failed)) {
$(".file_upload_cancel").hide();
$(".file_upload_done").show();
}
});
uploader.bind("FilesAdded", function (up, filesToBeAdded) {
if (up.files.length > 5) {
up.files.splice(4, up.files.length - 5);
showStatus("Only 5 files max are allowed per upload. Extra files removed.", 3000, true);
return false;
}
return true;
});
$('.upload_files').click(function (e) {
e.preventDefault();
$(".file_up").show();
});
$('#new_message_form').submit(function (e) {
var uploader = $('#file_attachments').pluploadQueue();
// Files in queue upload them first
if (uploader.files.length > 0) {
// When all files are uploaded submit form
uploader.bind('StateChanged', function () {
if (uploader.files.length === (uploader.total.uploaded + uploader.total.failed)) {
//uncoment next line to submit form after all files are uploaded
//$('#new_message_form')[0].submit();
}
});
uploader.start();
}
return false;
});
}
How can I resolve this problem?
You are using an option called "chunk", which divides your file into pieces of the size you set with the "chunk_size" property - it's good practice, and helps prevent errors.
For example: you have a 5 MB file and a 1 MB chunk size. When you upload, you'll send 5 parts of 1 MB each until the upload is complete. Then, on the server, you have to put them back together.
I recommend you see this link for more information about chunking and how to make it work.
Here is an example of one of my implementations - with MVC 3 - of plupload with chunking.
I'll post the JavaScript code and the action. I think it will be good for you to see how to implement it in your case:
function installFolderFileUploader(action, id, ProfileType, intMaxFilesPermitted, Folder, maxSizeMB) {
var uploaderRuntimes = 'html5, flash, silverlight';
var uploader = new plupload.Uploader({
runtimes: uploaderRuntimes,
browse_button: 'imgBtnPhotoUpload',
url: action,
flash_swf_url: '/Scripts/Plugins/Moxie.swf',
silverlight_xap_url: '/Scripts/Plugins/Moxie.xap',
multipart_params: { 'id': id, 'ProfileType': ProfileType },
multi_selection: true,
max_file_count: '5',
chunk_size: '100KB',
filters: {
max_file_size: maxSizeMB + 'MB'
},
init: {
FileUploaded: function (Up, File, Response) {
var jsonObj = jQuery.parseJSON(Response.response);
if (jsonObj.success) {
mountFileUploadFields(jsonObj, Folder, ProfileType);
}
},
PostInit: function () {
//meow
$('#imgBtnPhotoUpload').next().css({ 'top': '0', 'width': '146px', 'height': '28px', 'cursor': 'pointer' });
},
FilesAdded: function (up, files) {
var totalInPage = parseInt($('#dvFileContainer .BeeFileDetails').length);
if ((up.files.length + totalInPage) > parseInt(intMaxFilesPermitted)) {
jQuery.facebox({ div: "#dvMaxFilesPermitedError" });
up.splice();
up.refresh();
return false;
}
else {
if (totalInPage >= parseInt(intMaxFilesPermitted)) {
jQuery.facebox({ div: "#dvMaxFilesPermitedError" });
up.splice();
up.refresh();
return false;
}
else {
$('#dvFileList').css('margin-left', '2px');
$('#dvFileList').css('font-size', '10px');
$('#dvFileList').css('display', 'block');
plupload.each(files, function (file) {
$('#dvFileList').append('<div>');
$('#dvFileList').append('<div style="width:84%;margin-left:30px;float:left;" id="' + file.id + '">' + file.name + ' (' + plupload.formatSize(file.size) + ')<b></b></div>');
$('#dvFileList').append('<img class="removeFile" style="margin-top:2px;cursor:pointer;" src="/Content/images/cancel.png" id="' + file.id + '" />');
$('#dvFileList').append('</div>');
$('.removeFile').on('click', function () {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
uploader.stop();
uploader.splice();
});
});
uploader.start();
}
}
},
UploadProgress: function (up, file) {
if (file.percent == 100) {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
}
$('#' + file.id + ' b:eq(0)').html('<span> - ' + file.percent + '%</span>');
$('#' + file.id + ' b:eq(0)').append('<div id="fileUploaded" style="background-color:#0099FF;height:3px;width:' + file.percent + '%;"></div>');
},
ChunkUploaded: function (up, file, info) {
var jsonObj = jQuery.parseJSON(info.response);
if (jsonObj.tempFile != "") {
uploader.settings.multipart_params.tempFile = jsonObj.tempFile;
}
else {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
var totalInPage = parseInt($('#dvFileContainer .BeeFileDetails').length);
if (totalInPage > 0)
$('.BeeEditFileActions').fadeIn();
var fileName = uploader.settings.multipart_params.tempFile;
removeNonUsedFiles(id, fileName, 'File');
uploader.settings.multipart_params.tempFile = '';
uploader.stop();
uploader.splice();
uploader.refresh();
jQuery.facebox({ div: "#dvAddFolderFileError" });
}
},
Error: function (up, err) {
if (err.code != '-500')
jQuery.facebox({ div: "#dvAddFolderFileError" });
},
UploadComplete: function (a, Response) {
$('.BeeEditFileActions').fadeIn();
$('#dvFileList').empty();
uploader.splice();
uploader.refresh();
}
}
});
uploader.init();
}
And the Action:
[AllowAnonymous]
[HttpPost]
public JsonResult UploadFolderFile(string id, Domain.Profile.TypeProfile ProfileType, string tempFile, string name, int? chunk, int? chunks)
{
String strTempFile = string.Empty;
String strSaveLocation = string.Empty;
try
{
var fileData = Request.Files[0];
chunk = chunk ?? 0;
String strExtension = Path.GetExtension(name).ToLower();
Models.Identity.CustomIdentity objUser = new Models.Identity.CustomIdentity(System.Web.Security.FormsAuthentication.Decrypt(id));
DB.CompanyNetworkDB objCompanyDB = new DB.CompanyNetworkDB();
Int32 intMaxFileSize = objCompanyDB.getFileInFolderMaxSize(objUser.CompanyNetworkID) * 1024 * 1024;
if (objUser != null && objUser.IsAuthenticated && fileData.ContentLength <= intMaxFileSize)
{
////Get upload file.
String strSaveLocationURL = Domain.Profile.getUploadItemsFolder(objUser.CompanyNetworkID, ProfileType, Domain.Profile.UploadType.Folder);
strSaveLocationURL += "temp/";
strSaveLocation = Server.MapPath(strSaveLocationURL);
strTempFile = string.IsNullOrEmpty(tempFile) ? DateTime.Now.Ticks.ToString() + strExtension : tempFile;
long fileSize = 0;
using (var fs = new FileStream(Path.Combine(strSaveLocation, strTempFile), chunk == 0 ? FileMode.Create : FileMode.Append))
{
var buffer = new byte[fileData.InputStream.Length];
fileData.InputStream.Read(buffer, 0, buffer.Length);
fs.Write(buffer, 0, buffer.Length);
fileSize = fs.Length;
}
if (fileSize <= intMaxFileSize)
{
if (chunk == chunks - 1)
{
return Json(new { success = true, OriginalFileName = Path.GetFileName(name), ServerFileName = strTempFile, SizeMB = fileSize });
}
else
{
return Json(new { success = true, tempFile = strTempFile });
}
}
else
{
return Json(new { success = false });
}
}
else
{
return Json(new { success = false });
}
}
catch (ArgumentOutOfRangeException)
{
System.IO.File.Delete(Path.Combine(strSaveLocation, strTempFile));
return Json(new { success = false, erro = "canceled" });
}
catch (Exception ex)
{
throw new Exception(ex.Message);
}
}
I think it could help.
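One note on how the two sides fit together: with chunk_size set, plupload sends each chunk as its own POST that includes chunk (the zero-based index of the piece) and chunks (the total count) as form fields, which is what the action's chunk == chunks - 1 check relies on to detect the final piece and return the finished file information.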