Aurelia startup error - Invalid resource path: function Compose - aurelia

After upgrading my Aurelia Framework in my application via:
"aurelia-bootstrapper": "^2.3.0"
I'm getting an error at application startup (stack trace at the bottom).
Reverting to:
"aurelia-bootstrapper": "2.2.0"
fixes the error. In release notes for version of Aurelia in question, there is note about additional functionality of setRoot method, but nothing that could be breaking change Aurelia Release Notes - June 2018.
I just use default:
aurelia.start().then(() => aurelia.setRoot());
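For context, the surrounding configure function is essentially the stock one from the CLI template, roughly like this (the feature/plugin list here is illustrative rather than my exact setup):
// Roughly the stock main from the CLI template (illustrative)
import environment from './environment';

export function configure(aurelia) {
  aurelia.use
    .standardConfiguration()
    .feature('resources');

  if (environment.debug) {
    aurelia.use.developmentLogging();
  }

  aurelia.start().then(() => aurelia.setRoot());
}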
This is the detailed stack trace of the startup error:
vendor-bundle.js:1398 Unhandled rejection Error: Invalid resource path [function Compose(element, container, compositionEngine, viewSlot, viewResources, taskQueue) {
_initDefineProp(this, 'model', _descriptor, this);
_initDefineProp(this, 'view', _descriptor2, this);
_initDefineProp(this, 'viewModel', _descriptor3, this);
_initDefineProp(this, 'swapOrder', _descriptor4, this);
this.element = element;
this.container = container;
this.compositionEngine = compositionEngine;
this.viewSlot = viewSlot;
this.viewResources = viewResources;
this.taskQueue = taskQueue;
this.currentController = null;
this.currentViewModel = null;
this.changes = Object.create(null);
}]. Resources must be specified as relative module IDs.
at FrameworkConfiguration.globalResources (http://localhost/scripts/vendor-bundle.js:69284:17)
at Object.configure (http://localhost/scripts/vendor-bundle.js:62319:12)
at http://localhost/scripts/vendor-bundle.js:69140:36
From previous event:
at _loadPlugin (http://localhost/scripts/vendor-bundle.js:69138:42)
at http://localhost/scripts/vendor-bundle.js:69131:16
From previous event:
at loadPlugin (http://localhost/scripts/vendor-bundle.js:69130:75)
at next (http://localhost/scripts/vendor-bundle.js:69392:20)
From previous event:
at next (http://localhost/scripts/vendor-bundle.js:69392:56)
at http://localhost/scripts/vendor-bundle.js:69399:16
From previous event:
at FrameworkConfiguration.apply (http://localhost/scripts/vendor-bundle.js:69384:44)
at Aurelia.start (http://localhost/scripts/vendor-bundle.js:69000:39)
at Object.<anonymous> (http://localhost/scripts/app-bundle.js:5811:33)
at step (http://localhost/scripts/app-bundle.js:51:23)
at Object.next (http://localhost/scripts/app-bundle.js:32:53)
at fulfilled (http://localhost/scripts/app-bundle.js:23:58)
From previous event:
at step (http://localhost/scripts/app-bundle.js:25:124)
at http://localhost/scripts/app-bundle.js:26:9
From previous event:
at __awaiter (http://localhost/scripts/app-bundle.js:22:12)
at Object.configure (http://localhost/scripts/app-bundle.js:5793:16)
at http://localhost/scripts/vendor-bundle.js:70930:29
From previous event:
at config (http://localhost/scripts/vendor-bundle.js:70925:56)
at http://localhost/scripts/vendor-bundle.js:70961:14
From previous event:
at bootstrap (http://localhost/scripts/vendor-bundle.js:70960:26)
at http://localhost/scripts/vendor-bundle.js:70947:9
From previous event:
at run (http://localhost/scripts/vendor-bundle.js:70942:61)
at Object.<anonymous> (http://localhost/scripts/vendor-bundle.js:70967:37)
at Object.execCb (http://localhost/scripts/vendor-bundle.js:8724:33)
at Module.check (http://localhost/scripts/vendor-bundle.js:7911:51)
at Module.enable (http://localhost/scripts/vendor-bundle.js:8204:22)
at Object.enable (http://localhost/scripts/vendor-bundle.js:8585:39)
at Module.<anonymous> (http://localhost/scripts/vendor-bundle.js:8189:33)
at http://localhost/scripts/vendor-bundle.js:7162:23
at each (http://localhost/scripts/vendor-bundle.js:7087:31)
at Module.enable (http://localhost/scripts/vendor-bundle.js:8141:17)
at Module.init (http://localhost/scripts/vendor-bundle.js:7816:26)
at http://localhost/scripts/vendor-bundle.js:8488:36
printWarning # vendor-bundle.js:1398
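For reference, the call that throws is FrameworkConfiguration.globalResources, which as far as I can tell only accepts relative module ID strings in the framework version that ends up in my bundle (newer framework versions also accept classes, and the resource being rejected here is the Compose class from aurelia-templating-resources). A hypothetical example of the string form it expects:
export function configure(config) {
  // Hypothetical plugin/feature configure function: resources passed as
  // relative module ID strings, which is the form this globalResources accepts
  config.globalResources(['./elements/my-element', './value-converters/date-format']);
}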

Related

I can't run the node.js server using keystone.js framework

I have source code that uses Keystone.js, and I can't run it because of a MongoDB connection error.
This is the code that creates Keystone:
const keystone = new Keystone({
  name: process.env.PROJECT_NAME,
  adapter: new Adapter({ dbName }),
  mongo: 'mongodb://127.0.0.1:27017/',
  sessionStore: new MongoStore({ url: 'mongodb://localhost/' }),
  cookieSecret: 'process.env.COOKIE_SECRET',
  appVersion: {
    version: '1.0.0',
    addVersionToHttpHeaders: false,
    access: false,
  },
  cookie: {
    secure: false,
    maxAge: 1000 * 60 * 60 * 24 * 30, // 30 days
    sameSite: false
  }
});
...
await keystone.connect()
Here are the error details:
(node:9928) UnhandledPromiseRejectionWarning: Error: No MongoDB connection URI specified.
at resolveAllKeys (E:\Node.JS\frostbets-master-2-20210809T155720Z-001\frostbets-master-2\keystone\node_modules\@keystonejs\utils\dist\utils.cjs.dev.js:51:19)
at processTicksAndRejections (internal/process/task_queues.js:93:5)
at async Keystone.connect (E:\Node.JS\frostbets-master-2-20210809T155720Z-001\frostbets-master-2\keystone\node_modules\@keystonejs\keystone\lib\Keystone\index.js:450:5)
(Use `node --trace-warnings ...` to show where the warning was created)
(node:9928) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 2)
(node:9928) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
------------------------------------------------------------------------------------------
You haven't specified, but it looks like you're on Keystone 5, so I'm going with that assumption.
There are a number of issues in the code you've posted:
The main problem is that you're passing dbName to initialise your Adapter, but you should be passing the full mongoUri.
That's the source of the specific error you're getting.
Pretty sure dbName was an option at one point but not in the current release of KS5.
Again, not sure which version you're actually on but if you've updated some packages in an older project, that might be it.
I'm not sure what the mongo key being passed in the Keystone config is supposed to do, but I don't think it's valid.
Any config for Mongo (for the main DB) should be passed to the adapter.
The syntax you're using to create your MongoStore instance has been deprecated for the current version of that package.
If it works for you leave it, but in the code below I've used the more recent MongoStore.create() syntax.
In your code you have mongodb://127.0.0.1:27017/ (under the mongo key, which I think is ignored) and mongodb://localhost/ for the sessionStore.
For most systems this will refer to the same DB though it's not clear if that's intentional in your case.
In my code I've put the sessions in a separate DB (my-app-sessions) but that's optional.
Your config uses the literal string 'process.env.COOKIE_SECRET' as the cookie secret, not the value in the COOKIE_SECRET environment var.
This is almost certainly not what you want.
To solve these problems, you probably want something close to this:
const { Keystone } = require('@keystonejs/keystone');
const { GraphQLApp } = require('@keystonejs/app-graphql');
const { AdminUIApp } = require('@keystonejs/app-admin-ui');
const MongoStore = require('connect-mongo');
const { MongooseAdapter: Adapter } = require('@keystonejs/adapter-mongoose');

const keystone = new Keystone({
  name: process.env.PROJECT_NAME,
  adapter: new Adapter({ mongoUri: 'mongodb://localhost/my-app' }),
  sessionStore: MongoStore.create({ mongoUrl: 'mongodb://localhost/my-app-sessions' }),
  cookieSecret: process.env.COOKIE_SECRET,
  appVersion: {
    version: '1.0.0',
    addVersionToHttpHeaders: false,
    access: false,
  },
  cookie: {
    secure: false,
    maxAge: 1000 * 60 * 60 * 24 * 30, // 30 days
    sameSite: false
  }
});

// ...

module.exports = {
  keystone,
  apps: [new GraphQLApp(), new AdminUIApp({ name: process.env.PROJECT_NAME, enableDefaultRoute: true })],
};
In this code I've left the standard exports at the bottom rather than calling keystone.connect() directly so I can run it with yarn keystone dev.
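One more note: PROJECT_NAME and COOKIE_SECRET have to actually be present in the environment before this file is evaluated. If you keep them in a .env file, load them first with something like dotenv (it's not in the dependency list below, so treat this as an assumption about your setup):
// At the very top of the Keystone config file, before new Keystone(...) runs
require('dotenv').config();

// process.env.PROJECT_NAME and process.env.COOKIE_SECRET are now populated
// from the .env file in the project root (if one exists)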
Tested with..
"dependencies": {
"#keystonejs/adapter-mongoose": "^11.2.2",
"#keystonejs/app-admin-ui": "^7.5.2",
"#keystonejs/app-graphql": "^6.3.2",
"#keystonejs/keystone": "^19.3.3",
"connect-mongo": "^4.4.1"
}

Reference Error with module exports event discord.js

I am very confused by this error. In my code I have /events/ and /commands/ directories; this file is in /events/. I made sure that everything else works, but I can't figure this one out. The code below is meant to work as a server count for a website. Does anybody know? Thank you.
const Discord = require('discord.js');
var db = require('mysql');

var con = db.createConnection({
  host: "localhost",
  user: "",
  password: "",
  database: ""
});

module.exports = (client, guildCreate) => {
  //welcome embed
  const welcomeEmbed = new Discord.MessageEmbed()
    .setColor('#858884')
    .setTitle('Hello!')
    .setAuthor('DHL Bot', 'https://cdn.discordapp.com/app-icons/708717412391845988/967e1b05f7b8aeca1d6b4649dc5530c8.png')
    .setDescription(`Hello I am DHL, I am devoloped by Den#0762. Please do !setup. For support you can join our support server https://discord.com/invite`)
    .setTimestamp()
    .setFooter('By: Den#0762', 'https://cdn.discordapp.com/avatars/407206318911258628/e972b589e0ea4c45064d39b0380d77fd.png')

  guild.owner.send(welcomeEmbed)

  //db for scount
  con.connect(function(err) {
    if (err) throw err;
    var sql = "UPDATE scount SET servercount = servercount + 1";
    con.query(sql, function (err, result) {
      console.log(result.affectedRows + " new server");
    });
  });
}
(node:1949) UnhandledPromiseRejectionWarning: ReferenceError: guild is not defined
at module.exports (/root/dc/DHL/events/guildCreate.js:23:1)
at Client.emit (events.js:315:20)
at Object.module.exports [as GUILD_CREATE] (/root/node_modules/discord.js/src/client/websocket/handlers/GUILD_CREATE.js:33:14)
at WebSocketManager.handlePacket (/root/node_modules/discord.js/src/client/websocket/WebSocketManager.js:386:31)
at WebSocketShard.onPacket (/root/node_modules/discord.js/src/client/websocket/WebSocketShard.js:436:22)
at WebSocketShard.onMessage (/root/node_modules/discord.js/src/client/websocket/WebSocketShard.js:293:10)
at WebSocket.onMessage (/root/node_modules/ws/lib/event-target.js:125:16)
at WebSocket.emit (events.js:315:20)
at Receiver.receiverOnMessage (/root/node_modules/ws/lib/websocket.js:797:20)
at Receiver.emit (events.js:315:20)
(Use `node --trace-warnings ...` to show where the warning was created)
(node:1949) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 1)
(node:1949) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
You're referencing guild, but it isn't declared. Based on other code I've seen, I think you need something along these lines:
module.exports = (client, guildCreate) => {
  //welcome embed
  const welcomeEmbed = new Discord.MessageEmbed() [...]

  client.on("guildCreate", guild => {
    guild.owner.send(welcomeEmbed)
  });
  [...]
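Alternatively, if your event loader already passes the event's arguments into this exported function (a common pattern for an /events/ folder, though I'm guessing at how your loader works), the second parameter is the guild itself, so renaming it avoids registering a second listener:
// Assumes the loader calls this file as (client, guild) for the guildCreate event
module.exports = (client, guild) => {
  //welcome embed
  const welcomeEmbed = new Discord.MessageEmbed() // ... same chain as above
  guild.owner.send(welcomeEmbed)
  // ... same database update as above
}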

Scheduling localhost serverless cron: The model could not be resolved in registry

I'm hosting an ExpressJS/NodeJS API on AWS Lambda with Serverless framework. The API uses Knex.js and Bookshelf.js ORM.
I want to test scheduling a cron job locally. I'm using serverless-offline-scheduler to do this.
Question: My API runs fine if I call it from my client, but if I call a function via the serverless scheduler, it complains that no models are in the registry. Why is this? I've definitely required all the necessary models at the top of the OrderService.js file.
{
"errorMessage": "The model User could not be resolved from the registry plugin.",
"errorType": "Error",
"stackTrace": [
"Error: The model User could not be resolved from the registry plugin.",
" at new ModelNotResolved (/Users/danielturcotte/Sites/d2c/api_v4/node_modules/bookshelf/lib/plugins/registry.js:70:133)",
Serverless.yml:
functions:
  app:
    handler: handler.handler
    events: ...
  dequeue:
    handler: ./services/OrderService.dequeue # calls the dequeue function
    events:
      - schedule: rate(1 minute)
The handler calls the dequeue function in root/services/OrderService.js, which contains:
...
const dequeue = async function() {
  await checkQueuedOrders();
};

module.exports = {
  dequeue,
};
In my knexService.js file, I register Bookshelf models to the registry to remove circular dependencies:
const knexfile = require('./knexfile');
const config = require('./environment');
const environment = config.env.NODE_ENV || 'development';
const knex = require('knex')(knexfile[environment]);
knex.client.pool.numPendingCreates();
const bookshelf = require('bookshelf')(knex);
bookshelf.plugin('registry'); // Resolve circular dependencies with relations
bookshelf.plugin('visibility');
bookshelf.plugin('pagination');
module.exports.knex = knex;
module.exports.bookshelf = bookshelf;
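For reference, each model module registers itself with the registry and resolves related models by name. Here is a cut-down sketch of the pattern I'm using (the model names are just examples):
// models/User.js (simplified example)
const { bookshelf } = require('../knexService');

const User = bookshelf.Model.extend({
  tableName: 'users',
  orders() {
    // The related model is resolved by name through the registry,
    // which is what avoids the circular require
    return this.hasMany('Order');
  },
});

// Register under the name that other models use in their relations
module.exports = bookshelf.model('User', User);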

Loopback error - value is not an object

I am using LoopBack on the backend and I am getting this error:
Unhandled error for request POST /api/meetups/auth: Error: Value is not an object.
at errorNotAnObject (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/types/object.js:80:13)
at Object.validate (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/types/object.js:51:14)
at Object.fromTypedValue (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/types/object.js:14:22)
at Object.fromSloppyValue (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/types/object.js:41:17)
at HttpContext.buildArgs (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/http-context.js:193:22)
at new HttpContext (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/http-context.js:59:20)
at restStaticMethodHandler (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/rest-adapter.js:457:15)
at Layer.handle [as handle_request] (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/layer.js:95:5)
at next (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/route.js:137:13)
at Route.dispatch (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/route.js:112:3)
at Layer.handle [as handle_request] (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/layer.js:95:5)
at /Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/index.js:281:22
at Function.process_params (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/index.js:335:12)
at next (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/index.js:275:10)
at Function.handle
(/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/index.js:174:3)
at router (/Users/ankursharma/Documents/projects/meetupz/node_modules/express/lib/router/index.js:47:12)
I have already searched Stack Overflow, but I didn't find an answer. Basically, I was trying to use body-parser. I went through one of the Stack Overflow threads and implemented its solution, and I was able to use body-parser successfully, so that error has been solved. But now this error is giving me a tough time.
server.js file
'use strict';

var loopback = require('loopback');
var boot = require('loopback-boot');
var bodyParser = require('body-parser');
var multer = require('multer');

var app = module.exports = loopback();

//code for body parsing
app.use(bodyParser.json()); // for parsing application/json
app.use(bodyParser.urlencoded({ extended: true })); // for parsing application/x-www-form-urlencoded
//app.use(multer()); // for parsing multipart/form-data
//code for body parsing ends

app.start = function() {
  // start the web server
  return app.listen(function() {
    app.emit('started');
    var baseUrl = app.get('url').replace(/\/$/, '');
    console.log('Web server listening at: %s', baseUrl);
    if (app.get('loopback-component-explorer')) {
      var explorerPath = app.get('loopback-component-explorer').mountPath;
      console.log('Browse your REST API at %s%s', baseUrl, explorerPath);
    }
  });
};

// Bootstrap the application, configure models, datasources and middleware.
// Sub-apps like REST API are mounted via boot scripts.
boot(app, __dirname, function(err) {
  if (err) throw err;
  // start the server if `$ node server.js`
  if (require.main === module)
    app.start();
});
In middleware.json, I have updated the parse property as well:
"parse": {
  "body-parser#json": {},
  "body-parser#urlencoded": { "params": { "extended": true } }
},
For some reason, that error has gone; I'm not sure, maybe it will come back. But now this is the error I am seeing:
Unhandled error for request POST /api/meetups/auth: TypeError: cb is not a function
at Function.Meetups.auth (/Users/ankursharma/Documents/projects/meetupz/common/models/meetups.js:117:3)
at SharedMethod.invoke (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/shared-method.js:270:25)
at HttpContext.invoke (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/http-context.js:297:12)
at phaseInvoke (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/remote-objects.js:677:9)
at runHandler (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/lib/phase.js:135:5)
at iterate (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:146:13)
at Object.async.eachSeries (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:162:9)
at runHandlers (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/lib/phase.js:144:13)
at iterate (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:146:13)
at /Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:157:25
at /Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:154:25
at execStack (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/remote-objects.js:522:7)
at RemoteObjects.execHooks (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/remote-objects.js:526:10)
at phaseBeforeInvoke (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/lib/remote-objects.js:673:10)
at runHandler (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/lib/phase.js:135:5)
at iterate (/Users/ankursharma/Documents/projects/meetupz/node_modules/strong-remoting/node_modules/loopback-phase/node_modules/async/lib/async.js:146:13)
Check whether your filters or your URL contain blank spaces (i.e. %20), or anything else wrong with the filter or URL.
LoopBack 3 makes a distinction between an array and an object, so you have to check your data type. See https://github.com/strongloop/strong-remoting/issues/360 for more information.
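For example, the declared type of the body argument has to match what the client actually sends. Here is a sketch of the kind of remote method definition to double-check (the method and argument names are assumptions based on your route):
// common/models/meetups.js (hypothetical definition for POST /api/meetups/auth)
module.exports = function(Meetups) {
  Meetups.auth = function(credentials, cb) {
    // ... verify credentials, then always call back with (err, result)
    cb(null, { ok: true });
  };

  Meetups.remoteMethod('auth', {
    http: { path: '/auth', verb: 'post' },
    // 'object' here must match the JSON object the client posts in the body;
    // posting an array or a bare string can trigger "Value is not an object"
    accepts: { arg: 'credentials', type: 'object', http: { source: 'body' } },
    returns: { arg: 'result', type: 'object' },
  });
};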
I was facing the same error, but in my case it was due to a model property's data length. The property was of type object with a small dataLength, which caused faulty records in my SQL model table. I had to manually delete those faulty records and increase the dataLength of that property too.
Restart the app

Google Cloud Storage - Error during upload: gcs-resumable-upload.json renaming operation not permitted

I'm simply trying to follow this tutorial on how to upload files to GCS with Node and Express. But the following error keeps causing my app to crash. Usually I am able to upload one file without a problem on the first run, but I get this error after running a few requests, even with different files. When I try to upload, say, 5 files at a time, this error causes my app to crash even on the first run. I see the process is trying to rename a file in the .config folder. Is this normal behavior? If so, is there a workaround?
Windows: v10.0.10586
Node: v4.3.1
Express: v4.13.1
Error: EPERM: operation not permitted, rename 'C:\Users\James Wang\.config\configstore\gcs-resumable-upload.json.2873606827' -> 'C:\Users\James Wang\.config\configstore\gcs-resumable-upload.json'
at Error (native)
at Object.fs.renameSync (fs.js:681:18)
at Function.writeFileSync [as sync]
at Object.create.all.set (C:\Users\James Wang\gi-cms-backend\node_modules\configstore\index.js:62:21)
at Object.Configstore.set (C:\Users\James Wang\gi-cms-backend\node_modules\configstore\index.js:93:11)
at Upload.set (C:\Users\James Wang\gi-cms-backend\node_modules\gcs-resumable-upload\index.js:264:20)
at C:\Users\James Wang\gi-cms-backend\node_modules\gcs-resumable-upload\index.js:60:14
at C:\Users\James Wang\gi-cms-backend\node_modules\gcs-resumable-upload\index.js:103:5
at Request._callback (C:\Users\James Wang\gi-cms-backend\node_modules\gcs-resumable-upload\index.js:230:7)
at Request.self.callback (C:\Users\James Wang\gi-cms-backend\node_modules\request\request.js:199:22)
at emitTwo (events.js:87:13)
at Request.emit (events.js:172:7)
at Request.<anonymous> (C:\Users\James Wang\gi-cms-backend\node_modules\request\request.js:1036:10)
at emitOne (events.js:82:20)
at Request.emit (events.js:169:7)
at IncomingMessage.<anonymous> (C:\Users\James Wang\gi-cms-backend\node_modules\request\request.js:963:12)
[nodemon] app crashed - waiting for file changes before starting...
UPDATE:
After setting {resumable: false} as suggested by @stephenplusplus in this post, I am no longer getting the "EPERM: operation not permitted" error. But I have started running into the { [ERROR:ETIMEDOUT] code: 'ETIMEDOUT', connection: false } error while trying to upload multiple files at a time with the largest file greater than 1.5 MB. The other files get uploaded successfully.
For more information, I am able to upload files one by one when the files are no greater than ~2.5 MB. If I try to upload 3 files at a time, I can only do so with files no greater than ~1.5 MB.
Is the "Operation not permitted" issue as specified in the question a Windows-specific thing, and does the timeout issue happen only after I set resumable = false?
I'm using express and multer with node.
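For context, the route is wired up roughly like this, with multer's memory storage so that file.buffer is available in the handler below (the route and field names are placeholders, not my exact setup):
// Hypothetical route setup; the field name and route are placeholders
var express = require('express');
var multer = require('multer');
var app = express();
var upload = multer({ storage: multer.memoryStorage() }); // memory storage so file.buffer exists below

app.post('/upload',
  upload.fields([{ name: 'images', maxCount: 5 }]),
  sendFilesToGCS,
  function(req, res) {
    res.json({ uploaded: true });
  }
);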
This is the code I'm using now:
// Express middleware that will handle the uploaded files. req.files holds the files received from
// multer.fields([{ name, maxCount }]). This function should handle
// the upload process of the files asynchronously.
function sendFilesToGCS(req, res, next) {
  if (!req.files) { return next(); }

  function stream(file, key, folder) {
    var gcsName = Date.now() + file.originalname;
    var gcsFile = bucket.file(gcsName);
    var writeStream = gcsFile.createWriteStream({ resumable: false });
    console.log(key);
    console.log('Start uploading: ' + file.originalname);
    writeStream.on('error', function(err) {
      console.log(err);
      res.status(501).send(err);
    });
    writeStream.on('finish', function() {
      folder.incrementFinishCounter();
      req.files[key][0].cloudStorageObject = gcsName;
      req.files[key][0].cloudStoragePublicUrl = getPublicUrl(gcsName);
      console.log('Finish Uploading: ' + req.files[key][0].cloudStoragePublicUrl);
      folder.beginUploadNext();
    });
    writeStream.end(file.buffer);
  };

  var Folder = function(files) {
    var self = this;
    self.files = files;
    self.reqFilesKeys = Object.keys(files); // reqFilesKeys is an array of keys parsed from req.files
    self.nextInQuene = 0; // Keep track of the next file to be uploaded, must be less than reqFilesKeys.length
    self.finishCounter = 0; // Keep track of how many files have been uploaded, must be less than reqFilesKeys.length
    console.log(this.reqFilesKeys.length + ' files to upload');
  };

  // This function is used to initiate the upload process.
  // It's also called in the on-finish listener of a file's write-stream,
  // which will start uploading the next file in the queue.
  Folder.prototype.beginUploadNext = function() {
    // If there are still files left to upload,
    if (this.finishCounter < this.reqFilesKeys.length) {
      // and if there are still files left in the queue,
      if (this.nextInQuene < this.reqFilesKeys.length) {
        // upload the file
        var fileToUpload = this.files[this.reqFilesKeys[this.nextInQuene]][0];
        stream(fileToUpload, this.reqFilesKeys[this.nextInQuene], this);
        // Increment the nextInQuene counter, and get the next one ready
        this.nextInQuene++;
      }
    } else {
      console.log('Finish all upload!!!!!!!!!!!!!!!!!!!!!!');
      next();
    }
  };

  Folder.prototype.incrementFinishCounter = function() {
    this.finishCounter++;
    console.log('Finished ' + this.finishCounter + ' files');
  };

  var folder = new Folder(req.files);

  // Begin upload with 3 streams
  /*for(var i=0; i<3; i++) {
    folder.beginUploadNext();
  }*/

  // Upload files one by one
  folder.beginUploadNext();
}
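For comparison, here is the same one-file-at-a-time flow written with promises and async/await, which I find easier to reason about. This is just a sketch: it assumes a Node version with async/await support (newer than the v4.3.1 above) and reuses the same bucket and getPublicUrl helpers.
// Hypothetical promise-based variant of the sequential upload above
function uploadOne(file) {
  return new Promise(function(resolve, reject) {
    var gcsName = Date.now() + file.originalname;
    var writeStream = bucket.file(gcsName).createWriteStream({ resumable: false });
    writeStream.on('error', reject);
    writeStream.on('finish', function() { resolve(gcsName); });
    writeStream.end(file.buffer);
  });
}

async function sendFilesToGCS(req, res, next) {
  if (!req.files) { return next(); }
  try {
    // Upload strictly one file at a time, mirroring beginUploadNext()
    for (const key of Object.keys(req.files)) {
      const file = req.files[key][0];
      file.cloudStorageObject = await uploadOne(file);
      file.cloudStoragePublicUrl = getPublicUrl(file.cloudStorageObject);
    }
    next();
  } catch (err) {
    res.status(501).send(err);
  }
}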
I had the same issue with Bower. Run the following command: bower cache clean --allow-root
If this does not solve the problem, try again after disabling your antivirus.