I am trying to upload files to a Dropbox app using the CollectionFS/Meteor-CollectionFS package with the cfs:dropbox adapter, and my problem is that the files being uploaded are 0 bytes. I am not sure what I am missing or doing wrong here.
On server:
var registrationImageStorage = new FS.Store.Dropbox("registrationStorage", {
  key: "****",
  secret: "****",
  token: "****",
  transformWrite: function (fileObj, readStream, writeStream) {
    gm(readStream, fileObj.name()).stream().pipe(writeStream);
  }
});

RegistrationImages = new FS.Collection("registrations", {
  stores: [registrationImageStorage],
  filter: {
    allow: {
      contentTypes: ['image/*']
    }
  }
});

RegistrationImages.allow({
  insert: function () {
    return true;
  },
  update: function () {
    return true;
  }
});
On client:
var registrationImageStorage = new FS.Store.Dropbox("registrationStorage");

RegistrationImages = new FS.Collection("registrations", {
  stores: [registrationImageStorage],
  filter: {
    allow: {
      contentTypes: ['image/*']
    }
  }
});
On client to start the upload:
var file = new FS.File($('#badgeImage').get(0).files[0]);

RegistrationImages.insert(file, function (err, fileObj) {
  if (err) {
    console.log(err);
  } else {
    console.log(fileObj);
  }
});
OK, it turned out I did not need this part of the code; after removing it, the upload worked:
transformWrite: function (fileObj, readStream, writeStream) {
  gm(readStream, fileObj.name()).stream().pipe(writeStream);
}
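For reference, if a transform step is needed later, a minimal sketch along the lines of the CollectionFS documentation looks like the following. It assumes the cfs:graphicsmagick package is added and the GraphicsMagick binary is installed on the server; a missing binary is a common reason a transform like this writes empty files.

// Sketch only: resize on write, assuming gm is available on the server.
transformWrite: function (fileObj, readStream, writeStream) {
  // Resize to a maximum of 250x250 pixels while streaming to the store.
  gm(readStream, fileObj.name()).resize('250', '250').stream().pipe(writeStream);
}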
I am new to Node.js (3 days of total experience). I am using Node.js and the tedious package to query a database (Azure SQL). I am following the example explained here: https://learn.microsoft.com/en-us/azure/azure-sql/database/connect-query-nodejs?tabs=macos
const connection = new Connection(config);

// Attempt to connect and execute queries if connection goes through
connection.on("connect", err => {
  if (err) {
    console.error(err.message);
  } else {
    console.log("Reading rows from the Table...");

    // Read all rows from table
    const request = new Request(
      "SELECT * FROM clients",
      (err, rowCount, columns) => {
        if (err) {
          console.error(err.message);
        } else {
          console.log(`${rowCount} row(s) returned`);
        }
      }
    );

    request.on("row", columns => {
      columns.forEach(column => {
        console.log("%s\t%s", column.metadata.colName, column.value);
      });
    });

    connection.execSql(request);
  }
});
I have two issues:
1. I do not know how to get the queried data into an object, and
2. if I run the script it does print the items to the console, but it doesn't close the connection afterwards. If I add a connection.close() at the bottom, it closes the connection before the query is done. I get the feeling that Node.js executes everything at the same time (I am used to Python...).
Update
I found a way to close the connection. To my understanding, the request object has several "events" that are predefined by the library. It seems I need to handle the "done" event through request.on('done', ...) in order to make sure that the request can even BE done. My updated code looks like this:
var connection = new Connection(config);

connection.connect(function(err) {
  // If no error, then good to go...
  executeStatement();
});

connection.on('debug', function(text) {
  // Remove the commenting below to get full debugging.
  // console.log(text);
});

function executeStatement() {
  var request = new Request("SELECT * FROM clients", function(err, rowCount) {
    if (err) {
      console.log(err);
    } else {
      console.log(rowCount + ' rows');
    }
    connection.close();
  });

  request.on('row', function(rows) {
    _.forEach(rows, function(value, collection) {
      console.log(value);
      console.log(value.value);
      console.log(value.metadata.colName);
      console.log(collection);
    });
  });

  request.on('done', function(rowCount, more) {
    console.log(rowCount + ' rows returned');
  });

  // In SQL Server 2000 you may need: connection.execSqlBatch(request);
  connection.execSql(request);
}
Anyways, your help would be much appreciated!
Regards
Pieter
The tedious package is callback-based: it returns results through callbacks, so calling connection.close() at the bottom of the script closes the connection before those callbacks have finished. If you want to close the connection only after the query has completed, I suggest using the async package to sequence the steps.
For example
const { Connection, Request } = require("tedious");
const async = require("async");

const config = {
  authentication: {
    options: {
      userName: "username", // update me
      password: "password", // update me
    },
    type: "default",
  },
  server: "your_server.database.windows.net", // update me
  options: {
    database: "your_database", // update me
    encrypt: true,
    validateBulkLoadParameters: true,
  },
};

const connection = new Connection(config);
let results = [];

function queryDatabase(callback) {
  console.log("Reading rows from the Table...");

  // Read all rows from table
  const request = new Request("SELECT * FROM Person", (err, rowCount) => {
    if (err) {
      callback(err);
    } else {
      console.log(`${rowCount} row(s) returned`);
      callback(null);
    }
  });

  request.on("row", (columns) => {
    let result = {};
    columns.forEach((column) => {
      result[column.metadata.colName] = column.value;
      console.log("%s\t%s", column.metadata.colName, column.value);
    });
    // save result into an array
    results.push(result);
  });

  connection.execSql(request);
}
function Complete(err, result) {
  if (err) {
    console.error(err);
  } else {
    connection.close();
    console.log("close connection");
  }
}
connection.on("connect", function (err) {
if (err) {
console.log(err);
} else {
console.log("Connected");
// Execute all functions in the array serially
async.waterfall([queryDatabase], Complete);
}
});
connection.connect();
Besides, you can also use the mssql package. It supports asynchronous methods and is built on top of tedious, so you can simply close the connection after the query resolves.
For example
const mssql = require("mssql");
const config = {
user: "username",
password: "password",
server: "your_server.database.windows.net",
database: "your_database",
options: {
encrypt: true,
enableArithAbort: true,
},
};
let pool = new mssql.ConnectionPool(config);
async function query() {
try {
await pool.connect();
const request = pool.request();
const result = await request.query("SELECT * FROM Person");
console.dir(result.recordset);
await pool.close();
console.log(pool.connected);
} catch (error) {
throw error;
}
}
query().catch((err) => {
throw err;
});
You can define a class first and declare an array to save the objects, for example:
let sales = new Array();

class SalesLT {
  constructor(catagryName, productName) {
    this.catagryName = catagryName;
    this.productName = productName;
  }
}
Here my SQL statement returns two columns, so on each pass the loop takes two elements from the columns array.
request.on("row", columns => {
for(let i=0; i<columns.length; i=i+2){
let sale = new SalesLT(columns[i].value,columns[i+1].value);
sales.push(sale);
}
sales.forEach( item => {
console.log("%s\t%s",item.catagryName, item.productName)
})
});
The code is as follows:
const { Connection, Request } = require("tedious");

let sales = new Array();

class SalesLT {
  constructor(catagryName, productName) {
    this.catagryName = catagryName;
    this.productName = productName;
  }
}

// Create connection to database
const config = {
  authentication: {
    options: {
      userName: "<***>", // update me
      password: "<***>"  // update me
    },
    type: "default"
  },
  server: "<****>.database.windows.net", // update me
  options: {
    database: "<***>", // update me
    encrypt: true
  }
};

const connection = new Connection(config);

// Attempt to connect and execute queries if connection goes through
connection.on("connect", err => {
  if (err) {
    console.error(err.message);
  } else {
    queryDatabase();
  }
});

function queryDatabase() {
  console.log("Reading rows from the Table...");

  // Read all rows from table
  const request = new Request(
    `SELECT TOP 2 pc.Name as CategoryName,
       p.name as ProductName
     FROM [SalesLT].[ProductCategory] pc
     JOIN [SalesLT].[Product] p ON pc.productcategoryid = p.productcategoryid`,
    (err, rowCount) => {
      if (err) {
        console.error(err.message);
      } else {
        console.log(`${rowCount} row(s) returned`);
      }
      connection.close();
    }
  );

  request.on("row", columns => {
    for (let i = 0; i < columns.length; i = i + 2) {
      let sale = new SalesLT(columns[i].value, columns[i + 1].value);
      sales.push(sale);
    }
    sales.forEach(item => {
      console.log("%s\t%s", item.catagryName, item.productName);
    });
  });

  connection.execSql(request);
}
This article should help you solve all the issues you are facing, which were the same ones I had when I started using Node :)
https://devblogs.microsoft.com/azure-sql/promises-node-tedious-azure-sql-oh-my/
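As a rough illustration of the approach that article describes (wrapping a callback-based tedious query in a Promise so it can be awaited and the connection closed afterwards), a minimal sketch could look like the following. The table name and the usage lines are placeholders, not taken from the article:

// Sketch only: wrap the callback-based tedious request in a Promise.
function queryAsPromise(connection, sql) {
  return new Promise((resolve, reject) => {
    const rows = [];
    const request = new Request(sql, (err) => {
      if (err) {
        reject(err);
      } else {
        resolve(rows);
      }
    });
    request.on("row", (columns) => {
      const row = {};
      columns.forEach((column) => {
        row[column.metadata.colName] = column.value;
      });
      rows.push(row);
    });
    connection.execSql(request);
  });
}

// Usage: await the query, then close the connection.
// const clients = await queryAsPromise(connection, "SELECT * FROM clients");
// connection.close();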
Today I am trying to get a file upload working with Hapi.js. I have followed all the Google results, which show similar code.
This is the code:
server.route({
  method: "POST",
  path: `${PUBLIC_URL}${THEME_URL}/create`,
  handler: async (request: any, reply: ResponseToolkit) => {
    console.log(request.payload.file, 'payload')
    return reply.response(request.payload)
  },
  options: {
    payload: {
      output: 'stream',
      allow: 'multipart/form-data',
      parse: false,
    }
  }
})
With that code I can't get my file or data from request.payload. This is my request with Postman:
(screenshot: posting the file with Postman)
I get undefined at request.payload.file.
If I set payload: { parse: true } I get "unsupported media type".
Thanks for your attention.
If you are using @hapi/hapi 18.x.x or later, then you must use the following syntax:
payload: {
  parse: true,
  multipart: {
    output: 'stream'
  },
  maxBytes: 1000 * 1000 * 5, // 5 MB
}
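For context, here is a minimal sketch of where that payload block fits in a hapi 18+ route; the path and the file field name are assumptions that should match the form field sent from Postman:

// Sketch only: a minimal route using the payload settings above.
server.route({
  method: 'POST',
  path: '/upload', // assumed path
  options: {
    payload: {
      parse: true,
      multipart: { output: 'stream' },
      maxBytes: 1000 * 1000 * 5 // 5 MB
    }
  },
  handler: async (request, h) => {
    // 'file' is whatever key the multipart form field uses in Postman.
    const file = request.payload.file;
    return { filename: file && file.hapi ? file.hapi.filename : null };
  }
});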
Also, you can try using Joi to validate your payload.
// Note: fs, Boom, XLSX and the service helpers (getUserIdService,
// uploadUpdateService, uploadCreate) are assumed to be required elsewhere.
{
  method: 'POST',
  path: '/upload',
  options: {
    payload: {
      maxBytes: 209715200,
      output: 'stream',
      parse: true,
      allow: 'multipart/form-data',
      multipart: true // <-- this fixed the media type error
    },
    handler: async (req, reply) => {
      try {
        // await importData(req.payload)
        // return reply.response("IMPORT SUCCESSFULLY")
        const data = await req.payload;
        // let final = await importFile(data)
        // return reply.response("final", final)
        if (data.file) {
          let name = await data.file.hapi.filename;
          console.log("Filename: " + name);
          let path = await __dirname + "/uploads/" + name;
          let file = await fs.createWriteStream(path);
          await data.file.pipe(file);
          await data.file.on('end', async function (err) {
            // var ret = {
            //   filename: data.file.hapi.filename,
            //   headers: data.file.hapi.headers
            // }
            if (typeof require !== 'undefined')
              XLSX = require('xlsx');
            const workbook = await XLSX.readFile(path);
            var sheetName = workbook.SheetNames;
            console.log("row======>>>>");
            await sheetName.forEach(async () => {
              let xlData = await XLSX.utils.sheet_to_json(workbook.Sheets[sheetName[0]]);
              console.log("xlData", xlData);
              for (let i = 0; i < xlData.length; i++) {
                console.log("if condition", xlData[i].phone)
                const userCheck = await getUserIdService({ where: { phone: xlData[i].phone } });
                console.log("userCheck", userCheck.data)
                console.log("test", !(userCheck.data === null));
                if (!(userCheck.data === null)) {
                  console.log("finally ", userCheck.data?.phone)
                  await uploadUpdateService(xlData[i], { where: { phone: userCheck.data?.phone } });
                  // return finalUpdate
                  // return reply.response("updated")
                }
                else if (!xlData[i].customerID) {
                  await uploadCreate(xlData[i]);
                  // return finalCreate
                }
              }
            })
          })
        }
      } catch (err) {
        console.log('Err----------------------' + err);
        // error handling
        return reply.response(Boom.badRequest(err.message, err))
      }
    }
  }
}
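A rough sketch of the Joi validation mentioned above; the field name and limits are assumptions, not taken from the original answer:

// Sketch only: validate the multipart payload with Joi.
const Joi = require('joi');

server.route({
  method: 'POST',
  path: '/upload',
  options: {
    payload: {
      parse: true,
      multipart: { output: 'stream' },
      allow: 'multipart/form-data',
      maxBytes: 209715200
    },
    validate: {
      payload: Joi.object({
        file: Joi.any().required() // the uploaded file stream
      })
    }
  },
  handler: async (request, h) => {
    return h.response('ok');
  }
});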
I have some problems with my multi-file upload code. I want to send my files object in a POST request, but in the console I get 0: (binary), 1: (binary)... instead of my files object. When I add a console.log inside my for (var key in this.attachments) loop everything is okay and the attachments variable is not empty. Do you have any idea how to solve it?
export default {
  data() {
    return {
      body: null,
      attachments: [],
      form: new FormData
    }
  },
  methods: {
    filesChange(e) {
      let selectedFiles = e.target.files;
      if (!selectedFiles.length) {
        return false;
      }
      for (let i = 0; i < selectedFiles.length; ++i) {
        this.attachments.push(selectedFiles[i]);
      }
    },
    submit() {
      for (var key in this.attachments) {
        this.form.append(key, this.attachments[key]);
      }
      const config = { headers: { 'content-type': 'multipart/form-data' } }
      this.form.append('body', this.body);
      axios.post(`/api/question/${this.questionID}/reply`, this.form, config)
        .then(res => {
        })
    }
  }
}
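For what it is worth, the 0: (binary), 1: (binary) display is simply how the browser renders File entries inside a FormData object; the data is still attached. A small sketch of how the entries could be appended under one named key and inspected before posting; the attachments[] key is an assumption, matching a common back-end convention rather than anything in the original code:

// Sketch only: append files under a named key and log what FormData holds.
submit() {
  const form = new FormData();
  this.attachments.forEach(file => {
    form.append('attachments[]', file, file.name);
  });
  form.append('body', this.body);

  // Inspect the entries that will actually be sent.
  for (const [key, value] of form.entries()) {
    console.log(key, value instanceof File ? value.name : value);
  }

  return axios.post(`/api/question/${this.questionID}/reply`, form);
}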
I am using Meteor.js with Amazon S3 Bucket for uploading and storing photos.
I am using the CollectionFS package and Meteor-CollectionFS/packages/s3/.
However, there is no error or response displayed when I try to upload the file.
Client side event handler:
'change .fileInput': function(e, t) {
  FS.Utility.eachFile(e, function(file) {
    Images.insert(file, function (err, fileObj) {
      if (err) {
        console.log(err);
      } else {
        console.log("fileObj id: " + fileObj._id);
        //Meteor.users.update(userId, {$set: imagesURL});
      }
    });
  });
}
Client side declaration
var imageStore = new FS.Store.S3("imageStore");
Images = new FS.Collection("images", {
stores: [imageStore],
filter: {
allow: {
contentTypes: ['image/*']
}
}
});
Server Side
var imageStore = new FS.Store.S3("imageStore", {
accessKeyId: "xxxx",
secretAccessKey: "xxxx",
bucket: "mybucket"
});
Images = new FS.Collection("images", {
stores: [imageStore],
filter: {
allow: {
contentTypes: ['image/*']
}
}
});
Does anyone have any idea what is happening?
Using ember-cli 0.1.2 and ember-cli-simple-auth 0.7.0, I need to invalidate the session both on the client and on the server. As explained here, I need to do something similar to the authenticate method: make an AJAX request to the server and ensure its success before emptying the session:
import Ember from 'ember';
import Base from "simple-auth/authenticators/base";

var CustomAuthenticator = Base.extend({
  tokenEndpoint: 'http://127.0.0.1:3000/api/v1/auth/login',

  restore: function(data) {
  },

  authenticate: function(credentials) {
    var _this = this;
    return new Ember.RSVP.Promise(function(resolve, reject) {
      Ember.$.ajax({
        url: _this.tokenEndpoint,
        type: 'POST',
        data: JSON.stringify({ email: credentials.identification, password: credentials.password }),
        contentType: 'application/json'
      }).then(function(response) {
        Ember.run(function() {
          resolve({ token: response.token });
        });
      }, function(xhr, status, error) {
        var response = JSON.parse(xhr.responseText);
        Ember.run(function() {
          reject(response.error);
        });
      });
    });
  },

  invalidate: function() {
    var _this = this;
    return new Ember.RSVP.Promise(function(resolve, reject) {
      Ember.$.ajax({
        url: _this.tokenEndpoint,
        type: 'DELETE'
      }).then(function(response) {
        resolve();
      }, function(xhr, status, error) {
        var response = JSON.parse(xhr.responseText);
        Ember.run(function() {
          reject(response.error);
        });
      });
    });
  }

  // invalidate: function() {
  //   var _this = this;
  //   return new Ember.RSVP.Promise(function(resolve) {
  //     Ember.$.ajax({ url: _this.tokenEndpoint, type: 'DELETE' }).always(function() {
  //       resolve();
  //     });
  //   });
  // }
});

export default {
  name: 'authentication',
  before: 'simple-auth',
  initialize: function(container) {
    container.register('authenticator:custom', CustomAuthenticator);
  }
};
My logout API endpoint needs the token (in the headers). How do I pass it? I read this, but my authorizer seems to be ignoring it and I get a 401:
import Ember from 'ember';
import Base from 'simple-auth/authorizers/base';

var CustomAuthorizer = Base.extend({
  authorize: function(jqXHR, requestOptions) {
    Ember.debug("AUTHORIZING!");
  }
});

export default {
  name: 'authorization',
  before: 'simple-auth',
  initialize: function(container) {
    container.register('authorizer:custom', CustomAuthorizer);
  }
};
My environment.js:
/* jshint node: true */

module.exports = function(environment) {
  var ENV = {
    modulePrefix: 'wishhhh',
    environment: environment,
    baseURL: '/',
    locationType: 'auto',
    EmberENV: {
      FEATURES: {
        // Here you can enable experimental features on an ember canary build
        // e.g. 'with-controller': true
      }
    },
    APP: {
      // Here you can pass flags/options to your application instance
      // when it is created
    }
  };

  // TODO: disabled because of https://github.com/stefanpenner/ember-cli/issues/2174
  ENV.contentSecurityPolicyHeader = 'Disabled-Content-Security-Policy';

  ENV['simple-auth'] = {
    authorizer: 'authorizer:custom',
    // crossOriginWhitelist: ['http://localhost:3000']
    crossOriginWhitelist: ['*']
  };

  if (environment === 'development') {
    // ENV.APP.LOG_RESOLVER = true;
    ENV.APP.LOG_ACTIVE_GENERATION = true;
    // ENV.APP.LOG_TRANSITIONS = true;
    // ENV.APP.LOG_TRANSITIONS_INTERNAL = true;
    ENV.APP.LOG_VIEW_LOOKUPS = true;
  }

  if (environment === 'test') {
    // Testem prefers this...
    ENV.baseURL = '/';
    ENV.locationType = 'auto';

    // keep test console output quieter
    ENV.APP.LOG_ACTIVE_GENERATION = false;
    ENV.APP.LOG_VIEW_LOOKUPS = false;

    ENV.APP.rootElement = '#ember-testing';
  }

  if (environment === 'production') {
  }

  return ENV;
};
The following is the Ember inspector output when, eventually, I try to logout:
Did you actually configure Ember Simple Auth to use your custom authorizer? In that case it should authorize the session invalidation request automatically.
Alternatively you could add the token in the authenticator's invalidate method which gets passed the session's contents.
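For illustration, a minimal sketch of that second option, assuming the token was stored by authenticate and that the API expects it in an Authorization header (the header name and format are assumptions):

// Sketch only: invalidate() receives the session's contents, so the stored
// token can be attached to the DELETE request.
invalidate: function(data) {
  var _this = this;
  return new Ember.RSVP.Promise(function(resolve, reject) {
    Ember.$.ajax({
      url: _this.tokenEndpoint,
      type: 'DELETE',
      headers: { 'Authorization': 'Bearer ' + data.token }
    }).then(function() {
      Ember.run(null, resolve);
    }, function(xhr) {
      Ember.run(null, reject, xhr.responseText);
    });
  });
}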
Thanks to marcoow, I found out that it was actually a problem with every request, not only the logout one: my authorizer never got called. The problem was the environment setup of crossOriginWhitelist which, in order to work with my dev API, I had to set to ['http://127.0.0.1:3000']. Neither ['http://localhost:3000'] nor ['*'] worked.
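For the record, the working setting in environment.js ends up being:

// environment.js — the crossOriginWhitelist entry that finally made the
// custom authorizer run against the dev API:
ENV['simple-auth'] = {
  authorizer: 'authorizer:custom',
  crossOriginWhitelist: ['http://127.0.0.1:3000']
};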