I'd like an npm script to create/configure/etc. and finally import a SQL dump. The entire creation, configuring, etc. is working; however, I cannot get the import to work. The data is never inserted. Here's what I have (never mind the nested callbacks, as they'll be turned into promises):
connection.query(`DROP DATABASE IF EXISTS ${config.database};`, err => {
  connection.query(`CREATE DATABASE IF NOT EXISTS ${config.database};`, err => {
    connection.query('use DATABASENAME', err => {
      const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
      connection.query(`SOURCE ${sqlDumpPath}`, err => {
        connection.end(err => resolve());
      });
    });
  });
});
I also tried the following with Sequelize (ORM):
return new Promise(resolve => {
  const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
  fs.readFile('./sql/dump.sql', 'utf-8', (err, data) => {
    sequelize
      .query(data)
      .then(resolve)
      .catch(console.error);
  });
});
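For what it's worth, one likely reason the first snippet fails is that SOURCE is a command of the mysql command-line client, not a SQL statement the server understands (and the err arguments above are never checked, so the failure is silent). A minimal sketch of a workaround with the mysql npm driver, reading the dump yourself and enabling multipleStatements; the connection options here are assumptions, not taken from the question:
const fs = require('fs');
const path = require('path');
const mysql = require('mysql');

// multipleStatements lets a single query() call run every statement in the dump
const connection = mysql.createConnection({
  host: 'localhost',        // assumption: replace with your config
  user: 'root',             // assumption
  password: '',             // assumption
  database: 'DATABASENAME', // assumption
  multipleStatements: true
});

const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
connection.query(fs.readFileSync(sqlDumpPath, 'utf8'), err => {
  if (err) throw err;
  connection.end();
});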
Here's how I set up my initial Sequelize import using the migrations framework. There is plenty going on here, but in short I:
find the latest sql-dump in the migrations folder
read the file using fs
split the text into queries
check if it's a valid query and, if so, apply some cleaning that my data required (see related post)
build up an array full of queries - I start by making sure that the database is clean by calling this.down first
run everything as a promise chain (as suggested here) using mapSeries (not map)
Using sequelize-cli, you can create a migration from your shell by running:
sequelize migration:create
You will automatically get a file where you enter the code below. To execute the migration, you simply run:
sequelize db:migrate
"use strict";
const promise = require("bluebird");
const fs = require("fs");
const path = require("path");
const assert = require("assert");
const db = require("../api/models"); // To be able to run raw queries
const debug = require("debug")("my_new_api");
// I needed this in order to get some encoding issues straight
const Aring = new RegExp(String.fromCharCode(65533) +
"\\" + String.fromCharCode(46) + "{1,3}", "g");
const Auml = new RegExp(String.fromCharCode(65533) +
String.fromCharCode(44) + "{1,3}", "g");
const Ouml = new RegExp(String.fromCharCode(65533) +
String.fromCharCode(45) + "{1,3}", "g");
module.exports = {
up: function (queryInterface, Sequelize) {
// The following section allows me to have multiple sql-files and only use the last dump
var last_sql;
for (let fn of fs.readdirSync(__dirname)){
if (fn.match(/\.sql$/)){
fn = path.join(__dirname, fn);
var stats = fs.statSync(fn);
if (typeof last_sql === "undefined" ||
last_sql.stats.mtime < stats.mtime){
last_sql = {
filename: fn,
stats: stats
};
}
}
}
assert(typeof last_sql !== "undefined", "Could not find any valid sql files in " + __dirname);
// Split file into queries
var queries = fs.readFileSync(last_sql.filename).toString().split(/;\n/);
var actions = [{
query: "Running the down section",
exec: this.down
}]; // Clean database by calling the down first
for (let i in queries){
// Skip empty queries and the character set information in the 40101 section
// as this would most likely require a multi-query set-up
if (queries[i].trim().length == 0 ||
queries[i].match(new RegExp("/\\*!40101 .+ \\*/"))){
continue;
}
// The manual fixing of encoding
let clean_query = queries[i]
.replace(Aring, "Å")
.replace(Ouml, "Ö")
.replace(Auml, "Ä");
actions.push({
query: clean_query.substring(0, 200), // We save a short section of the query only for debugging purposes
exec: () => db.sequelize.query(clean_query)
});
}
// The Series is important as the order isn't retained with just map
return promise.mapSeries(actions, function(item) {
debug(item.query);
return item.exec();
}, { concurrency: 1 });
},
down: function (queryInterface, Sequelize) {
var tables_2_drop = [
"items",
"users",
"usertypes"
];
var actions = [];
for (let tbl of tables_2_drop){
actions.push({
// The created should be created_at
exec: () => db.sequelize.query("DROP TABLE IF EXISTS `" + tbl +"`")
});
}
return promise.map(actions, function(item) {
return item.exec();
}, { concurrency: 1 });/**/
}
};
Based loosely on Max Gordon's answer, here's my code to run a MySQL dump file from Node.js/Sequelize:
"use strict";
const fs = require("fs");
const path = require("path");
/**
* Start off with a MySQL Dump file, import that, and then migrate to the latest version.
*
* #param dbName {string} the name of the database
* #param mysqlDumpFile {string} The full path to the file to import as a starting point
*/
module.exports.migrateFromFile = function(dbName, mysqlDumpFile) {
let sequelize = createSequelize(dbName);
console.log("Importing from " + mysqlDumpFile + "...");
let queries = fs.readFileSync(mysqlDumpFile, {encoding: "UTF-8"}).split(";\n");
console.log("Importing dump file...");
// Setup the DB to import data in bulk.
let promise = sequelize.query("set FOREIGN_KEY_CHECKS=0"
).then(() => {
return sequelize.query("set UNIQUE_CHECKS=0");
}).then(() => {
return sequelize.query("set SQL_MODE='NO_AUTO_VALUE_ON_ZERO'");
}).then(() => {
return sequelize.query("set SQL_NOTES=0");
});
console.time("Importing mysql dump");
for (let query of queries) {
query = query.trim();
if (query.length !== 0 && !query.match(/\/\*/)) {
promise = promise.then(() => {
console.log("Executing: " + query.substring(0, 100));
return sequelize.query(query, {raw: true});
})
}
}
return promise.then(() => {
console.timeEnd("Importing mysql dump");
console.log("Migrating the rest of the way...");
console.time("Migrating after importing mysql dump");
return exports.migrateUp(dbName); // Run the rest of your migrations
}).then(() => {
console.timeEnd("Migrating after importing mysql dump");
});
};
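The snippet above relies on a createSequelize() helper and an exports.migrateUp() function that aren't shown. A minimal sketch of what createSequelize might look like; the dialect and credentials are assumptions, not from the original answer:
const Sequelize = require("sequelize");

function createSequelize(dbName) {
  // Assumed connection settings; adjust to your environment.
  return new Sequelize(dbName, "root", "", {
    host: "localhost",
    dialect: "mysql",
    logging: false
  });
}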
It seems quite new, but I'm hoping someone here has been able to use Node.js to write directly to BigQuery storage using @google-cloud/bigquery-storage.
There is an explanation of how the overall backend API works and how to write a collection of rows atomically using the BigQuery Write API, but there is no such documentation for Node.js yet. A recent release, 2.7.0, mentions the addition of said feature, but there is no documentation and the code is not easily understood.
There is an open issue requesting an example, but I thought I'd try my luck and see if anyone has been able to use this API yet.
Suppose you have a BigQuery table called student with three columns: id, name and age. The following steps will let you load data into the table with the Node.js Storage Write API.
Define a student.proto file as follows:
syntax = "proto2";
message Student {
required int64 id = 1;
optional string name = 2;
optional int64 age = 3;
}
Run the following at the command prompt
protoc --js_out=import_style=commonjs,binary:. student.proto
It should generate a student_pb.js file in the current directory.
Write the following JS code in the current directory and run it:
const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1;
const st = require('./student_pb.js');
const type = require('@google-cloud/bigquery-storage').protos.google.protobuf.FieldDescriptorProto.Type;
const mode = require('@google-cloud/bigquery-storage').protos.google.cloud.bigquery.storage.v1.WriteStream.Type;

const storageClient = new BigQueryWriteClient();
const parent = `projects/${project}/datasets/${dataset}/tables/student`;

var writeStream = {type: mode.PENDING};
var student = new st.Student();

var protoDescriptor = {};
protoDescriptor.name = 'student';
protoDescriptor.field = [
  {'name': 'id', 'number': 1, 'type': type.TYPE_INT64},
  {'name': 'name', 'number': 2, 'type': type.TYPE_STRING},
  {'name': 'age', 'number': 3, 'type': type.TYPE_INT64}
];

async function run() {
  try {
    var request = {
      parent,
      writeStream
    };
    var response = await storageClient.createWriteStream(request);
    writeStream = response[0].name;

    var serializedRows = [];

    // Row 1
    student.setId(1);
    student.setName('st1');
    student.setAge(15);
    serializedRows.push(student.serializeBinary());

    // Row 2
    student.setId(2);
    student.setName('st2');
    student.setAge(15);
    serializedRows.push(student.serializeBinary());

    var protoRows = {
      serializedRows
    };
    var proto_data = {
      writerSchema: {protoDescriptor},
      rows: protoRows
    };

    // Construct request
    request = {
      writeStream,
      protoRows: proto_data
    };

    // Insert rows
    const stream = await storageClient.appendRows();
    stream.on('data', response => {
      console.log(response);
    });
    stream.on('error', err => {
      throw err;
    });
    stream.on('end', async () => {
      /* API call completed */
      try {
        var response = await storageClient.finalizeWriteStream({name: writeStream});
        response = await storageClient.batchCommitWriteStreams({parent, writeStreams: [writeStream]});
      }
      catch (err) {
        console.log(err);
      }
    });
    stream.write(request);
    stream.end();
  }
  catch (err) {
    console.log(err);
  }
}

run();
Make sure your environment variables are set correctly to point to the file containing your Google Cloud credentials.
Change the project and dataset values accordingly.
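For example, the credentials file is usually picked up through the GOOGLE_APPLICATION_CREDENTIALS environment variable, and the two undefined identifiers could be supplied the same way. The variable names GCP_PROJECT and BQ_DATASET below are hypothetical, just for illustration:
// Set before constructing BigQueryWriteClient so the library can find the key file.
process.env.GOOGLE_APPLICATION_CREDENTIALS = '/path/to/service-account.json'; // assumption
const project = process.env.GCP_PROJECT; // e.g. 'my-project'
const dataset = process.env.BQ_DATASET;  // e.g. 'my_dataset'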
This is the query I am using:
app.get("/items/:data", async (req, res) => {
const { data } = req.params;
query = `
SELECT items.discount
FROM items
WHERE items.discount #? '$[*] ? (#.discount[*].shift == $1)'
`
try {
const obj = await pool.query(query, [data]);
res.json(obj.rows[0])
} catch(err) {
console.error(err.message);
}
});
I get this error:
error: bind message supplies 1 parameters, but prepared statement "" requires 0
I am using the node-postgres package in Node.js.
How can I solve this issue?
Use bracket notation instead of dot notation. So instead of obj.key, use obj[key].
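For example, when the key name lives in a variable (a generic illustration, not tied to the query above):
const row = { discount: 10 };
const key = 'discount';

console.log(row[key]); // 10 - bracket notation resolves the variable
console.log(row.key);  // undefined - dot notation looks for a property literally named "key"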
Updated
All the driver connectors come with their own way to do what you're looking for. node-postgres also has its own:
Pool
import { Pool } from 'pg';

const pool = new Pool({
  host: 'localhost',
  user: 'database-user',
  max: 20,
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000,
});

/**
 * Execs the given sql statement.
 *
 * @param {string} sql - query to run.
 * @param {Array} params - an array with the parameters.
 * @example
 * runQuery("SELECT * FROM users WHERE id = $1", [1]).then(result => console.log(result))
 */
export async function runQuery (sql, params) {
  const connection = await pool.connect();
  try {
    await connection.query('BEGIN');
    const result = await connection.query(sql, params);
    // check what result has
    console.log(result);
    await connection.query('COMMIT');
    return result;
  } catch (e) {
    await connection.query('ROLLBACK');
    throw e;
  } finally {
    connection.release();
  }
}
Pool Config
config = {
  // all valid client config options are also valid here
  // in addition here are the pool specific configuration parameters:

  // number of milliseconds to wait before timing out when connecting a new client
  // by default this is 0 which means no timeout
  connectionTimeoutMillis?: int,

  // number of milliseconds a client must sit idle in the pool and not be checked out
  // before it is disconnected from the backend and discarded
  // default is 10000 (10 seconds) - set to 0 to disable auto-disconnection of idle clients
  idleTimeoutMillis?: int,

  // maximum number of clients the pool should contain
  // by default this is set to 10.
  max?: int,
}
Conclusion
So, basically, the structure of a query should look more or less like this:
const text = 'INSERT INTO users(name, email) VALUES($1, $2) RETURNING *';
const values = ['brianc', 'brian.m.carlson@gmail.com'];

connection
  .query(text, values)
  .then(res => {
    console.log(res.rows[0]);
    // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
  })
  .catch(e => console.error(e.stack));
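Coming back to the error in the original question: the $1 there sits inside the single-quoted jsonpath literal, so Postgres sees no bind placeholders while node-postgres supplies one value, hence "supplies 1 parameters ... requires 0". One way around that is to pass the value through the vars argument of jsonb_path_exists. This is a sketch, assuming PostgreSQL 12+ and that shift is stored as text:
app.get("/items/:data", async (req, res) => {
  const { data } = req.params;
  // $shift inside the jsonpath is a jsonpath variable, filled in from the third argument.
  const query = `
    SELECT items.discount
    FROM items
    WHERE jsonb_path_exists(
      items.discount,
      '$[*] ? (@.discount[*].shift == $shift)',
      jsonb_build_object('shift', $1::text)
    )
  `;
  try {
    const obj = await pool.query(query, [data]);
    res.json(obj.rows[0]);
  } catch (err) {
    console.error(err.message);
  }
});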
So in my project I am trying to gather a simple Discord username and unique identifier from Discord and store them in an SQLite database file. I get the error:
let userDB = new sqlite.Database('./disco.db', sqlite.OPEN_READWRITE);
             ^
TypeError: sqlite.Database is not a constructor
Here is my code in my index.js
// Requirements
const Discord = require('discord.js');
const client = new Discord.Client();
const fs = require('fs');
const ServList = client.guilds.cache.size;
const sqlite = require('sqlite3').verbose();
require('dotenv').config();

// client login function
client.login(process.env.TOKEN);

// Start up Check list
client.once('ready', () => {
  // Log and Set Status
  console.log('Bot Online');
  client.user.setActivity(`Proudly in ${client.guilds.cache.size} servers`, {
    type: "WATCHING",
  }, 60000);

  // Database Initialization
  let userDB = new sqlite.Database('./disco.db', sqlite.OPEN_READWRITE | sqlite.OPEN_CREATE);
});
Here is my code for the command that is creating the error:
const Discord = require('discord.js');
const sqlite = require('sqlite3').verbose();

module.exports = {
  name: 'create',
  description: "Create your account!",
  use(message, args, client, sqlite) {
    // Data to Add
    let userDB = new sqlite.Database('./disco.db', sqlite.OPEN_READWRITE);
    userDB.run(`CREATE TABLE IF NOT EXIST usersInfo(userID INTEGER NOT NULL, uNameR TEXT NOT NULL)`);
    let userID = message.author.id;
    let uName = message.author.tag;
    let uQuery = `SELECT * FROM usersInfo WHERE userID = ?`;
    userDB.get(uQuery, [userID], (err, row) => {
      if (err) {
        console.log(err);
        return;
      }
      if (row === undefined) {
        userDB.prepare(`INSERT INTO usersInfo VALUES(?,?)`);
        insertdata.run('userID, uName');
        insertdata.finalize();
        userDB.close();
      } else {
        let userID2 = row.userID;
        let yName = row.uNameR;
        console.log(yName, userID);
      }
    });
    message.channel.send('success');
  }
}
Edit: Your question has been identified as a possible duplicate of another question. If the answers there do not address your problem, please edit to explain in detail the parts of your question that are unique.
The suggested solution does not work for me, as the suggested answer uses MySQL while I use SQLite3. Not only that, but the suggested answer attempts to connect to a hosted database while mine is local.
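For what it's worth, one thing worth checking (a guess based on the posted code, not a confirmed fix): the command's use(message, args, client, sqlite) signature shadows the sqlite3 module required at the top of the file, so inside the function sqlite.Database can end up undefined, and new sqlite.Database(...) then throws exactly "is not a constructor". A sketch without the shadowing parameter:
const sqlite = require('sqlite3').verbose(); // keep the top-level require as the only `sqlite`

module.exports = {
  name: 'create',
  description: 'Create your account!',
  use(message, args, client) { // no `sqlite` parameter shadowing the module
    const userDB = new sqlite.Database('./disco.db', sqlite.OPEN_READWRITE | sqlite.OPEN_CREATE);
    // ...rest of the command as posted above
  }
};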
I am downloading a zip file which has a JSON file zipped inside. Using cy.readFile, I am able to read the content, but I am not sure which commands can be used to assert on the values inside.
(Please let me know if there is a way to unzip the file before reading.)
I need to verify that 3 objectids are present in the JSON, and also some values of the elements.
I tried the approach below, but it did not work.
cy.readFile(`/Users/${username}/Downloads/${fileName}.zip`)
  .should('contain', 'objectid').and('have.length', 3);
The above command did not work for me :(
Could someone help me with some examples? I am new to Cypress and coding, therefore struggling a little.
You can change the download folder in every test case!
Look into cypress -> plugins -> index.js and write this:
module.exports = (on, config) => {
  on('before:browser:launch', (browser, options) => {
    const downloadDirectory = 'C:\\downloads\\'; // this is the path you want to download to
    options.preferences.default['download'] = { default_directory: downloadDirectory };
    return options;
  });
};
Do it like this
cy.readFile(`/Users/${username}/Downloads/${fileName}.zip`)
  .then((data) => {
    // you can write whatever assertions you want on data
    debugger;
    console.log(data);
    expect(data).to....
  })
You can put a debugger statement as above, and logs, to check what data contains, and then assert.
Use this link to learn about the available assertions: https://docs.cypress.io/guides/references/assertions.html#BDD-Assertions
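A couple of concrete examples of such assertions, assuming the zip has already been extracted and the resulting JSON is a top-level array whose entries carry an objectid property (the names come from the question, not from a real export):
cy.readFile(`/Users/${username}/Downloads/${fileName}.json`).then((data) => {
  expect(data).to.have.length(3);                                    // exactly 3 entries
  data.forEach((item) => expect(item).to.have.property('objectid')); // each entry has an objectid
});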
So here is the approach I am following. It is quite lengthy, but I am still posting it as it might be helpful for someone. Please comment if you have any suggestions for improvements.
I am using the unzipper npm package to unzip the downloaded file.
Step 1: $ npm install unzipper
Step 2: In plugins > index.js
const fs = require('fs');
const os = require('os');
const unzipper = require('unzipper');

const osplatform = os.platform();
const userName = os.userInfo().username;

let downloadPath = `/${userName}/Downloads/`;
if (osplatform == 'win32') {
  downloadPath = `/Users/${userName}/Downloads/`;
}

// Tasks have to be registered inside the exported plugins function that receives `on`.
module.exports = (on, config) => {
  on('task', {
    extractzip(zipname) {
      const zipPath = downloadPath + zipname;
      if (fs.existsSync(zipPath)) {
        // Note: Extract() runs asynchronously; the json path is returned right away.
        const readStream = fs.createReadStream(zipPath);
        readStream.pipe(unzipper.Extract({ path: `${downloadPath}` }));
        const jsonname = 'testfile.json';
        const jsonPath = downloadPath + jsonname;
        return jsonPath;
      } else {
        console.error('file not downloaded');
        return null;
      }
    }
  });
};
Step 3: In support > commands.js
Cypress.Commands.add('comparefiles', { prevSubject: false }, (subject, options = {}) => {
  cy.task('extractzip', 'exportfile.zip').then((jsonPath) => {
    cy.fixture('export.json').then((comparefile) => {
      cy.readFile(jsonPath).then((exportedfile) => {
        var exported_objectinfo = exportedfile.objectInfo;
        var compare_objectinfo = comparefile.objectInfo;
        var exported_metaInfo = exportedfile.metaInfo;
        var compare_metaInfo = comparefile.metaInfo;
        expect(exported_objectinfo).to.contain.something.like(compare_objectinfo);
        expect(exported_metaInfo).to.have.deep.members(compare_metaInfo);
      });
    });
  });
});
Step 4: specs > exportandcompare.js
cy.get('[data-ci-button="Export"]').click();
cy.comparefiles();
OK, so my bot got rebuilt with somewhat different code.
I'm using a somewhat more simplified fs command and events handler. My command works as intended.
But I want to add the amount pruned into the fields for the RichEmbed, and it keeps erroring out.
Here is my purge.js file
const Discord = require('discord.js');

module.exports = {
  name: 'purge',
  description: 'Purge up to 99 messages.',
  execute(message, args) {
    console.log("purging messages");
    const embed = new Discord.RichEmbed()
      .setTitle("Success")
      .setColor(0x00AE86)
      .setFooter("Guardian", "https://raw.githubusercontent.com/phantomdev-github/Resources/master/Discord%20Bots/Guardian/src/avatar.png")
      .setThumbnail("https://raw.githubusercontent.com/phantomdev-github/Resources/master/Discord%20Bots/Guardian/src/avatar.png")
      .setTimestamp()
      .setURL("https://github.com/phantomdev-github/Resources/tree/master/Discord%20Bots/Guardian")
      .addField("Bot Messages Purged", "missing code here", false)
      .addField("User Pins Purged", "missing code here", false)
      .addField("User Messages Purged", "missing code here", false)
      .addField("Total Messages Purged", "missing code here", false);
    message.channel.send({ embed });

    const amount = parseInt(args[0]) + 1;
    if (isNaN(amount)) {
      return message.reply('that doesn\'t seem to be a valid number.');
    } else if (amount <= 1 || amount > 100) {
      return message.reply('you need to input a number between 1 and 99.');
    }

    message.channel.bulkDelete(amount, true).catch(err => {
      console.error(err);
      message.channel.send('there was an error trying to prune messages in this channel!');
    });
  },
};
If it helps, this is my index.js
const fs = require('fs');
const Discord = require('discord.js');
const client = new Discord.Client();
const { token } = require('./token.json');

client.commands = new Discord.Collection();

const commandFiles = fs.readdirSync('./commands').filter(file => file.endsWith('.js'));
for (const file of commandFiles) {
  const command = require(`./commands/${file}`);
  client.commands.set(command.name, command);
  console.log(file, command);
}

fs.readdir('./events/', (err, files) => {
  if (err) return console.error(err);
  files.forEach(file => {
    if (!file.endsWith('.js')) return;
    const eventFunction = require(`./events/${file}`);
    console.log(eventFunction);
    eventFunction.execute(client);
  });
});

client.login(token);
and this is my message.js
const { prefix } = require('./prefix.json');

module.exports = {
  name: 'message',
  description: '',
  execute: function(client) {
    client.on('message', message => {
      if (!message.content.startsWith(prefix) || message.author.bot) return;

      const args = message.content.slice(prefix.length).split(/ +/);
      const command = args.shift().toLowerCase();

      if (!client.commands.has(command)) return;

      try {
        client.commands.get(command).execute(message, args);
      } catch (error) {
        console.error(error);
        message.reply('there was an error trying to execute that command!');
      }
    });
  }
};
Basically, I'm trying to figure out what to place into the "missing code here" sections. Also, any way to lock the command to people with Administrator permissions only would be useful as well. I attempted that, but it failed to work with the embed.
If I understand you right, you want to know how to get the counts of purged pins, bot messages and user messages. For this, you need to build the embed after you have deleted the messages.
purge.js
const Discord = require('discord.js');

module.exports = {
  name: 'purge',
  description: 'Purge up to 99 messages.',
  execute(message, args) {
    console.log("purging messages");

    const amount = parseInt(args[0]) + 1;
    if (isNaN(amount)) {
      return message.reply('that doesn\'t seem to be a valid number.');
    } else if (amount <= 1 || amount > 100) {
      return message.reply('you need to input a number between 1 and 99.');
    }

    message.channel.bulkDelete(amount, true).then(deletedMessages => {
      // Filter the deleted messages with .filter()
      var botMessages = deletedMessages.filter(m => m.author.bot);
      var userPins = deletedMessages.filter(m => m.pinned);
      var userMessages = deletedMessages.filter(m => !m.author.bot);

      const embed = new Discord.RichEmbed()
        .setTitle("Success")
        .setColor(0x00AE86)
        .setFooter("Guardian", "https://raw.githubusercontent.com/phantomdev-github/Resources/master/Discord%20Bots/Guardian/src/avatar.png")
        .setThumbnail("https://raw.githubusercontent.com/phantomdev-github/Resources/master/Discord%20Bots/Guardian/src/avatar.png")
        .setTimestamp()
        .setURL("https://github.com/phantomdev-github/Resources/tree/master/Discord%20Bots/Guardian")
        .addField("Bot Messages Purged", botMessages.size, false)
        .addField("User Pins Purged", userPins.size, false)
        .addField("User Messages Purged", userMessages.size, false)
        .addField("Total Messages Purged", deletedMessages.size, false);
      message.channel.send(embed);
    }).catch(err => {
      console.error(err);
      message.channel.send('there was an error trying to prune messages in this channel!');
    });
  },
};
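As for the other part of the question, locking the command to administrators: that isn't covered above, but here is a minimal sketch, assuming the same discord.js version that still exposes RichEmbed, where message.member.hasPermission is available:
// Guard at the top of execute(), before any deletion happens.
// 'ADMINISTRATOR' is a built-in permission flag; the reply text is just an example.
execute(message, args) {
  if (!message.member.hasPermission('ADMINISTRATOR')) {
    return message.reply('you need the Administrator permission to use this command.');
  }
  // ...rest of the purge logic from above
},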