How to add a variable into a JSON path - node-postgres

This is the query I am using:
app.get("/items/:data", async (req, res) => {
  const { data } = req.params;
  const query = `
    SELECT items.discount
    FROM items
    WHERE items.discount @? '$[*] ? (@.discount[*].shift == $1)'
  `;
  try {
    const obj = await pool.query(query, [data]);
    res.json(obj.rows[0]);
  } catch (err) {
    console.error(err.message);
  }
});
I get this error:
error: bind message supplies 1 parameters, but prepared statement "" requires 0
I am using the node-postgres package in Node.js.
How can I solve this issue?

Use bracket notation instead of dot notation: instead of obj.key, use obj[key].

Updated
All of these database drivers come with their own way of doing what you're looking for, and node-postgres has its own as well.
Pool
import { Pool } from 'pg';

const pool = new Pool({
  host: 'localhost',
  user: 'database-user',
  max: 20,
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000,
});
/**
 * Executes the given SQL statement.
 *
 * @param {string} sql - the query to run.
 * @param {Array} params - an array with the query parameters.
 * @example
 * runQuery("SELECT * FROM users WHERE id = $1", [1]).then(result => console.log(result))
 */
export async function runQuery (sql, params) {
  const connection = await pool.connect();
  try {
    await connection.query('BEGIN');
    const result = await connection.query(sql, params);
    // check what result has
    console.log(result);
    await connection.query('COMMIT');
    return result;
  } catch (e) {
    await connection.query('ROLLBACK');
    throw e;
  } finally {
    connection.release();
  }
}
Pool Config
config = {
  // all valid client config options are also valid here
  // in addition here are the pool specific configuration parameters:

  // number of milliseconds to wait before timing out when connecting a new client
  // by default this is 0 which means no timeout
  connectionTimeoutMillis?: int,

  // number of milliseconds a client must sit idle in the pool and not be checked out
  // before it is disconnected from the backend and discarded
  // default is 10000 (10 seconds) - set to 0 to disable auto-disconnection of idle clients
  idleTimeoutMillis?: int,

  // maximum number of clients the pool should contain
  // by default this is set to 10.
  max?: int,
}
Conclusion
So basically the structure of a query should look more or less like this:
const text = 'INSERT INTO users(name, email) VALUES($1, $2) RETURNING *'
const values = ['brianc', 'brian.m.carlson@gmail.com']

connection
  .query(text, values)
  .then(res => {
    console.log(res.rows[0])
    // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
  })
  .catch(e => console.error(e.stack))
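Coming back to the jsonpath question at the top: the error appears because $1 sits inside the single-quoted jsonpath literal, so the driver never sends a bind placeholder to PostgreSQL. A sketch of one workaround, assuming items.discount is a jsonb column and keeping the path from the question, is to pass the value as a jsonpath variable through jsonb_path_exists:

app.get("/items/:data", async (req, res) => {
  const { data } = req.params;
  // $shift is a jsonpath variable, resolved from the jsonb object built by
  // jsonb_build_object, so the $1 placeholder stays outside the quoted path.
  const query = `
    SELECT items.discount
    FROM items
    WHERE jsonb_path_exists(
      items.discount,
      '$[*] ? (@.discount[*].shift == $shift)',
      jsonb_build_object('shift', $1::text)
    )
  `;
  try {
    const obj = await pool.query(query, [data]);
    res.json(obj.rows[0]);
  } catch (err) {
    console.error(err.message);
  }
});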

Related

How to make several postgresql queries atomic (roll back in case of error)?

I'm using PostgreSQL with nodejs for the first time. I want to make several PostgreSQL queries atomic in case of error.
For example:
const group = Group.of(body);
const { rows } = await this.db.query(
  `INSERT INTO groups
  ("defaultImage", "createdAt", "updatedAt")
  VALUES ($1, $2, $3)
  RETURNING *`,
  [group.defaultImage, group.createdAt, group.updatedAt]);
const groupId = rows[0].id;
group.images.map(image => {
  return this.db.query(
    `INSERT INTO groups_images
    ("groups_id", "uri")
    VALUES ($1, $2)`,
    [groupId, image.uri]);
});
If the second query fails, I want the first to be rolled back.
Using a transaction will solve the problem, as explained here:
const { Pool } = require('pg')
const pool = new Pool()

;(async () => {
  // note: we don't try/catch this because if connecting throws an exception
  // we don't need to dispose of the client (it will be undefined)
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    const queryText = 'INSERT INTO users(name) VALUES($1) RETURNING id'
    const res = await client.query(queryText, ['brianc'])
    const insertPhotoText = 'INSERT INTO photos(user_id, photo_url) VALUES ($1, $2)'
    const insertPhotoValues = [res.rows[0].id, 's3.bucket.foo']
    await client.query(insertPhotoText, insertPhotoValues)
    await client.query('COMMIT')
  } catch (e) {
    await client.query('ROLLBACK')
    throw e
  } finally {
    client.release()
  }
})().catch(e => console.error(e.stack))
https://node-postgres.com/features/transactions
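Applied to the groups example from the question, the same pattern looks roughly like this (a sketch: it reuses the pool and group objects from the question and awaits each dependent insert on the same client, instead of firing them through map, so they all join the transaction):

;(async () => {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    const { rows } = await client.query(
      `INSERT INTO groups ("defaultImage", "createdAt", "updatedAt")
       VALUES ($1, $2, $3) RETURNING *`,
      [group.defaultImage, group.createdAt, group.updatedAt]
    )
    const groupId = rows[0].id
    // Dependent inserts run on the same client, so they are part of the transaction
    for (const image of group.images) {
      await client.query(
        `INSERT INTO groups_images ("groups_id", "uri") VALUES ($1, $2)`,
        [groupId, image.uri]
      )
    }
    await client.query('COMMIT')
  } catch (e) {
    await client.query('ROLLBACK')
    throw e
  } finally {
    client.release()
  }
})().catch(e => console.error(e.stack))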

Insert session data into postgres database in Node.js Express app

I have a Node.js Express function where I am trying to insert data that is stored in the browser session into my postgres database. When I have the insert statement like this, the insert works, but the session-stored customer_id isn't inserted and is just left null.
On the line with "var sql = INSERT INTO journal....", the values $1 and $2 are from user input and work correctly.
How can I get the third value, the customer_id stored in the session, to insert correctly? I would appreciate any advice or greater understanding.
app.post("/addJournalEntry", addJournalEntry);

function addJournalEntry(req, res) {
  console.log("Posting data");
  // var id = req.query.id;
  // body is for post, query is for get
  const customer_id = req.session.customer_id;
  const journal_entry_date = req.body.journal_entry_date;
  const journal_entry = req.body.journal_entry;
  const params = [journal_entry, journal_entry_date, customer_id];
  addEntryFromDataLayer(params, function (error, addEntry) {
    console.log("Back from the addEntryFromDataLayer:", addEntry);
    if (error || addEntry == null) {
      res.status(500).json({
        success: false,
        data: error
      });
    } else {
      // res.json(result);
      res.status(200).json(addEntry);
    }
  });
}
function addEntryFromDataLayer(params, callback) {
  console.log("addEntryFromDataLayer called with id");
  var sql = "INSERT INTO journal (journal_entry, journal_entry_date, customer_id) VALUES($1::text, $2::text, $3)";
  // var params = [id];
  pool.query(sql, params, function (err, addEntry) {
    if (err) {
      console.log("error in database connection");
      console.log(err);
      // return so the callback isn't invoked a second time below
      return callback(err, null);
    }
    console.log("Found DB result:" + JSON.stringify(addEntry));
    callback(null, addEntry);
  });
}
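One thing worth checking: req.session.customer_id is only defined if the session middleware is registered before the routes and the value was actually written to the session earlier, for example at login. A sketch assuming express-session (the secret and the login logic are placeholders, not the asker's code):

const session = require("express-session");

// The session middleware must be registered before any route that reads req.session.
app.use(session({
  secret: "replace-with-a-real-secret", // placeholder
  resave: false,
  saveUninitialized: false,
}));

app.post("/login", (req, res) => {
  // ...verify credentials first; the id below is a placeholder...
  const authenticatedCustomerId = 42;
  req.session.customer_id = authenticatedCustomerId;
  res.sendStatus(200);
});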

Reading from one db into another

I am trying to write a function that copies some fields from several company databases into my own database once a day. What I have so far is below. I am wondering whether, where I console.log(rs), I can open another SQL connection to my own database and write the data, or whether I have to store the results somewhere and then open a new connection and send the stored results.
function updateJobs() {
  var query = "SELECT JobStart, JobEnd FROM JobData";
  sql.connect(Config, (err) => {
    if (err) {
      console.log("Error while connecting database :- " + err);
    } else {
      var request = new sql.Request();
      request.query(query, function (err, rs) {
        if (err) {
          console.log("Error while querying database :- " + err);
          sql.close();
        } else {
          console.log(rs);
          sql.close();
        }
      });
    }
  });
}
This might help:
// Source Database
sourceDB.each(`select * from ${Sourcetable}`, (error, row) => {
  console.log(row);
  const keys = Object.keys(row); // ['columnA', 'columnB']
  const columns = keys.toString(); // 'columnA,columnB'
  let parameters = {};
  let values = '';

  // Generate values and named parameters
  keys.forEach((r) => {
    var key = '$' + r;
    // Generates '$columnA,$columnB'
    values = values.concat(',', key);
    // Generates { $columnA: 'ABC', $columnB: 'GHK' }
    parameters[key] = row[r];
  });
  // Trim the leading comma left by concat
  values = values.substring(1);

  // Insert into another database into OneTable (columnA,columnB) values ($columnA,$columnB)
  // Parameters: { $columnA: 'ABC', $columnB: 'GHK' }
  destDB.run(`insert into ${Desttable} (${columns}) values (${values})`, parameters);
});
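The snippet above uses the sqlite3-style API (each/run with $-prefixed named parameters). A sketch of the same copy pattern with node-postgres, assuming the destination is PostgreSQL; the pool settings and table name are placeholders:

const { Pool } = require('pg');

// Hypothetical connection settings; the two pools point at different databases.
const sourcePool = new Pool({ database: 'company_db' });
const destPool = new Pool({ database: 'my_db' });

async function copyJobs() {
  const { rows } = await sourcePool.query('SELECT "JobStart", "JobEnd" FROM "JobData"');
  for (const row of rows) {
    const columns = Object.keys(row); // ['JobStart', 'JobEnd']
    const columnList = columns.map(c => `"${c}"`).join(', '); // '"JobStart", "JobEnd"'
    const placeholders = columns.map((_, i) => `$${i + 1}`).join(', '); // '$1, $2'
    await destPool.query(
      `INSERT INTO "JobData" (${columnList}) VALUES (${placeholders})`,
      columns.map(c => row[c])
    );
  }
}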

How can I set an expiration date for AsyncStorage - react native

I am using React Native AsyncStorage and it works well, but in some cases I have to set an expiration date for data and refresh my storage. I checked the AsyncStorage documentation, but there is no option to expire data after a specific time.
The only available option is:
AsyncStorage.removeItem
AsyncStorage really only handles storage and nothing beyond that.
If you want to set an expiration, just put a key in your data for access date and set it to new Date(). Then, when you pull data, do a date check on the expiration key based on when it should expire.
First, I am storing objects, not strings, so my solution is based on the object case. If anyone uses strings, they can append an expireAt key to the object, then extract the expiry date and compare it with the current date.
My solution:
/**
 * @param urlAsKey
 * @param expireInMinutes
 * @returns {Promise.<*>}
 */
async getCachedUrlContent(urlAsKey, expireInMinutes = 60) {
  let data = null;
  await AsyncStorage.getItem(urlAsKey, async (err, value) => {
    data = JSON.parse(value);
    // there is data in cache && cache is expired
    if (data !== null && data['expireAt'] &&
        new Date(data.expireAt) < (new Date())) {
      // clear cache
      AsyncStorage.removeItem(urlAsKey);
      // update res to be null
      data = null;
    } else {
      console.log('read data from cache');
    }
  });
  // update cache + set expireAt date
  if (data === null) {
    console.log('cache new Date');
    // fetch data
    data = fetch(urlAsKey).then((response) => response.json())
      .then(apiRes => {
        // set expireAt
        apiRes.expireAt = this.getExpireDate(expireInMinutes);
        // stringify object
        const objectToStore = JSON.stringify(apiRes);
        // store object
        AsyncStorage.setItem(urlAsKey, objectToStore);
        console.log(apiRes.expireAt);
        return apiRes;
      });
  }
  return data;
},
/**
 * @param expireInMinutes
 * @returns {Date}
 */
getExpireDate(expireInMinutes) {
  const now = new Date();
  let expireTime = new Date(now);
  expireTime.setMinutes(now.getMinutes() + expireInMinutes);
  return expireTime;
}
You can also use this, an improvement on Ahmed Farag Mostafa's answer:
import AsyncStorage from "@react-native-async-storage/async-storage";

export default class ExpireStorage {
  static async getItem(key) {
    let data = await AsyncStorage.getItem(key);
    data = JSON.parse(data);
    if (
      data !== null &&
      data.expireAt &&
      new Date(data.expireAt) < new Date()
    ) {
      await AsyncStorage.removeItem(key);
      data = null;
    }
    return data?.value;
  }

  static async setItem(key, value, expireInMinutes) {
    const data = { value };
    if (expireInMinutes) {
      const expireAt = this.getExpireDate(expireInMinutes);
      data.expireAt = expireAt;
    } else {
      const expireAt = JSON.parse(await AsyncStorage.getItem(key))?.expireAt;
      if (expireAt) {
        data.expireAt = expireAt;
      } else {
        return;
      }
    }
    const objectToStore = JSON.stringify(data);
    return AsyncStorage.setItem(key, objectToStore);
  }

  static async removeItem(key) {
    return AsyncStorage.removeItem(key);
  }

  static getExpireDate(expireInMinutes) {
    const now = new Date();
    const expireTime = new Date(now);
    expireTime.setMinutes(now.getMinutes() + expireInMinutes);
    return expireTime;
  }
}
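A hypothetical usage of the class above, inside an async function: cache a value for 30 minutes; getItem returns undefined once the entry has expired and been removed.

// key and value below are placeholders
await ExpireStorage.setItem("profile", { name: "Ada" }, 30);
const profile = await ExpireStorage.getItem("profile"); // { name: "Ada" } until expiry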

Import SQL dump within Node environment

I'd like an npm script to create/configure/etc. and finally import a SQL dump. The entire creation, configuring, etc. is all working; however, I cannot get the import to work. The data is never inserted. Here's what I have (never mind the nested callbacks, as they'll be turned into promises):
connection.query(`DROP DATABASE IF EXISTS ${config.database};`, err => {
  connection.query(`CREATE DATABASE IF NOT EXISTS ${config.database};`, err => {
    connection.query('use DATABASENAME', err => {
      const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
      connection.query(`SOURCE ${sqlDumpPath}`, err => {
        connection.end(err => resolve());
      });
    });
  });
});
I also tried the following with Sequelize (ORM):
return new Promise(resolve => {
  const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
  fs.readFile('./sql/dump.sql', 'utf-8', (err, data) => {
    sequelize
      .query(data)
      .then(resolve)
      .catch(console.error);
  });
});
Here's how I set up my initial Sequelize import using the migrations framework. There is plenty going on here, but in short I:
- find the latest sql-dump in the migrations folder
- read the file using fs
- split the text into queries
- check if it's a valid query and, if so, apply some cleaning that my data required (see related post)
- push an array full of queries - I start by making sure that the database is clean by calling this.down first
- run everything as a promise (as suggested here) using mapSeries (not map)
Using sequelize-cli, you can create a migration from your shell by writing:
sequelize migration:create
And you will automatically get the file where you enter the code below. To execute the migration, you simply write:
sequelize db:migrate
"use strict";
const promise = require("bluebird");
const fs = require("fs");
const path = require("path");
const assert = require("assert");
const db = require("../api/models"); // To be able to run raw queries
const debug = require("debug")("my_new_api");
// I needed this in order to get some encoding issues straight
const Aring = new RegExp(String.fromCharCode(65533) +
"\\" + String.fromCharCode(46) + "{1,3}", "g");
const Auml = new RegExp(String.fromCharCode(65533) +
String.fromCharCode(44) + "{1,3}", "g");
const Ouml = new RegExp(String.fromCharCode(65533) +
String.fromCharCode(45) + "{1,3}", "g");
module.exports = {
up: function (queryInterface, Sequelize) {
// The following section allows me to have multiple sql-files and only use the last dump
var last_sql;
for (let fn of fs.readdirSync(__dirname)){
if (fn.match(/\.sql$/)){
fn = path.join(__dirname, fn);
var stats = fs.statSync(fn);
if (typeof last_sql === "undefined" ||
last_sql.stats.mtime < stats.mtime){
last_sql = {
filename: fn,
stats: stats
};
}
}
}
assert(typeof last_sql !== "undefined", "Could not find any valid sql files in " + __dirname);
// Split file into queries
var queries = fs.readFileSync(last_sql.filename).toString().split(/;\n/);
var actions = [{
query: "Running the down section",
exec: this.down
}]; // Clean database by calling the down first
for (let i in queries){
// Skip empty queries and the character set information in the 40101 section
// as this would most likely require a multi-query set-up
if (queries[i].trim().length == 0 ||
queries[i].match(new RegExp("/\\*!40101 .+ \\*/"))){
continue;
}
// The manual fixing of encoding
let clean_query = queries[i]
.replace(Aring, "Å")
.replace(Ouml, "Ö")
.replace(Auml, "Ä");
actions.push({
query: clean_query.substring(0, 200), // We save a short section of the query only for debugging purposes
exec: () => db.sequelize.query(clean_query)
});
}
// The Series is important as the order isn't retained with just map
return promise.mapSeries(actions, function(item) {
debug(item.query);
return item.exec();
}, { concurrency: 1 });
},
down: function (queryInterface, Sequelize) {
var tables_2_drop = [
"items",
"users",
"usertypes"
];
var actions = [];
for (let tbl of tables_2_drop){
actions.push({
// The created should be created_at
exec: () => db.sequelize.query("DROP TABLE IF EXISTS `" + tbl +"`")
});
}
return promise.map(actions, function(item) {
return item.exec();
}, { concurrency: 1 });/**/
}
};
Based loosely on Max Gordon's answer, here's my code to run a MySQL dump file from Node.js/Sequelize:
"use strict";
const fs = require("fs");
const path = require("path");
/**
* Start off with a MySQL Dump file, import that, and then migrate to the latest version.
*
* #param dbName {string} the name of the database
* #param mysqlDumpFile {string} The full path to the file to import as a starting point
*/
module.exports.migrateFromFile = function(dbName, mysqlDumpFile) {
let sequelize = createSequelize(dbName);
console.log("Importing from " + mysqlDumpFile + "...");
let queries = fs.readFileSync(mysqlDumpFile, {encoding: "UTF-8"}).split(";\n");
console.log("Importing dump file...");
// Setup the DB to import data in bulk.
let promise = sequelize.query("set FOREIGN_KEY_CHECKS=0"
).then(() => {
return sequelize.query("set UNIQUE_CHECKS=0");
}).then(() => {
return sequelize.query("set SQL_MODE='NO_AUTO_VALUE_ON_ZERO'");
}).then(() => {
return sequelize.query("set SQL_NOTES=0");
});
console.time("Importing mysql dump");
for (let query of queries) {
query = query.trim();
if (query.length !== 0 && !query.match(/\/\*/)) {
promise = promise.then(() => {
console.log("Executing: " + query.substring(0, 100));
return sequelize.query(query, {raw: true});
})
}
}
return promise.then(() => {
console.timeEnd("Importing mysql dump");
console.log("Migrating the rest of the way...");
console.time("Migrating after importing mysql dump");
return exports.migrateUp(dbName); // Run the rest of your migrations
}).then(() => {
console.timeEnd("Migrating after importing mysql dump");
});
};
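A hypothetical invocation of the helper above; the module path, database name, and dump location are placeholders:

const path = require("path");
const { migrateFromFile } = require("./migrate-from-file");

migrateFromFile("my_database", path.join(__dirname, "sql-dump/sql-dump.sql"))
  .then(() => console.log("Import and follow-up migrations complete"))
  .catch((e) => console.error(e.stack));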