I have tried Redis on my Node.js server before; the script looks like this:
//...
redisClient = redis.createClient();
redisClient.on("connect", function (err) {
console.log("redis terkoneksi");
});
redisClient.on("error", function (err) {
console.log("Redis Error " + err);
});
//...
//redisClient.set(...
//redisClient.get(...
Now I want to try Redis in LoopBack using the 'loopback-connector-redis' plugin. I have installed it from npm, but I don't know how to use it. I have tried writing this:
var DataSource = require('loopback-datasource-juggler').DataSource;
var ds = new DataSource('redis');
ds = redis.createClient(); // script error: createClient is not a function
ds.on("connect", function (err) {
console.log("redis connected");
});
ds.on("error", function (err) {
console.log("Redis Error " + err);
});
I have two questions:
1. How do I use Redis with LoopBack?
2. How do I write the code correctly so that I can call Redis 'set', 'get', etc.?
I welcome any answers and suggestions, thank you.
Best regards.
LoopBack provides two flavors of Redis connector:
The community-maintained loopback-connector-redis, which implements a CRUD-like storage API using Redis as the "database".
loopback-connector-kv-redis, which provides a key-value API that is closer to how Redis is typically used.
Since you mention the set and get commands, my recommendation is to use the KV connector. See https://github.com/strongloop/loopback-example-kv-connectors/tree/master/redis.lb3x for a full working example.
1. Create a datasource backed by the Redis KV connector.
Example: server/datasources.json
{
"db": {
"host": "127.0.0.1",
"port": 6379,
"name": "db",
"connector": "kv-redis"
}
}
2. Create a new model to represent the key-value data and operations.
Example: common/models/color.json
{
"name": "Color",
"base": "KeyValueModel",
"idInjection": true,
"options": {
"validateUpsert": true
},
"properties": {},
"validations": [],
"relations": {},
"acls": [],
"methods": {}
}
3. Attach the model to the datasource.
Example: server/model-config.json
{
"Color": {
"dataSource": "db",
"public": true
}
}
Now you can access all KeyValueModel methods on your model, either via REST API or from JavaScript:
const Color = app.models.Color;
await Color.set('my key', 'my value', {/*options*/});
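For example, here is a quick sketch of a few more KeyValueModel calls from JavaScript, reusing the Color model from above (the key and value names are just placeholders):
async function demo() {
  // store a value that expires after one minute (the ttl option is in milliseconds)
  await Color.set('front-door', 'red', { ttl: 60 * 1000 });

  const value = await Color.get('front-door');      // -> 'red'
  const remaining = await Color.ttl('front-door');  // remaining time-to-live in ms
  console.log(value, remaining);
}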
I am working on a Nuxt project and I want to publish it on Google Play, but that requires an APK, so is there any way to get an APK file from a Nuxt app?
I've already tried using Android Studio, but that was unsuccessful.
manifest.json:
{
"name": "my nuxt app",
"short_name": "my lovely nuxt app",
"description": "pwa to apk",
"icons": [
{
"src": "/logo.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/300.png",
"sizes": "384x384",
"type": "image/jpg"
},
{
"src": "/512.jpg",
"sizes": "512x512",
"type": "image/jpg"
}
],
"start_url": "/?standalone=true",
"display": "standalone",
"background_color": "#222",
"theme_color": "#222",
"lang": "fa",
"prefer_related_applications": true
}
and I get this error when I try to install it:
for security your phone is set to block installation
TWAs are a thing, as you can read here: https://www.ateamsoftsolutions.com/what-are-pwa-and-twa/
Meanwhile, this is not the same as having an .apk, which is something totally different from the Web platform, as you can see here: https://fileinfo.com/extension/apk (none of those extensions are used on the Web).
This is a totally different bundle format and ecosystem. Hence, you cannot directly port a PWA into a Google Play app.
You'll need to build a mobile app with something like Capacitor (Quasar can help here) or a similar solution, or use React Native, Flutter, or even vanilla Kotlin (the latter being the closest to the machine).
In addition to kissu's comment: I always use Nuxt.js for regular websites, but Ionic/Vue with Capacitor for mobile apps. It works great, stays in the same ecosystem, and Ionic provides great UI components and a CLI. This is just a suggestion for something that works and has a minimal learning curve.
After a lot of searching, and thanks to #kissu for giving me a hint about TWAs, I found the solution:
1. First of all, you need a service worker for your Nuxt project; put it in the static folder, for example:
/static/sw.js
and inside sw.js:
const options = {"workboxURL":"https://cdn.jsdelivr.net/npm/workbox-cdn#5.1.4/workbox/workbox-sw.js","importScripts":[],"config":{"debug":false},"cacheOptions":{"cacheId":"online-actor-prod","directoryIndex":"/","revision":"c35hcbL1ctml"},"clientsClaim":true,"skipWaiting":true,"cleanupOutdatedCaches":true,"offlineAnalytics":false,"preCaching":[{"revision":"c35hcbL1ctml","url":"/"}],"runtimeCaching":[{"urlPattern":"/_nuxt/","handler":"CacheFirst","method":"GET","strategyPlugins":[]},{"urlPattern":"/","handler":"NetworkFirst","method":"GET","strategyPlugins":[]}],"offlinePage":null,"pagesURLPattern":"/","offlineStrategy":"NetworkFirst"}
importScripts(...[options.workboxURL, ...options.importScripts])
initWorkbox(workbox, options)
workboxExtensions(workbox, options)
precacheAssets(workbox, options)
cachingExtensions(workbox, options)
runtimeCaching(workbox, options)
offlinePage(workbox, options)
routingExtensions(workbox, options)
function getProp(obj, prop) {
return prop.split('.').reduce((p, c) => p[c], obj)
}
function initWorkbox(workbox, options) {
if (options.config) {
// Set workbox config
workbox.setConfig(options.config)
}
if (options.cacheNames) {
// Set workbox cache names
workbox.core.setCacheNameDetails(options.cacheNames)
}
if (options.clientsClaim) {
// Start controlling any existing clients as soon as it activates
workbox.core.clientsClaim()
}
if (options.skipWaiting) {
workbox.core.skipWaiting()
}
if (options.cleanupOutdatedCaches) {
workbox.precaching.cleanupOutdatedCaches()
}
if (options.offlineAnalytics) {
// Enable offline Google Analytics tracking
workbox.googleAnalytics.initialize()
}
}
function precacheAssets(workbox, options) {
if (options.preCaching.length) {
workbox.precaching.precacheAndRoute(options.preCaching, options.cacheOptions)
}
}
function runtimeCaching(workbox, options) {
const requestInterceptor = {
requestWillFetch({ request }) {
if (request.cache === 'only-if-cached' && request.mode === 'no-cors') {
return new Request(request.url, { ...request, cache: 'default', mode: 'no-cors' })
}
return request
},
fetchDidFail(ctx) {
ctx.error.message =
'[workbox] Network request for ' + ctx.request.url + ' threw an error: ' + ctx.error.message
console.error(ctx.error, 'Details:', ctx)
},
handlerDidError(ctx) {
ctx.error.message =
`[workbox] Network handler threw an error: ` + ctx.error.message
console.error(ctx.error, 'Details:', ctx)
return null
}
}
for (const entry of options.runtimeCaching) {
const urlPattern = new RegExp(entry.urlPattern)
const method = entry.method || 'GET'
const plugins = (entry.strategyPlugins || [])
.map(p => new (getProp(workbox, p.use))(...p.config))
plugins.unshift(requestInterceptor)
const strategyOptions = { ...entry.strategyOptions, plugins }
const strategy = new workbox.strategies[entry.handler](strategyOptions)
workbox.routing.registerRoute(urlPattern, strategy, method)
}
}
function offlinePage(workbox, options) {
if (options.offlinePage) {
// Register router handler for offlinePage
workbox.routing.registerRoute(new RegExp(options.pagesURLPattern), ({ request, event }) => {
const strategy = new workbox.strategies[options.offlineStrategy]
return strategy
.handle({ request, event })
.catch(() => caches.match(options.offlinePage))
})
}
}
function workboxExtensions(workbox, options) {
}
function cachingExtensions(workbox, options) {
}
function routingExtensions(workbox, options) {
}
2. You also need a manifest; for that, put this code in your nuxt.config.js:
export default {
pwa: {
manifest: {
name: 'example name',
short_name: 'example',
lang: 'fa',
theme_color: '#222',
background_color: '#222',
start_url: `/`,
prefer_related_applications: true,
},
icon: {
fileName: 'logo.png'
},
},
}
3. Now everything is ready to create your APK: search for "PWA to APK" on Google and use one of the sites that offer this service.
I've already tried these and they both work well:
gonative.io
pwabuilder.com
I'm building a blog website with TypeScript and TypeORM on Node.js with Express. I've successfully implemented authorization: I can register a user and then log in with the proper credentials, but to my surprise my database seems to be empty (in pgAdmin, and also in psql using SELECT * FROM user;).
I'm sure it's some silly mistake or a misunderstanding on my part, yet I just don't understand why it happens; I'm confused.
ormconfig.json
{
"name": "development",
"type": "postgres",
"host": "localhost",
"port": 5432,
"username": "postgres",
"password": "secret",
"database": "bloo_dev", <-- my database
"synchronize": true,
"logging": true,
"entities": ["dist/**/*.entity{.ts,.js}"],
"migrations": ["src/migration/**/*.ts"],
"subscribers": ["src/subscriber/**/*.ts"],
"cli": {
"entitiesDir": "src/entity",
"migrationsDir": "src/migration",
"subscribersDir": "src/subscriber"
}
}
ormConnect.ts
import { createConnection, getConnectionOptions } from "typeorm";
import { User } from "../entity/User.entity";
import { Article } from "../entity/Article.entity";
export const ormConnect = async () => {
const options = await getConnectionOptions(process.env.NODE_ENV);
return process.env.NODE_ENV === "production"
? createConnection({
...options,
url: process.env.DATABASE_URL,
entities: [User, Article],
name: "default",
} as any)
: createConnection({ // <-- this one gets called
...options,
//url: process.env.DATABASE_URL,
entities: [User, Article],
name: "default",
} as any);
};
And everything seems to work completely fine from the app's perspective; I'm sure that I've added a new user, because I can log in with their credentials later.
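As a purely illustrative debugging aside (not part of the original setup), one way to confirm which database the app is actually talking to is to log what TypeORM resolved after connecting; the import path below is a hypothetical assumption:
import { ormConnect } from "./ormConnect"; // hypothetical path

ormConnect().then(async (connection) => {
  // connection.options reflects the options TypeORM actually used
  console.log("TypeORM options:", connection.options);
  // current_database() is standard PostgreSQL and shows where queries really land
  console.log("server reports:", await connection.query("SELECT current_database()"));
});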
How do I send a unique value (UUIDv4) to a Step Function in the JSON input each time it is triggered from a CloudWatch Events rule?
Can anyone help with this? Thanks in advance.
AWS Step Functions now provides an intrinsic function to generate a UUIDv4 string in the ASL definition:
https://docs.aws.amazon.com/step-functions/latest/dg/amazon-states-language-intrinsic-functions.html#asl-intrsc-func-uuid-generate
Example:
"uuid.$": "States.UUID()"
// output
{ "uuid": "ca4c1140-dcc1-40cd-ad05-7b4aa23df4a8" }
Since AWS recently added Step Functions as a CloudWatch Events target, you could easily make the first state in the Step Function generate the UUID and then pass it down to the following states. This keeps the Step Function self-contained.
"GenerateUuid": {
"Type": "Task",
"Resource": "arn:aws:states:us-east-1:123456789012:activity:GenerateUuid",
"Next": "CreateNewRecord"
},
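The code behind that GenerateUuid step (whether an activity worker or a Lambda task) could be as small as this sketch, shown here as a Lambda-style handler and assuming the uuid npm package is available; it just merges a fresh UUID into whatever input it receives:
const { v4: uuidv4 } = require('uuid');

exports.handler = async (event) => {
  // return the original input with a uuid field added
  return { ...event, uuid: uuidv4() };
};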
Another way is to have a Lambda function generate the UUID and pass it into the state machine, which you could again trigger from a CloudWatch event.
var AWS = require('aws-sdk'); // assumes the AWS SDK for JavaScript v2
var { v4: uuidv4 } = require('uuid'); // assumes the 'uuid' package
var stepfunctions = new AWS.StepFunctions();
var params = {
stateMachineArn: 'MyStateMachine-12345',
input: JSON.stringify({ uuid: uuidv4() }) // StartExecution input must be a JSON string
};
stepfunctions.startExecution(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
A little late to the party, but in case someone else comes across this...
When you set up your CloudWatch Events rule you can configure an Input Transformer on it, and luckily the event that is fired has an ID, which should solve your problem without adding any extra Lambdas to your state machine.
Below is an example of setting it up so that the input to your state machine will look like this:
{
"meta": {
"uniqueId": "96c80428-14fe-c8d0-f6e3-639384992391"
}
}
Using the AWS console: set the Input Transformer on the rule's target (screenshot omitted here).
Using code (JS):
const AWS = require('aws-sdk'); // assumes the AWS SDK for JavaScript v2
const cloudwatch = new AWS.CloudWatchEvents();
const targets = [
{
Arn: "my-state-machine-arn",
Id: "my cloud watch rule name",
InputTransformer: {
InputTemplate: '{"meta":{"uniqueId":<id>}}',
InputPathsMap: {
id: '$.id',
},
},
RoleArn: "my-cloudwatch-arn",
},
];
const params = {
Rule: "my cloud watch rule name",
Targets: targets,
};
cloudwatch.putTargets(params, (err, data) => {
if (err) {
// error
} else {
// success
}
});
I installed the Auth0 Lock and can log in on my client side, with an idToken in my localStorage.
I send this idToken to my API server, a FeathersJS server, which is basically an extension of an Express server. I get authenticated correctly using JWT, but the user is empty in my req object (it's called student here):
{ authenticated: true,
query: {},
provider: 'rest',
headers: { (truncated...) },
student: {},
payload:
{ iss: 'https://mydomain.eu.auth0.com/',
sub: 'myusername',
aud: 'myaudience',
exp: 1494125072,
iat: 1494089072 } }
The last five lines are the payload contained inside Auth0's idToken.
The fact that my student user object is empty is kind of normal when I think about it, because the app doesn't know how to link a Auth0 user to one of my database users. It's done by the username property. But how do I tell my Feathers app that? Is there a populateUser or something similar?
I remembered such a function in the old Feathers, but the new one uses the common hook's populate function: see here:
populateUser -> use new populate hook in feathers-hooks-common
So I tried this new populate hook, but unfortunately it's only an After hook, which doesn't make sense since I want to populate the user before making the request.
And now I'm stuck. I mean, I could write my own before hook which populates the user, but I bet there's some nicer way to achieve what I want.
Below is some relevant code:
authentication.js
const authentication = require('feathers-authentication');
const jwt = require('feathers-authentication-jwt');
const oauth2 = require('feathers-authentication-oauth2');
const Auth0Strategy = require('passport-auth0').Strategy;
module.exports = function () {
const app = this;
const config = app.get('authentication');
// Set up authentication with the secret
app.configure(authentication(config));
app.configure(jwt());
app.configure(oauth2({
name: 'auth0',
Strategy: Auth0Strategy,
}));
// The `authentication` service is used to create a JWT.
// The before `create` hook registers strategies that can be used
// to create a new valid JWT (e.g. local or oauth2)
app.service('authentication').hooks({
before: {
create: [
authentication.hooks.authenticate(config.strategies)
],
remove: [
authentication.hooks.authenticate('jwt')
]
}
});
};
config file
{
...
"authentication": {
"entity": "student",
"service": "students",
"secret": "SAME_SECRET_AS_BELOW",
"strategies": [
"jwt"
],
"path": "/authentication",
"jwt": {
"header": {
"type": "access"
},
"audience": "SAME_AUDIENCE_AS_BELOW",
"subject": "anonymous",
"issuer": "https://mydomain.eu.auth0.com/",
"algorithm": "HS256",
"expiresIn": "1d"
},
"auth0": {
"clientID": "SAME_AUDIENCE_AS_ABOVE",
"clientSecret": "SAME_SECRET_AS_ABOVE",
"domain": "https://mydomain.eu.auth0.com/"
}
}
}
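For what it's worth, the kind of hand-rolled before hook mentioned above might look roughly like this sketch; the username field on the students service and the use of params.payload.sub are assumptions based on the payload shown earlier:
// hypothetical hook: populate-student.js
module.exports = function populateStudent() {
  return function (hook) {
    const payload = hook.params.payload;
    if (!payload || !payload.sub) {
      return hook; // nothing to look up
    }
    // find the student whose username matches the Auth0 `sub` claim
    return hook.app.service('students')
      .find({ query: { username: payload.sub }, paginate: false })
      .then(students => {
        hook.params.student = students[0];
        return hook;
      });
  };
};
It would then be registered as a before hook on the relevant services, after authenticate('jwt').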
What is the best way to see differences between the contents of two different Redis databases? We have a development and a production deployment, and our production instance doesn't seem to have the exact same data as development, but we need a solid, easy way to verify that. Is there a good way to do this without writing too much of our own code?
If your production data has a high rate of change, doing this would be tough. So for this answer let us assume you either don't have a high data churn or you can do this at a quiescent time where the data churn rate is low.
For this to work from a shell-script perspective, you will need to do the first step against both servers in parallel:
1. Use the RDB save option in redis-cli to pull down a local copy of the data from each server.
2. Compare a hash of the files, such as an md5sum. If the hashes are the same, the data is the same.
If the churn rate is low enough that you can get a comparable dump, this will identify if they are different, but not what data differs if any. However you can use this as a conditional for avoiding a deeper dive which has to essentially ransack the database.
Alternatively, if they differ you could write something which uses the RDB files and one of the RDB file parsers to compare them and output the differences it finds.
Indeed this method would work far better than ransacking the server as data can change during the process whereas the dump file analysis method compares for a fixed point in time.
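If it helps, here is a rough Node sketch of that dump-and-hash idea (hostnames and file paths are placeholders; it assumes redis-cli is installed locally and the servers allow the --rdb snapshot download):
const { execFile } = require('child_process');
const { createHash } = require('crypto');
const fs = require('fs');

// pull an RDB snapshot from one server into a local file
function dumpRdb(host, port, outFile) {
  return new Promise((resolve, reject) => {
    execFile('redis-cli', ['-h', host, '-p', String(port), '--rdb', outFile], (err) => {
      return err ? reject(err) : resolve(outFile);
    });
  });
}

function md5(file) {
  return createHash('md5').update(fs.readFileSync(file)).digest('hex');
}

(async () => {
  // dump both servers in parallel, as suggested above
  await Promise.all([
    dumpRdb('dev.redis.example', 6379, '/tmp/dev.rdb'),
    dumpRdb('prod.redis.example', 6379, '/tmp/prod.rdb'),
  ]);
  if (md5('/tmp/dev.rdb') === md5('/tmp/prod.rdb')) {
    console.log('Dumps are identical');
  } else {
    console.log('Dumps differ; a deeper key-by-key comparison is needed');
  }
})();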
Because we are using Redis Sentinel, I assumed I couldn't find existing code to steal, so I wrote my own real quick.
The code compares the contents of two different Redis databases using the ioredis Redis client npm module. It works fine for small Redis databases that don't change much; for larger amounts of data, or for databases seeing a lot of writes per minute, it might not be suitable.
The code looks like this:
var async = require('async');
var Redis = require('ioredis');
var REDIS_SENTINEL_MASTER_NAME_STAGING = 'smartconndevredis01';
var REDIS_SENTINEL_MASTER_NAME_PROD = 'smartconnect_curr_master';
var SENTINEL_ENDPOINTS_STAGING = [
{
"host": "XXX.XX.XX.XX",
"port": "26379"
},
{
"host": "XXX.XX.X.XX",
"port": "26379"
},
{
"host": "XXX.XX.X.XX",
"port": "26379"
}
];
var SENTINEL_ENDPOINTS_PROD = [
{
"host": "XXX.XX.X.XX",
"port": "26379"
},
{
"host": "XXX.XX.X.XX",
"port": "26379"
},
{
"host": "XXX.X.XX.XX",
"port": "26379"
}
];
var clientStaging = new Redis({
sentinels: SENTINEL_ENDPOINTS_STAGING,
name: REDIS_SENTINEL_MASTER_NAME_STAGING,
db: 0
});
var clientProd = new Redis({
sentinels: SENTINEL_ENDPOINTS_PROD,
name: REDIS_SENTINEL_MASTER_NAME_PROD,
db: 0
});
var dbStaging = {};
var dbProd = {};
async.parallel([
function (callback) {
clientStaging.keys('*', function (err, keys) {
async.each(keys, function (key, cb) {
clientStaging.get(key, function (err, result) {
//console.log('staging> key: "'+key+'", result:', result);
dbStaging[key] = result;
cb(err);
});
}, function done(err, res) {
callback(err, res);
});
});
},
function (callback) {
clientProd.keys('*', function (err, keys) {
async.each(keys, function (key, cb) {
clientProd.get(key, function (err, result) {
//console.log('production> key: "'+key+'", result:', result);
dbProd[key] = result;
cb(err);
});
}, function done(err, res) {
callback(err, res);
});
});
}
],
function done(err, results) {
if (err) {
throw err;
}
else {
diffDbs();
}
});
function diffDbs() {
Object.keys(dbStaging).forEach(function (key) {
if (!dbProd.hasOwnProperty(key)) {
console.log('staging redis has key, prod redis does not:',key);
}
else {
var stagingVal = dbStaging[key];
var prodVal = dbProd[key];
if(String(stagingVal).valueOf() != String(prodVal).valueOf()){
console.log('staging redis and prod redis have DIFFERENT values for key:',key);
}
}
});
Object.keys(dbProd).forEach(function (key) {
if (!dbStaging.hasOwnProperty(key)) {
console.log('prod redis has key, staging redis does not:',key);
}
else {
var stagingVal = String(dbStaging[key]).replace(/ /g, ''); //remove all whitespace
var prodVal = String(dbProd[key]).replace(/ /g, ''); //remove all whitespace
if(stagingVal.valueOf() != prodVal.valueOf()){
console.log('staging redis and prod redis have DIFFERENT values for key:',key);
}
}
});
process.exit(0);
}