Is it possible to use imagemin-cli and keep the same folder structure when compressing files? - npm-scripts

I'm trying to create an imagemin script with npm scripts, using imagemin-cli. First, I copy files to the dist folder (or .tmp for development) and then compress images with these scripts:
package.json
...
"scripts": {
  "copy:dev": "cpx app/src/**/*.{html,png,jpg,mp4,webm} .tmp/",
  "copy:prod": "cpx app/src/**/*.{html,png,jpg,mp4,webm} dist/",
  "imagemin:dev": "imagemin app/src/images/**/* -o .tmp/images/",
  "imagemin:prod": "imagemin app/src/images/**/* -o dist/images/",
  ...
},
So when I run these scripts, after compression all images end up directly inside the images/ folder.
Is there a way to compress images and keep the folder structure? Maybe with another plugin or something else.

Is there a way to compress images while keeping the folder structure?
The short answer is no, not with imagemin-cli.
imagemin (the API that imagemin-cli is built upon) does not provide a mechanism to preserve the folder structure. See the open issue/feature request #191 in the project's GitHub repo.
Solution
A cross-platform way to achieve your requirements is to write a custom Node.js utility script that utilizes the imagemin API directly. So effectively, you build your own CLI tool that can be run via npm-scripts.
The following gists show how this can be achieved...
imagemin.js
The utility node script is as follows:
#!/usr/bin/env node
'use strict';

var path = require('path');
var readline = require('readline');
var Imagemin = require('imagemin');

var outdir = process.env.PWD; // Default output folder.
var verbose = false; // Default no logging.

// The folder name specified MUST exist in the `glob` pattern of the npm-script.
var DEST_SUBROOT_FOLDER = 'images';

// Nice ticks for logging aren't supported via cmd.exe.
var ticksymbol = process.env.npm_config_shell.indexOf('bash') !== -1 ? '✔' : '√';

var rl = readline.createInterface({
  input: process.stdin,
  output: null,
  terminal: false
});

// Handle the optional `-o` argument for the destination folder.
if (process.argv.indexOf('-o') !== -1) {
  outdir = process.argv[process.argv.indexOf('-o') + 1];
}

// Handle the optional `-v` argument for verbose logging.
if (process.argv.indexOf('-v') !== -1) {
  verbose = true;
}

/**
 * Utilizes the Imagemin API to create a new instance for optimizing each image.
 * @param {String} srcpath - The filepath of the source image to optimize.
 * @param {String} destpath - The destination path to save the resultant file.
 * @param {Function} plugin - The relevant `use` plugin (jpegtran|optipng|gifsicle).
 */
function imagemin(srcpath, destpath, plugin) {
  var im = new Imagemin()
    .src(srcpath)
    .dest(destpath)
    .use(plugin);

  im.optimize(function (err, file) {
    if (err) {
      console.error('Error: ' + err);
      process.exit(1);
    }
    if (file && verbose) {
      console.log('\x1b[32m%s\x1b[0m', ticksymbol, destpath);
    }
  });
}

/**
 * Obtains the destination path and file suffix from the original source path.
 * @param {String} srcpath - The filepath for the image to optimize.
 * @return {{dest: String, ext: String}} dest path and ext (.jpg|.png|.gif).
 */
function getPathInfo(srcpath) {
  var ext = path.extname(srcpath),
      parts = srcpath.split(path.sep),
      subpath = parts.slice(parts.indexOf(DEST_SUBROOT_FOLDER), parts.length);

  subpath.unshift(outdir);

  return {
    dest: path.normalize(subpath.join(path.sep)),
    ext: ext
  };
}

/**
 * Triggers the relevant imagemin process according to file suffix (jpg|png|gif).
 * @param {String} srcpath - The filepath of the image to optimize.
 */
function optimizeImage(srcpath) {
  var p = getPathInfo(srcpath);

  switch (p.ext) {
    case '.jpg':
      imagemin(srcpath, p.dest, Imagemin.jpegtran({ progressive: true }));
      break;
    case '.png':
      imagemin(srcpath, p.dest, Imagemin.optipng({ optimizationLevel: 5 }));
      break;
    case '.gif':
      imagemin(srcpath, p.dest, Imagemin.gifsicle({ interlaced: true }));
      break;
  }
}

// Read each line from process.stdin (i.e. the filepath) and optimize it.
rl.on('line', function (srcpath) {
  optimizeImage(srcpath);
});
(Note: The code above uses version 1.0.5 of the imagemin API and not the latest version. Why? See point 1 under the Additional notes section below.)
Uninstall and install packages
First, uninstall imagemin-cli as it's no longer necessary:
$ npm un -D imagemin-cli
Next, install imagemin version 1.0.5 (this is an older package, so it may take npm longer to install than usual):
$ npm i -D imagemin@1.0.5
Then install cli-glob. This will be used to specify the glob pattern to match the images for optimizing.
$ npm i -D cli-glob
npm-scripts
Update your npm-scripts as follows:
...
"scripts": {
  "imagemin:prod": "glob \"app/src/images/**/*.{png,jpg,gif}\" | node bin/imagemin -v -o dist",
  "imagemin:dev": "glob \"app/src/images/**/*.{png,jpg,gif}\" | node bin/imagemin -v -o .tmp",
  ...
},
...
(Note: To optimize images using the gists shown above, it's not necessary to use the two scripts named copy:prod and copy:dev shown in your original post/question.)
The glob \"app/src/... part of the script above uses cli-glob to match the necessary image source files.
The matched paths are then piped to the imagemin.js utility Node.js script.
When the -v (verbose) flag is included, each processed image is logged to the console. To omit logging, simply remove the -v flag.
The -o (output) flag specifies the destination folder name, e.g. dist or .tmp. When the value for -o is omitted, the resultant images are output to the project root directory.
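For a quick sanity check, the same pipeline can also be run by hand from the project root; this is simply the prod npm-script spelled out, with npx used only to resolve the locally installed glob binary:
npx glob "app/src/images/**/*.{png,jpg,gif}" | node bin/imagemin -v -o dist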
Additional notes:
1. The reason for using imagemin version 1.0.5 is that this API allows the src value to be specified as a single filepath. In versions from 2.0.0 onward, the API expects the src value to be a glob pattern, as shown in the latest version, 5.2.2.
2. The gists above assume imagemin.js is saved to a folder named bin which exists in the same folder as package.json. It can be changed to a preferred name, or to a hidden folder by prefixing it with a dot [.], e.g. .scripts or .bin. Whatever you choose, you'll need to update the path to the script in npm-scripts accordingly (see the example below).
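For example, if the script were moved to a hidden .scripts folder instead of bin, only the script path in the npm-script changes (prod task shown, purely as an illustration of the rename):
"imagemin:prod": "glob \"app/src/images/**/*.{png,jpg,gif}\" | node .scripts/imagemin -v -o dist",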

Update 2020
There's an unmerged (as of mid-June 2020) pull request by Gijs Rogé that enables preserving the directory structure in the output directory.
You can install npm modules not yet listed in the registry by installing directly from Github, referencing a repo and even specific commit:
npm install https://github.com/<username>/<repository>#<commit> --save-dev
To install imagemin with Gijs Rogé’s fix, run...
npm install https://github.com/imagemin/imagemin#bfd7c547045f68ed92243c6a772f6265a08a687f --save-dev
...and enable the new option in your script by setting preserveDirectories: true:
// Note: imports and plugin configs have been omitted for brevity
const imagemin = require('imagemin');
const imageminMozjpeg = require('imagemin-mozjpeg');
...
(async () => {
  const files = await imagemin(['input_dir/**/*.{jpg,jpeg,png,svg}'], {
    destination: 'output_dir/',
    preserveDirectories: true, // ✨ the new option from the pull request
    plugins: [
      imageminMozjpeg( ... ),
      imageminPngquant( ... ),
      imageminSvgo( ... )
    ]
  });
})();
A .jpg found in input_dir/some/sub/dir/image.jpg will now be processed and written to output_dir/input_dir/some/sub/dir/image.jpg.
Use destination: '.' to overwrite original files in place.
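Adapted to the folder layout from the question, a minimal sketch might look like the following (the scripts/imagemin.js file name and the plugin choices are assumptions, and note that preserveDirectories reproduces the full source path, so output lands under dist/app/src/images/... rather than dist/images/...):
// scripts/imagemin.js - requires the patched imagemin build referenced above
const imagemin = require('imagemin');
const imageminJpegtran = require('imagemin-jpegtran');
const imageminOptipng = require('imagemin-optipng');

(async () => {
  await imagemin(['app/src/images/**/*.{jpg,png}'], {
    destination: 'dist',
    preserveDirectories: true, // only available in the patched build
    plugins: [
      imageminJpegtran({ progressive: true }),
      imageminOptipng({ optimizationLevel: 5 })
    ]
  });
})();
It could then be wired up as "imagemin:prod": "node scripts/imagemin.js" in package.json.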

I also had the same problem, but I changed the index.js file of imagemin in node_modules. Please copy and paste the following code into node_modules/imagemin/index.js:
'use strict';
const fs = require('fs');
const path = require('path');
const fileType = require('file-type');
const globby = require('globby');
const makeDir = require('make-dir');
const pify = require('pify');
const pPipe = require('p-pipe');
const replaceExt = require('replace-ext');

const fsP = pify(fs);

const handleFile = (input, output, options) => fsP.readFile(input).then(data => {
  const dest = output ? output : null;

  if (options.plugins && !Array.isArray(options.plugins)) {
    throw new TypeError('The `plugins` option should be an `Array`');
  }

  const pipe = options.plugins.length > 0 ? pPipe(options.plugins)(data) : Promise.resolve(data);

  return pipe
    .then(buffer => {
      const ret = {
        data: buffer,
        path: (fileType(buffer) && fileType(buffer).ext === 'webp') ? replaceExt(dest, '.webp') : dest
      };

      if (!dest) {
        return ret;
      }

      return fsP.writeFile(ret.path, ret.data)
        .then(() => ret);
    })
    .catch(error => {
      error.message = `Error in file: ${input}\n\n${error.message}`;
      throw error;
    });
});

module.exports = (input, output, options) => {
  if (!Array.isArray(input)) {
    return Promise.reject(new TypeError(`Expected an \`Array\`, got \`${typeof input}\``));
  }

  if (typeof output === 'object') {
    options = output;
    output = null;
  }

  options = Object.assign({plugins: []}, options);
  options.plugins = options.use || options.plugins;

  return globby(input, {onlyFiles: true}).then(paths => Promise.all(paths.map(x => handleFile(x, output, options))));
};

module.exports.buffer = (input, options) => {
  if (!Buffer.isBuffer(input)) {
    return Promise.reject(new TypeError(`Expected a \`Buffer\`, got \`${typeof input}\``));
  }

  options = Object.assign({plugins: []}, options);
  options.plugins = options.use || options.plugins;

  if (options.plugins.length === 0) {
    return Promise.resolve(input);
  }

  return pPipe(options.plugins)(input);
};

The following script runs a separate imagemin job for each folder.
It solves the same problem.
const path = require('path');
const fs = require('fs');
const imagemin = require('imagemin');
const imageminWebp = require('imagemin-webp');

const COMPRESSED_FOLDER = '__compressed';
const TIMER_NAME = 'compressed';

(async () => {
  console.time(TIMER_NAME);

  const publicPath = path.resolve(__dirname, '../public');
  const compressedFolderRegExp = new RegExp(COMPRESSED_FOLDER);
  const publicPathRegExp = new RegExp(publicPath);

  const folders = getAllDirectories(publicPath).filter(
    (directoryName) => !directoryName.match(compressedFolderRegExp)
  );

  await Promise.all(
    folders.map(async (folderPath) => {
      const destination = folderPath.replace(
        publicPathRegExp,
        `${publicPath}/${COMPRESSED_FOLDER}`
      );
      console.log('compressing...', destination);

      return imagemin([`${folderPath}/*.{jpg,png}`], {
        destination,
        plugins: [imageminWebp({ quality: 50 })],
      });
    })
  );

  console.timeEnd(TIMER_NAME);
  process.exit();
})();

function getAllDirectories(filepath) {
  const directoryPaths = fs
    .readdirSync(filepath, { withFileTypes: true })
    .filter((d) => d.isDirectory())
    .map(({ name }) => `${filepath}/${name}`);

  const childDirectories = directoryPaths.reduce(
    (acc, directoryPath) => acc.concat(getAllDirectories(directoryPath)),
    []
  );

  return [filepath, ...childDirectories];
}
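To wire this into npm-scripts, an entry along these lines could be used (the scripts/compress-images.js path is only an assumption about where the file above is saved; since the script resolves '../public' relative to itself, adjust that path if your images live elsewhere):
"scripts": {
  "imagemin": "node scripts/compress-images.js"
}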

Related

How to download files through development vscode extension on code-server?

I wrote a VS Code extension. Now I want to download a file into the VS Code working directory from the extension, but no file is obtained through vscode.Uri.file.
const downloadPanel = vscode.window.createWebviewPanel(
  "view",
  "下载",
  vscode.ViewColumn.Two,
  {
    enableScripts: true,
    retainContextWhenHidden: true,
  }
)

if (vscode.workspace.workspaceFolders === undefined) {
  throw new Error("not found!");
}
const filePath = vscode.workspace.workspaceFolders[0].uri.fsPath;

let downloadContent = vscode.commands.registerCommand('download.click', () => {
  console.log("filePath = " + filePath);
  const onDiskPath = vscode.Uri.file(
    path.join(context.extensionPath, "resources", "blockchain.svg")
  );
  // And get the special URI to use with the webview
  const catGifSrc = panel.webview.asWebviewUri(onDiskPath) + "";
  getWebviewContent(catGifSrc);

  function getWebviewContent(_src: string) {
    return '<html><head><script></script></script></head><body><div>download</div></body></html>';
  }
});
When clicking the link, the file is not found! Currently, only an nginx proxy can be used to download via the full path. Is there any other approach or solution?

How to copy files and folders using fs-extra in Nuxt

I'm on Nuxt.js 2.15.4 and I'm trying to create multiple themes for my app.
The flow is very simple: I have a themes folder with my theme folders within. At nuxt build/dev a module is called:
const path = require('path')
const chokidar = require('chokidar');
const fse = require('fs-extra');

export default async function (moduleOptions) {
  // moduleOptions.themeName = 'newtheme' & moduleOptions.customizeTheme = 'false'
  const themeName = moduleOptions.themeName
  const defaultThemeDir = path.join(this.options.rootDir, './themes/main')
  const newThemeDir = path.join(this.options.rootDir, './themes/' + moduleOptions.themeName)
  const sourceDir = path.join(this.options.rootDir, './')

  const emptySourceDir = async () => {
    fse.emptyDir(path.join(this.options.rootDir, 'assets'))
    fse.emptyDir(path.join(this.options.rootDir, 'components'))
    fse.emptyDir(path.join(this.options.rootDir, 'layouts'))
    fse.emptyDir(path.join(this.options.rootDir, 'middleware'))
    fse.emptyDir(path.join(this.options.rootDir, 'pages'))
    fse.emptyDir(path.join(this.options.rootDir, 'plugins'))
    fse.emptyDir(path.join(this.options.rootDir, 'static'))
  }

  const copyThemesDirectory = async () => {
    await fse.copy(path.join(defaultThemeDir, 'base'), sourceDir)
    if (themeName !== 'main') {
      await fse.copy(path.join(newThemeDir, 'base'), sourceDir)
    }
    if (moduleOptions.customizeTheme === 'true') {
      await fse.copy(path.join(newThemeDir, 'custom'), sourceDir)
    }
  }

  const toTargetPath = (oldPath) => {
    let newPath = oldPath.replace(this.options.rootDir, '')
      .replace('\\themes\\main\\base\\', '\\')
      .replace('\\themes\\main\\custom\\', '\\')
      .replace('\\themes\\' + themeName + '\\base\\', '\\')
      .replace('\\themes\\' + themeName + '\\custom\\', '\\')
    return path.join(this.options.rootDir, newPath)
  }

  await emptySourceDir()
  await copyThemesDirectory()

  if (process.env.NODE_ENV === 'development') {
    chokidar.watch([defaultThemeDir, newThemeDir]).on('all', async (event, filePath) => {
      if (event === 'add' || event === 'change') {
        fse.copy(filePath, toTargetPath(filePath))
      }
      if (event === 'unlink') {
        fse.remove(toTargetPath(filePath))
      }
    })
  }
}
It empties some folders in the Nuxt root directory and then copies themes/main/base into them. After that, if the theme name is not "main", it copies themes/{themeName}/base into the root directory. Then it checks the customization option and, if true, copies the theme/{themeName}/custom folders to the root directory.
This part works without a problem! The second part, which is for development mode, gives me an error that such files (new files added to the new theme or custom folder) don't exist.
That part, with the help of chokidar, watches my themes folder and, if anything changes, removes or copies it in the root directory. The error is shown if the same file exists in the main theme's base folders.
I even tried fse.copy(filePath, toTargetPath(filePath), {overwrite: true}) and fse.copySync(filePath, toTargetPath(filePath), {overwrite: true}).
Here is an example of the errors:
ERROR ENOENT: no such file or directory, unlink '{my-local-directory}\components\global\footer\sub\links.vue'
Does anyone know what's wrong with it? It also runs the watch change/add handler even on the first run, when simply copying from themes to root.
UPDATE
It looks like there is an error even in build. I build and it gives me:
Error: ENOENT: no such file or directory, mkdir '/var/www/static/images/folder'
Then, when I build again, it runs without error. This is my themes folder structure:
-themes
|__ main
| |__base
| | |__assets
| | |__components
| | |__middleware
| | |__layouts
| | |__pages
| | |__plugins
| | |__static
| |
| |__custom
|
|__ newtheme
|__base
|__custom

How to assert the values in a downloaded file using Cypress

I am downloading a zip file which has a JSON file zipped inside. Using cy.readFile, I am able to read the content, but I am not sure what commands can be used to assert on the values inside.
(Please let me know if there is a way to unzip the file before reading.)
I need to verify that 3 objectids are present in the JSON, and also some values of the elements.
I tried the below approach, but it did not work:
cy.readFile(`/Users/${username}/Downloads/${fileName}.zip`)
.should('contain','objectid').and('have.length',3);
The above command did not work for me :(
Could someone help me with some examples? I am new to Cypress and coding, and therefore struggling a little.
You can change the download folder for every test case!
Look into your index.js in cypress > plugins > index.js and write this:
module.exports = (on, config) => {
  on('before:browser:launch', (browser, options) => {
    const downloadDirectory = 'C:\\downloads\\'; // this is the path you want to download to
    options.preferences.default['download'] = { default_directory: downloadDirectory };
    return options;
  });
};
Do it like this
cy.readFile(`/Users/${username}/Downloads/${fileName}.zip`)
  .then((data) => {
    // you can write whatever assertions you want on data
    debugger;
    console.log(data);
    expect(data).to....
  })
You can put a debugger statement as above, and logs, to check what data contains and then assert.
Use this link to learn about the available assertions: https://docs.cypress.io/guides/references/assertions.html#BDD-Assertions
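For a plain .json file (not a zip), the assertions could look something like the sketch below; cy.readFile parses .json files automatically, and objectInfo plus the expected length of 3 are only assumptions based on the question:
cy.readFile(`/Users/${username}/Downloads/${fileName}.json`).then((data) => {
  // data is already a parsed object here
  expect(data.objectInfo).to.have.length(3);
  expect(data.objectInfo[0]).to.have.property('objectid');
});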
So here is the approach I am following. It is quite lengthy, but I am still posting it as it might be helpful for someone. Please comment if you have any suggestions for improvements.
I am using the unzipper npm package to unzip the downloaded file.
Step 1: $ npm install unzipper
Step 2: In plugins > index.js
const fs = require('fs');
const os = require('os');
const osplatform = os.platform();
const unzipper = require('unzipper');

const userName = os.userInfo().username;
let downloadPath = `/${userName}/Downloads/`;
if (osplatform == 'win32') {
  downloadPath = `/Users/${userName}/Downloads/`;
}

// Register the task inside the exported plugins function so `on` is available.
module.exports = (on, config) => {
  on('task', {
    extractzip(zipname) {
      const zipPath = downloadPath + zipname;
      if (fs.existsSync(zipPath)) {
        const readStream = fs.createReadStream(zipPath);
        readStream.pipe(unzipper.Extract({ path: `${downloadPath}` }));
        const jsonname = 'testfile.json';
        const jsonPath = downloadPath + jsonname;
        return jsonPath;
      } else {
        console.error('file not downloaded');
        return null;
      }
    }
  });
};
Step 3: In support > commands.js
Cypress.Commands.add('comparefiles', { prevSubject: false }, (subject, options = {}) => {
  cy.task('extractzip', 'exportfile.zip').then((jsonPath) => {
    cy.fixture('export.json').then((comparefile) => {
      cy.readFile(jsonPath).then((exportedfile) => {
        var exported_objectinfo = exportedfile.objectInfo;
        var compare_objectinfo = comparefile.objectInfo;
        var exported_metaInfo = exportedfile.metaInfo;
        var compare_metaInfo = comparefile.metaInfo;
        expect(exported_objectinfo).to.contain.something.like(compare_objectinfo)
        expect(exported_metaInfo).to.have.deep.members(compare_metaInfo)
      })
    })
  });
});
Step 4: specs > exportandcompare.js
cy.get('[data-ci-button="Export"]').click();
cy.comparefiles();

Import SQL dump within Node environment

I'd like an npm script to create/configure/etc. and finally import a SQL dump. The entire creation, configuring, etc. is all working; however, I cannot get the import to work. The data is never inserted. Here's what I have (never mind the nested callbacks, as they'll be turned into promises):
connection.query(`DROP DATABASE IF EXISTS ${config.database};`, err => {
  connection.query(`CREATE DATABASE IF NOT EXISTS ${config.database};`, err => {
    connection.query('use DATABASENAME', err => {
      const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
      connection.query(`SOURCE ${sqlDumpPath}`, err => {
        connection.end(err => resolve());
      });
    })
  });
});
I also tried the following with Sequelize (ORM):
return new Promise(resolve => {
  const sqlDumpPath = path.join(__dirname, 'sql-dump/sql-dump.sql');
  fs.readFile('./sql/dump.sql', 'utf-8', (err, data) => {
    sequelize
      .query(data)
      .then(resolve)
      .catch(console.error);
  });
});
Here's how I set up my initial Sequelize import using the migrations framework. There is plenty going on here, but in short I:
find the latest sql-dump in the migrations folder
read the file using fs
split the text into queries
check if it's a valid query and, if so, apply some cleaning that my data required (see related post)
push to an array full of queries - I start by making sure that the database is clean by calling this.down first
run everything as a promise (as suggested here) using mapSeries (not map)
Using sequelize-cli, you can create a migration in your shell by writing:
sequelize migration:create
And you will automatically have the file where you enter the code below. In order to execute the migration you simply write:
sequelize db:migrate
"use strict";
const promise = require("bluebird");
const fs = require("fs");
const path = require("path");
const assert = require("assert");
const db = require("../api/models"); // To be able to run raw queries
const debug = require("debug")("my_new_api");

// I needed this in order to get some encoding issues straight
const Aring = new RegExp(String.fromCharCode(65533) +
  "\\" + String.fromCharCode(46) + "{1,3}", "g");
const Auml = new RegExp(String.fromCharCode(65533) +
  String.fromCharCode(44) + "{1,3}", "g");
const Ouml = new RegExp(String.fromCharCode(65533) +
  String.fromCharCode(45) + "{1,3}", "g");

module.exports = {
  up: function (queryInterface, Sequelize) {
    // The following section allows me to have multiple sql-files and only use the last dump
    var last_sql;
    for (let fn of fs.readdirSync(__dirname)) {
      if (fn.match(/\.sql$/)) {
        fn = path.join(__dirname, fn);
        var stats = fs.statSync(fn);
        if (typeof last_sql === "undefined" ||
            last_sql.stats.mtime < stats.mtime) {
          last_sql = {
            filename: fn,
            stats: stats
          };
        }
      }
    }
    assert(typeof last_sql !== "undefined", "Could not find any valid sql files in " + __dirname);

    // Split file into queries
    var queries = fs.readFileSync(last_sql.filename).toString().split(/;\n/);

    var actions = [{
      query: "Running the down section",
      exec: this.down
    }]; // Clean database by calling the down first

    for (let i in queries) {
      // Skip empty queries and the character set information in the 40101 section
      // as this would most likely require a multi-query set-up
      if (queries[i].trim().length == 0 ||
          queries[i].match(new RegExp("/\\*!40101 .+ \\*/"))) {
        continue;
      }

      // The manual fixing of encoding
      let clean_query = queries[i]
        .replace(Aring, "Å")
        .replace(Ouml, "Ö")
        .replace(Auml, "Ä");

      actions.push({
        query: clean_query.substring(0, 200), // We save a short section of the query only for debugging purposes
        exec: () => db.sequelize.query(clean_query)
      });
    }

    // The Series is important as the order isn't retained with just map
    return promise.mapSeries(actions, function (item) {
      debug(item.query);
      return item.exec();
    }, { concurrency: 1 });
  },

  down: function (queryInterface, Sequelize) {
    var tables_2_drop = [
      "items",
      "users",
      "usertypes"
    ];
    var actions = [];
    for (let tbl of tables_2_drop) {
      actions.push({
        // The created should be created_at
        exec: () => db.sequelize.query("DROP TABLE IF EXISTS `" + tbl + "`")
      });
    }

    return promise.map(actions, function (item) {
      return item.exec();
    }, { concurrency: 1 });
  }
};
Based loosely on Max Gordon's answer, here's my code to run a MySQL Dump file from NodeJs/Sequelize:
"use strict";
const fs = require("fs");
const path = require("path");

/**
 * Start off with a MySQL Dump file, import that, and then migrate to the latest version.
 *
 * @param dbName {string} the name of the database
 * @param mysqlDumpFile {string} The full path to the file to import as a starting point
 */
module.exports.migrateFromFile = function (dbName, mysqlDumpFile) {
  let sequelize = createSequelize(dbName);
  console.log("Importing from " + mysqlDumpFile + "...");
  let queries = fs.readFileSync(mysqlDumpFile, {encoding: "UTF-8"}).split(";\n");

  console.log("Importing dump file...");

  // Setup the DB to import data in bulk.
  let promise = sequelize.query("set FOREIGN_KEY_CHECKS=0"
  ).then(() => {
    return sequelize.query("set UNIQUE_CHECKS=0");
  }).then(() => {
    return sequelize.query("set SQL_MODE='NO_AUTO_VALUE_ON_ZERO'");
  }).then(() => {
    return sequelize.query("set SQL_NOTES=0");
  });

  console.time("Importing mysql dump");
  for (let query of queries) {
    query = query.trim();
    if (query.length !== 0 && !query.match(/\/\*/)) {
      promise = promise.then(() => {
        console.log("Executing: " + query.substring(0, 100));
        return sequelize.query(query, {raw: true});
      })
    }
  }

  return promise.then(() => {
    console.timeEnd("Importing mysql dump");
    console.log("Migrating the rest of the way...");
    console.time("Migrating after importing mysql dump");
    return exports.migrateUp(dbName); // Run the rest of your migrations
  }).then(() => {
    console.timeEnd("Migrating after importing mysql dump");
  });
};
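A minimal sketch of calling this helper (the ./migrate-from-file module path and the dump location are assumptions; createSequelize and migrateUp are expected to be defined elsewhere in the same module, as in the code above):
const path = require("path");
const { migrateFromFile } = require("./migrate-from-file"); // assumed file name

migrateFromFile("my_database", path.join(__dirname, "sql-dump/sql-dump.sql"))
  .then(() => console.log("Import finished"))
  .catch(err => {
    console.error(err);
    process.exit(1);
  });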

Can I migrate a model to a datasource via the commandline slc tool?

I can migrate a model via the composer of the Arc tool, and I could write a small Node.js script in my app to auto-migrate the model to my MySQL database, configured as a datasource, but I would really love to simply type something at the slc command line, as part of my command-line creation process, to migrate my model. For example:
slc loopback:migrate --datasources=server/datasources.json --model-config=server/model-config.json --datasource=mymysqldb
You can do it by creating a script in /server/bin/automigrate.js
var path = require('path');
var app = require(path.resolve(__dirname, '../server'));
var models = require(path.resolve(__dirname, '../model-config.json'));
var datasources = require(path.resolve(__dirname, '../datasources.json'));

function autoMigrateAll() {
  Object.keys(models).forEach(function (key) {
    if (typeof models[key].dataSource != 'undefined') {
      if (typeof datasources[models[key].dataSource] != 'undefined') {
        app.dataSources[models[key].dataSource].automigrate(key, function (err) {
          if (err) throw err;
          console.log('Model ' + key + ' migrated');
        });
      }
    }
  });
}

autoMigrateAll();
And run it by using the command:
cd toYourProjectFolder
node server/bin/automigrate.js
Make sure your datasources.json is configured with MySQL and applied in model-config.json (see the example below).
Cheers
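For reference, a minimal MySQL entry in server/datasources.json looks roughly like this (host, credentials and database name are placeholders), with the model pointed at it from server/model-config.json:
server/datasources.json:
{
  "mymysqldb": {
    "name": "mymysqldb",
    "connector": "mysql",
    "host": "localhost",
    "port": 3306,
    "database": "mydb",
    "user": "root",
    "password": "secret"
  }
}
server/model-config.json (excerpt):
{
  "MyModel": { "dataSource": "mymysqldb" }
}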
You cannot do that from the slc loopback command line tool as of yet. Please feel free to submit a feature request at https://github.com/strongloop/loopback/issues.
At the moment, you would have to create a simple script that calls the automigrate command as you've already been doing.
I have multiple types of databases (connectors), so I made this script, which works for me:
var path = require('path');
var app = require(path.resolve(__dirname, '../server'));
var dataSources = require(path.resolve(__dirname, '../datasources.json'));

autoUpdateAll();

// ------------------------------------
function autoUpdateAll() {
  Object.keys(dataSources).forEach(function (dataSourceName) {
    var dataSourceObj = app.dataSources[dataSourceName];
    if (!dataSourceObj) return;

    dataSourceObj.autoupdate(function (err, result) {
      if (err) return console.error(err);
      console.log('Datasource ' + dataSourceName + ' update (autoupdate();)');
    });
  });
}
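Assuming the file is saved as server/bin/autoupdate.js (mirroring the automigrate script from the earlier answer), it can be run the same way:
cd toYourProjectFolder
node server/bin/autoupdate.js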