Ignoring a new file with vue's dev server - vue.js

I'm using the pre-build-webpack plugin to merge several JSON files into one JSON array every time I start my app (npm run serve or npm run build). The problem is that it gets caught in an infinite webpack compile loop when I start the development server. I managed to work around the loop with the watch-ignore-webpack-plugin plugin, which initially seemed to resolve the issue - webpack now compiles everything twice (it seems) and then it's good to go and I can access my local server. But now, when I visit localhost:8080, there's nothing: the screen is blank and nothing is console.log()ed, so I don't know what to do anymore.
If anyone's seen anything like this or knows how to fix it, please let me know. If you require any additional info, also let me know.
Versions:
vue: 2.6.10 (as seen in package.json)
vue-cli: 3.11.0 (running vue -V in cmd)
pre-build-webpack: 0.1.0
watch-ignore-webpack-plugin: 1.0.0
webpack-log: 3.0.1
vue.config.js (with everything irrelevant removed):
const WebpackPreBuildPlugin = require('pre-build-webpack');
const WatchIgnorePlugin = require('watch-ignore-webpack-plugin');

module.exports = {
  configureWebpack: {
    plugins: [
      new WebpackPreBuildPlugin(() => {
        const fs = require('fs');
        const glob = require('glob');
        const log = require('webpack-log')({ name: 'ATTENTION!' });
        const output = [];
        const exclude = [];

        glob('./src/components/mods/**/*.json', (err, paths) => {
          paths.forEach(path => {
            const content = JSON.parse(fs.readFileSync(path, 'utf-8'));
            const pathSplit = path.split('/');
            const modFolderName = pathSplit[pathSplit.length - 2];

            if (!output.filter(val => val.id === content.id)[0]) {
              if (exclude.indexOf(modFolderName) === -1) {
                output.push(content);
              } else {
                log.warn(`SKIPPING CONTENTS OF "${modFolderName}"`);
              }
            } else {
              log.error(`MOD WITH ID "${content.id}" ALREADY EXISTS!`);
              process.exit(0);
            }
          });

          // If I take out this line, the infinite loop doesn't occur, but then, of
          // course, I don't get my merged json file either.
          fs.writeFileSync('./src/config/modules/layoutConfig.json', JSON.stringify(output));
        });
      }),
      // Neither of the below paths works.
      new WatchIgnorePlugin([/\layoutConfig.json$/]),
      // new WatchIgnorePlugin(['./src/config/modules/layoutConfig.json']),
    ]
  }
};
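For reference, another knob that is sometimes suggested for this kind of rebuild loop is webpack's own watchOptions.ignored, which vue.config.js can set through configureWebpack. This is only a sketch of that idea, not something from the post above, and it assumes the generated file stays at ./src/config/modules/layoutConfig.json:

// vue.config.js - minimal sketch of the watchOptions.ignored approach.
module.exports = {
  configureWebpack: {
    watchOptions: {
      // Ask webpack's watcher to skip the generated file entirely, so writing
      // it from the pre-build hook does not trigger another rebuild.
      ignored: [/layoutConfig\.json$/]
    }
  }
};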

Related

"The original argument must be of type function" ERROR for promisifying client.zrem?

I am making a cron job instance, running with Node, whose job is to remove posts from my Redis cache.
I want to promisify client.zrem so I can remove many posts from the cache and ensure they are all removed, but when running my code I get the error below on the line client.zrem = util.promisify(client.zrem):
"TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type function. Received undefined"
I have another Node instance that runs this SAME CODE with no errors, and I have updated my NPM version to the latest, as suggested by this similar SO question, but I am still getting the error:
TypeError [ERR_INVALID_ARG_TYPE]: The "original" argument must be of type Function. Received type undefined
Any idea how I can fix this?
const Redis = require("redis")
const util = require(`util`)

const client = Redis.createClient({
  url: process.env.REDIS,
})

client.zrem = util.promisify(client.zrem) // ERROR THROWN HERE

// DELETE ONE POST
const deletePost = async (deletedPost) => {
  await client.zrem("posts", JSON.stringify(deletedPost))
}

// DELETES MANY POSTS
const deleteManyPosts = (postsToDelete) => {
  postsToDelete.map(async (post) => {
    await client.zrem("posts", JSON.stringify(post))
  })
}

module.exports = { deletePost, deleteManyPosts }
Node Redis 4.x introduced several breaking changes. Adding support for Promises was one of them; renaming the methods to be camel-cased was another. Details can be found in the README in the GitHub repo for Node Redis.
You simply need to delete the offending line and rename the calls from .zrem to .zRem.
I've also noticed that you aren't explicitly connecting to Redis after creating the client. You'll want to do that.
Try this:
const Redis = require("redis")

const client = Redis.createClient({
  url: process.env.REDIS,
})

// CONNECT TO REDIS
// NOTE: this code assumes that the Node.js version supports top-level await
client.on('error', (err) => console.log('Redis Client Error', err));
await client.connect();

// DELETE ONE POST
const deletePost = async (deletedPost) => {
  await client.zRem("posts", JSON.stringify(deletedPost))
}

// DELETES MANY POSTS
const deleteManyPosts = (postsToDelete) => {
  postsToDelete.map(async (post) => {
    await client.zRem("posts", JSON.stringify(post))
  })
}

module.exports = { deletePost, deleteManyPosts }
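One caveat about the snippet above: since the file uses require (CommonJS), top-level await won't actually be available there. A minimal sketch of the same idea without top-level await, keeping the answer's zRem calls and also awaiting the bulk deletes (the ready promise is my own addition, not part of the answer):

const Redis = require("redis")

const client = Redis.createClient({ url: process.env.REDIS })
client.on('error', (err) => console.log('Redis Client Error', err))

// Kick off the connection once at module load; callers await it before use.
const ready = client.connect()

// DELETE ONE POST
const deletePost = async (deletedPost) => {
  await ready
  await client.zRem("posts", JSON.stringify(deletedPost))
}

// DELETES MANY POSTS - awaiting Promise.all ensures every removal has
// completed (or failed) before the caller continues.
const deleteManyPosts = async (postsToDelete) => {
  await ready
  await Promise.all(postsToDelete.map((post) => client.zRem("posts", JSON.stringify(post))))
}

module.exports = { deletePost, deleteManyPosts }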

React Application Reloading (Webpack) when Saving Image to Server

I am writing a React app in Visual Studio 2019 (ASP.NET Core 3.1) and I'm having an issue where the app reloads in the browser whenever I save a new profile picture to disk.
This causes a problem on the page, because the useEffect() runs before the app has finished processing the async response with the updated info.
I can reproduce this behavior by simply overwriting or deleting the image file from File Explorer. It is also important to NOTE that if I attempt to move my images folder outside of the src folder, I get an error saying this is not allowed.
My initial investigation tells me this has to do with webpack watching the images folder, but every attempt I have made to prevent it from watching the folder has failed. I have gone over the webpack documentation many times, but nothing seems to change.
It doesn't seem to matter which of my two config files, Webpack.config.js or WebpackDevServer.config.js, I modify; it always reloads the app. I have also tried setting the ENV mode to 'development', but again no dice.
Can someone please tell me how to modify the webpack configs so the application stops reloading?
If you can provide the above back with the necessary change it would be MUCH appreciated.
src\Images\Users\Profile Pictures - is the path I would like to ignore.
Webpack.config.js
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// #remove-on-eject-end
'use strict';
const fs = require('fs');
const path = require('path');
const webpack = require('webpack');
const resolve = require('resolve');
const PnpWebpackPlugin = require('pnp-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const InlineChunkHtmlPlugin = require('react-dev-utils/InlineChunkHtmlPlugin');
const TerserPlugin = require('terser-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const safePostCssParser = require('postcss-safe-parser');
const ManifestPlugin = require('webpack-manifest-plugin');
const InterpolateHtmlPlugin = require('react-dev-utils/InterpolateHtmlPlugin');
const WorkboxWebpackPlugin = require('workbox-webpack-plugin');
const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin');
const ModuleScopePlugin = require('react-dev-utils/ModuleScopePlugin');
const getCSSModuleLocalIdent = require('react-dev-utils/getCSSModuleLocalIdent');
const paths = require('./paths');
const modules = require('./modules');
const getClientEnvironment = require('./env');
const ModuleNotFoundPlugin = require('react-dev-utils/ModuleNotFoundPlugin');
const ForkTsCheckerWebpackPlugin = require('react-dev-utils/ForkTsCheckerWebpackPlugin');
const typescriptFormatter = require('react-dev-utils/typescriptFormatter');
// #remove-on-eject-begin
const getCacheIdentifier = require('react-dev-utils/getCacheIdentifier');
// #remove-on-eject-end
const postcssNormalize = require('postcss-normalize');
const appPackageJson = require(paths.appPackageJson);
// Source maps are resource heavy and can cause out of memory issue for large source files.
const shouldUseSourceMap = process.env.GENERATE_SOURCEMAP !== 'false';
// Some apps do not need the benefits of saving a web request, so not inlining the chunk
// makes for a smoother build process.
const shouldInlineRuntimeChunk = process.env.INLINE_RUNTIME_CHUNK !== 'false';
const isExtendingEslintConfig = process.env.EXTEND_ESLINT === 'true';
const imageInlineSizeLimit = parseInt(
  process.env.IMAGE_INLINE_SIZE_LIMIT || '10000'
);
// Check if TypeScript is setup
const useTypeScript = fs.existsSync(paths.appTsConfig);
// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;
// This is the production and development configuration.
// It is focused on developer experience, fast rebuilds, and a minimal bundle.
module.exports = function (webpackEnv) {
  const isEnvDevelopment = webpackEnv === 'development';
  const isEnvProduction = webpackEnv === 'production';

  // Variable used for enabling profiling in Production
  // passed into alias object. Uses a flag if passed into the build command
  const isEnvProductionProfile =
    isEnvProduction && process.argv.includes('--profile');

  // We will provide `paths.publicUrlOrPath` to our app
  // as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
  // Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz.
  // Get environment variables to inject into our app.
  const env = getClientEnvironment(paths.publicUrlOrPath.slice(0, -1));

  // common function to get style loaders
  const getStyleLoaders = (cssOptions, preProcessor) => {
    const loaders = [
      isEnvDevelopment && require.resolve('style-loader'),
      isEnvProduction && {
        loader: MiniCssExtractPlugin.loader,
        // css is located in `static/css`, use '../../' to locate index.html folder
        // in production `paths.publicUrlOrPath` can be a relative path
        options: paths.publicUrlOrPath.startsWith('.')
          ? { publicPath: '../../' }
          : {},
      },
      {
        .....
WebpackDevServer.config.js
/**
* Copyright (c) 2015-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// #remove-on-eject-end
'use strict';
const fs = require('fs');
const errorOverlayMiddleware = require('react-dev-utils/errorOverlayMiddleware');
const evalSourceMapMiddleware = require('react-dev-utils/evalSourceMapMiddleware');
const noopServiceWorkerMiddleware = require('react-dev-utils/noopServiceWorkerMiddleware');
const ignoredFiles = require('react-dev-utils/ignoredFiles');
const redirectServedPath = require('react-dev-utils/redirectServedPathMiddleware');
const paths = require('./paths');
const getHttpsConfig = require('./getHttpsConfig');
const host = process.env.HOST || '0.0.0.0';
const sockHost = process.env.WDS_SOCKET_HOST;
const sockPath = process.env.WDS_SOCKET_PATH; // default: '/sockjs-node'
const sockPort = process.env.WDS_SOCKET_PORT;
module.exports = function(proxy, allowedHost) {
  return {
    // WebpackDevServer 2.4.3 introduced a security fix that prevents remote
    // websites from potentially accessing local content through DNS rebinding:
    // https://github.com/webpack/webpack-dev-server/issues/887
    // https://medium.com/webpack/webpack-dev-server-middleware-security-issues-1489d950874a
    // However, it made several existing use cases such as development in cloud
    // environment or subdomains in development significantly more complicated:
    // https://github.com/facebook/create-react-app/issues/2271
    // https://github.com/facebook/create-react-app/issues/2233
    // While we're investigating better solutions, for now we will take a
    // compromise. Since our WDS configuration only serves files in the `public`
    // folder we won't consider accessing them a vulnerability. However, if you
    // use the `proxy` feature, it gets more dangerous because it can expose
    // remote code execution vulnerabilities in backends like Django and Rails.
    // So we will disable the host check normally, but enable it if you have
    // specified the `proxy` setting. Finally, we let you override it if you
    // really know what you're doing with a special environment variable.
    disableHostCheck:
      !proxy || process.env.DANGEROUSLY_DISABLE_HOST_CHECK === 'true',
    // Enable gzip compression of generated files.
    compress: true,
    // Silence WebpackDevServer's own logs since they're generally not useful.
    // It will still show compile warnings and errors with this setting.
    clientLogLevel: 'none',
    // By default WebpackDevServer serves physical files from current directory
    // in addition to all the virtual build products that it serves from memory.
    // This is confusing because those files won't automatically be available in
    // production build folder unless we copy them. However, copying the whole
    // project directory is dangerous because we may expose sensitive files.
    // Instead, we establish a convention that only files in `public` directory
    // get served. Our build script will copy `public` into the `build` folder.
    // In `index.html`, you can get URL of `public` folder with %PUBLIC_URL%:
    // <link rel="icon" href="%PUBLIC_URL%/favicon.ico">
    // In JavaScript code, you can access it with `process.env.PUBLIC_URL`.
    // Note that we only recommend to use `public` folder as an escape hatch
    // for files like `favicon.ico`, `manifest.json`, and libraries that are
    // for some reason broken when imported through webpack. If you just want to
    // use an image, put it in `src` and `import` it from JavaScript instead.
    contentBase: paths.appPublic,
    contentBasePublicPath: paths.publicUrlOrPath,
    // By default files from `contentBase` will not trigger a page reload.
    watchContentBase: true,
    // Enable hot reloading server. It will provide WDS_SOCKET_PATH endpoint
    // for the WebpackDevServer client so it can learn when the files were
    // updated. The WebpackDevServer client is included as an entry point
    // in the webpack development configuration. Note that only changes
    // to CSS are currently hot reloaded. JS changes will refresh the browser.
    hot: true,
    // Use 'ws' instead of 'sockjs-node' on server since we're using native
    // websockets in `webpackHotDevClient`.
    transportMode: 'ws',
    // Prevent a WS client from getting injected as we're already including
    // `webpackHotDevClient`.
    injectClient: false,
    // Enable custom sockjs pathname for websocket connection to hot reloading server.
    // Enable custom sockjs hostname, pathname and port for websocket connection
    // to hot reloading server.
    sockHost,
    sockPath,
    sockPort,
    // It is important to tell WebpackDevServer to use the same "publicPath" path as
    // we specified in the webpack config. When homepage is '.', default to serving
    // from the root.
    // remove last slash so user can land on `/test` instead of `/test/`
    publicPath: paths.publicUrlOrPath.slice(0, -1),
    // WebpackDevServer is noisy by default so we emit custom message instead
    // by listening to the compiler events with `compiler.hooks[...].tap` calls above.
    quiet: true,
    // Reportedly, this avoids CPU overload on some systems.
    // https://github.com/facebook/create-react-app/issues/293
    // src/node_modules is not ignored to support absolute imports
    // https://github.com/facebook/create-react-app/issues/1065
    watchOptions: {
      ignored: ignoredFiles(paths.appSrc)
    },
    ...
Thanks Again,
Adam
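There is no answer recorded for this question, but the knob being discussed is the dev server's watchOptions.ignored shown above. Purely as a sketch of the kind of change being asked about (the extra path and the array form are assumptions based on the question, not a confirmed fix):

// WebpackDevServer.config.js - sketch only, not a confirmed fix.
const path = require('path');
const ignoredFiles = require('react-dev-utils/ignoredFiles');
const paths = require('./paths');

module.exports = function (proxy, allowedHost) {
  return {
    // ...all of the existing options stay as they are...
    watchOptions: {
      // Keep CRA's default ignore pattern and additionally skip the folder
      // the profile pictures are written to, so saving an image no longer
      // triggers a dev-server reload.
      ignored: [
        ignoredFiles(paths.appSrc),
        path.resolve(paths.appSrc, 'Images', 'Users', 'Profile Pictures'),
      ],
    },
  };
};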

TestCafe: Changing download location

I am trying to change the download location, and I found this code while doing research (sorry, I forgot where I got it):
const browserConnection = t.testRun.browserConnection;
const client = browserConnection.provider.plugin.openedBrowsers[browserConnection.id].client;
const { Network, Page } = client;
const downloadDirectory = '../my_downloads';

await Promise.all([
  Network.enable(),
  Page.enable()
]);

Network.requestWillBeSent((param) => {
  // console.log("Network.requestWillBeSent: " + JSON.stringify(param));
});

Network.responseReceived((param) => {
  // console.log("Network.responseReceived: " + JSON.stringify(param));
});

await Page.setDownloadBehavior({
  behavior: 'allow',
  downloadPath: downloadDirectory
});
It was working perfectly fine using version 10.9.2, which was installed globally. I updated my TestCafe to 1.10.1, installed locally, and now I get this error:
TypeError: Cannot destructure property 'Network' of 'client' as it is undefined.
Any input is well appreciated, and I'm looking forward to it :)
This internal API has changed due to the addition of multi-window testing support. Please use the getActiveClient method:
const browserConnection = t.testRun.browserConnection;
const runtimeInfo = browserConnection.provider.plugin.openedBrowsers[browserConnection.id];
const { Network, Page } = await runtimeInfo.browserClient.getActiveClient();
If you need to change the download location to read and check a file from it, please use a public API for this: example.
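Putting the answer together with the snippet from the question, the updated code might look roughly like this (a sketch only; the fixture URL is made up and the rest of the question's code is assumed unchanged):

// A hypothetical TestCafe test file.
fixture`Downloads`.page`https://example.com`;

test('redirect downloads', async (t) => {
  const browserConnection = t.testRun.browserConnection;
  const runtimeInfo = browserConnection.provider.plugin.openedBrowsers[browserConnection.id];

  // getActiveClient() replaces the old `.client` property used in the question.
  const { Network, Page } = await runtimeInfo.browserClient.getActiveClient();

  await Promise.all([Network.enable(), Page.enable()]);

  // Same download redirection as in the question, obtained through the new API.
  await Page.setDownloadBehavior({
    behavior: 'allow',
    downloadPath: '../my_downloads',
  });
});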

How to cache .mp4 files in Safari with workbox-webpack-plugin?

I'm having exactly the same issue reported at https://github.com/GoogleChrome/workbox/issues/1663, which describes a problem that occurs exclusively in Safari, where mp4 videos are not rendered after being cached by the service worker.
I'm using workbox-webpack-plugin, so the instructions provided in the comment https://github.com/GoogleChrome/workbox/issues/1663#issuecomment-448755945 will not work in my case. I'm not able to require the workbox-range-requests plugin in my webpack config file and pass it to the runtime caching options, because I believe this package is intended for browser usage only. My workbox config precaches .mp4 assets and uses a network-first strategy for runtime caching.
How can I setup workbox-range-requests with workbox-webpack-plugin?
EDIT: Following Jeff's answer below, I've adjusted my webpack config to the following:
new WorkboxPlugin.InjectManifest({
  swSrc: serviceWorkerSrcPath,
  swDest: serviceWorkerBuildPath,
  importsDirectory: 'sw',
})
The build produces the following service worker:
importScripts("/_build/sw/precache-manifest.8a0be820b796b153c97ba206d9753bdb.js", "https://storage.googleapis.com/workbox-cdn/releases/3.6.2/workbox-sw.js");

workbox.precaching.precacheAndRoute(self.__precacheManifest || []);

workbox.routing.registerRoute(
  /.*\.mp4/,
  new workbox.strategies.CacheFirst({
    cacheName: 'videos',
    plugins: [
      new workbox.cacheableResponse.Plugin({ statuses: [200] }),
      new workbox.rangeRequests.Plugin(),
    ],
  }),
);
I forgot to mention previously, but I've also added the crossOrigin="anonymous" attribute to the video elements.
EDIT:
Repro that demonstrates it does not work as expected on Safari: https://github.com/acostalima/workbox-range-requests-mp4-demo
There's specific guidance for this use case in the "Serve cached audio and video" recipe in the Workbox documentation.
You can continue using the workbox-webpack-plugin, but I'd suggest using it in InjectManifest mode, which will give you control over the top-level service worker file. That will in turn make it possible to follow the recipe.
This documentation has guidance on configuring workbox-webpack-plugin in InjectManifest mode.
I had the same issue with Safari and managed to resolve it by removing my video from the precache list (self.__precacheManifest) and instead adding it in the service worker's install handler:
self.addEventListener('install', (event) => {
  const urls = [/* videoUrl */];
  const cacheName = 'videos';
  event.waitUntil(caches.open(cacheName).then((cache) => cache.addAll(urls)));
});
Looking at the logs, it seemed that otherwise only the precache was used to respond to the request for the video resource, and not the router.
Although the docs say that adding mp4s to the precache and then configuring the range plugin to handle precached mp4s is supposed to work, in practice it didn't. Removing mp4s from the precache and configuring your own video cache with the range plugin did the trick for me. Don't forget to add the crossorigin="anonymous" attribute to your videos!
Here's how I did it (webpack 5, workbox 6):
// src/service-worker.js
import { CacheableResponsePlugin } from 'workbox-cacheable-response';
import { cacheNames } from 'workbox-core';
import { precacheAndRoute } from 'workbox-precaching';
import { RangeRequestsPlugin } from 'workbox-range-requests';
import { registerRoute } from 'workbox-routing';
import { CacheFirst } from 'workbox-strategies';

const allEntries = self.__WB_MANIFEST; // Injected by WorkboxWebpackPlugin at compile time
const videoEntries = allEntries.filter((entry) => entry.url.endsWith('.mp4'));
const restEntries = allEntries.filter((entry) => !entry.url.endsWith('.mp4'));

precacheAndRoute(restEntries);

const videoCacheName = `${cacheNames.prefix}-videos-${cacheNames.suffix}`;

self.addEventListener('install', (event) => {
  const allVideosAddedToCache = caches.open(videoCacheName).then((videoCache) => {
    const videoUrls = videoEntries.map((entry) => entry.url);
    return videoCache.addAll(videoUrls);
  });
  event.waitUntil(allVideosAddedToCache);
});

registerRoute(
  (route) => route.url.pathname.endsWith('.mp4'),
  new CacheFirst({
    cacheName: videoCacheName,
    plugins: [new CacheableResponsePlugin({ statuses: [200] }), new RangeRequestsPlugin()],
  })
);

// webpack.config.js
plugins: [
  new WorkboxWebpackPlugin.InjectManifest({
    swSrc: 'src/service-worker.js',
  }),
]

// index.tsx
if ('serviceWorker' in navigator) {
  window.addEventListener('load', () => {
    navigator.serviceWorker.register('/service-worker.js');
  });
}
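Both answers call out the crossOrigin="anonymous" attribute on the video element itself. Purely for illustration (the component and file path below are made up):

// Illustrative React component only - the video path is made up.
function IntroVideo() {
  // crossOrigin="anonymous" is the attribute both answers above say the
  // video element needs for cached playback to work in Safari.
  return <video src="/media/intro.mp4" controls crossOrigin="anonymous" />;
}

export default IntroVideo;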

Exit with error after a task loaded by gulp-tasks failed

I am working with magento2-frontools and am trying to solve this issue:
https://github.com/SnowdogApps/magento2-frontools/issues/231
The problem is that gulp styles should exit with a non-zero code when there are errors, but it exits with 0.
The gulp file looks like this:
// Tasks loading
require('gulp-task-loader')({
  dir : 'task',
  plugins: plugins,
  configs: config
});
And the styles.js task like this:
'use strict';

module.exports = function() { // eslint-disable-line func-names
  // Global variables
  const gulp = this.gulp,
        plugins = this.opts.plugins,
        config = this.opts.configs,
        themes = plugins.getThemes(),
        streams = plugins.mergeStream();

  // Generate all necessary symlinks before styles compilation, but only if not part of a tasks pipeline
  if (!plugins.util.env.pipeline) {
    plugins.runSequence('inheritance');
  }

  // Loop through themes to compile scss or less depending on your config.json
  themes.forEach(name => {
    streams.add(require('../helper/scss')(gulp, plugins, config, name));
  });

  return streams;
};
(it can all be found on GitHub)
I have seen this approach to solving the problem:
.once("error", function () {
  this.once("finish", () => process.exit(1));
})
But where can I add that code?
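For illustration only (this is not from the original thread), one place such a handler could be attached is the merged stream that styles.js returns:

// End of styles.js - a sketch of where the handler could go, not a confirmed fix.
themes.forEach(name => {
  streams.add(require('../helper/scss')(gulp, plugins, config, name));
});

// Exit with a non-zero code once the merged stream finishes, if any of the
// per-theme streams emitted an error.
return streams.once('error', function () {
  this.once('finish', () => process.exit(1));
});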
Just the --ci flag has to be used.
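In practice the flag is simply passed on the gulp command line, e.g. wired up in a package.json script (the script name here is made up):

{
  "scripts": {
    "styles:ci": "gulp styles --ci"
  }
}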