When setting maxSize to 100000, for example, I get more initial chunks than the value of maxInitialRequests. I get 7 (bootstrap, main + 5 vendor chunks), whereas my maxInitialRequests is set to 3. My expectation is that the total number of initial chunks should be capped at 3. Am I missing something here?
Here is my optimization configuration:
// optimization.splitChunks configuration quoted from the question.
{
name: false,
chunks: 'all',
minChunks: 1,
minSize: 0,
// NOTE(review): the question text says maxSize was set to 100000, but 10000
// appears here — confirm which value was actually used.
maxSize: 10000,
// Per the webpack docs quoted below, maxSize takes priority over
// maxInitialRequests, which is why more than 3 initial chunks appear.
maxInitialRequests: 3,
maxAsyncRequests: 5,
cacheGroups: {
vendor: {
name: 'vendor',
chunks: 'all',
// Everything under node_modules lands in the 'vendor' chunk.
test: /node_modules/,
priority: 1
},
common: {
name: 'common',
chunks: 'all',
// Modules shared by at least 2 chunks.
minChunks: 2,
reuseExistingChunk: false,
// Higher priority than 'vendor', so matching modules prefer this group.
priority: 2
},
// Disable webpack's built-in 'default' cache group.
default: false
}
}
According to the webpack docs,
maxSize takes higher priority than maxInitialRequests/maxAsyncRequests.
The actual priority order is maxInitialRequests/maxAsyncRequests < maxSize <
minSize.
Related
I need to use images in a React Native app, but right now I have to copy-paste a long dictionary to access all these images. I want to use this dictionary across components as well, so the relative path would change. But the absolute path is tied to my computer, and there are other computers working on this project as well. Is there any way I could make a JSON file to store this and then import it somewhere?
// Lookup table: college display name -> { name, flag, points }.
// Metro (React Native's bundler) resolves require() only for static string
// literals, so each image path must remain a literal here; the repeated
// record shape is produced by a small factory instead of being hand-written
// fourteen times. The resulting object shape is identical to the original.
const college = (name, flag) => ({ name, flag, points: 0 });

const collegesData = {
  "Benjamin Franklin": college("Benjamin Franklin", require("../../assets/images/college-logos/franklin-flag.png")),
  "Berkeley": college("Berkeley", require("../../assets/images/college-logos/berkeley-flag.png")),
  "Pauli Murray": college("Pauli Murray", require("../../assets/images/college-logos/murray-flag.png")),
  "Timothy Dwight": college("Timothy Dwight", require("../../assets/images/college-logos/td-flag.png")),
  "Silliman": college("Silliman", require("../../assets/images/college-logos/silliman-flag.png")),
  "Ezra Stiles": college("Ezra Stiles", require("../../assets/images/college-logos/stiles-flag.png")),
  "Morse": college("Morse", require("../../assets/images/college-logos/morse-flag.png")),
  "Branford": college("Branford", require("../../assets/images/college-logos/branford-flag.png")),
  "Davenport": college("Davenport", require("../../assets/images/college-logos/davenport-flag.png")),
  "Jonathan Edwards": college("Jonathan Edwards", require("../../assets/images/college-logos/je-flag.png")),
  "Grace Hopper": college("Grace Hopper", require("../../assets/images/college-logos/hopper-flag.png")),
  "Saybrook": college("Saybrook", require("../../assets/images/college-logos/saybrook-flag.png")),
  "Trumbull": college("Trumbull", require("../../assets/images/college-logos/trumbull-flag.png")),
  "Pierson": college("Pierson", require("../../assets/images/college-logos/pierson-flag.png")),
};
My original configuration in vue.config.js using the default chunking strategy, which takes about 5 minutes to build locally and 35 minutes in gitlab pipeline, and results in one chunk being > 50MB and several other large chunks(see screenshot)
// vue.config.js — original configuration using vue-cli's default chunking
// (chunk-vendors / chunk-common); per the prose above it produced one
// chunk > 50MB and very long build times.
module.exports = {
devServer: {
// Allow the dev server to be reached via hostnames other than localhost.
disableHostCheck: true
},
publicPath: process.env.PUBLIC_PATH || '/',
configureWebpack: {
},
// Multi-page app: two entries, each html page pulling in the shared chunks.
pages: {
index: {
entry: "src/main.js",
template: "public/index.html",
filename: "index.html",
chunks: ["chunk-vendors", "chunk-common", "index"]
},
visualform: {
entry: "src/visualform/main.js",
template: "public/visualform.html",
filename: "visualform.html",
chunks: ["chunk-vendors", "chunk-common", "visualform"],
}
}
};
So after having a read of the webpack docs, I thought I'd use the following updated config to try and reduce the chunk size:
// vue.config.js — attempted fix using AggressiveSplittingPlugin.
// NOTE(review): `webpack` is referenced below but no require of it is shown —
// vue.config.js needs `const webpack = require('webpack')` at the top.
module.exports = {
devServer: {
disableHostCheck: true
},
publicPath: process.env.PUBLIC_PATH || '/',
configureWebpack: {
plugins: [
// Split output into ~20KB-200KB pieces.
// NOTE(review): judging by the symptoms described below (no index.js /
// visualform.js emitted, numbered chunks like 379.js instead), this plugin
// replaces the named chunks, so the pages' `chunks` lists no longer match
// any emitted asset — confirm against the webpack docs before using it
// alongside multi-page html plugins.
new webpack.optimize.AggressiveSplittingPlugin({
minSize: 20000,
maxSize: 200000,
}),
],
},
pages: {
index: {
entry: "src/main.js",
template: "public/index.html",
filename: "index.html",
chunks: ["chunk-vendors", "chunk-common", "index"]
},
visualform: {
entry: "src/visualform/main.js",
template: "public/visualform.html",
filename: "visualform.html",
chunks: ["chunk-vendors", "chunk-common", "visualform"],
}
}
};
which results in the following reduced chunk size, but appears to break the app (there is no more index.js or visualform.js present; I also tried having an output section in configureWebpack with filename, but it made no difference).
When I access the app through the browser I get a blank page and the following in the console:
A bunch of messages like this:
The resource http://localhost:3000/js/379.js was preloaded using link preload but not used within a few seconds from the window's load event. Please make sure it has an appropriate as value and it is preloaded intentionally.
I've also tried doing a split-chunks optimization in configureWebpack to try and override the default chunking of vue-cli-service:
configureWebpack: {
optimization: {
splitChunks: {
cacheGroups: {
chunks: 'all',
minSize: 20000,
maxSize: 200000,
maxInitialRequests: Infinity,
'chunk-vendors': {
test: /[\\/]node_modules[\\/]/,
name(module) {
const packageName = module.context.match(/[\\/]node_modules[\\/](.*?)([\\/]|$)/)[1];
return `npm.${packageName.replace('#', '')}`;
},
chunks: 'all',
priority: -10,
maxAsyncRequests: 6,
maxInitialRequests: 4,
minChunks: 2,
minSize: 20000,
maxSize: 200000,
reuseExistingChunk: true,
enforce: true
},
'chunk-common': {
name: 'chunk-common',
test: path.resolve('src/components'),
chunks: 'all',
minChunks: 3, // minimum common number
minSize: 20000,
maxSize: 250000,
priority: 5,
reuseExistingChunk: true,
}
}
}
}
}
But this strategy results in the same blank page, no entry point visualform.js and main.js pages being generated, and associated warnings in console
The resource http://localhost:3000/js/npm.element-ui~._node_modules_element-ui_lib_t.js was preloaded using link preload but not used within a few seconds from the window's load event. Please make sure it has an appropriate as value and it is preloaded intentionally.
Any help would be much appreciated. The chunking reduces the build time in the GitLab pipeline by about 65%, so if I can get it working it will minimize this bottleneck.
I'm a bit of a webpack noob and have been reading the docs, but there are a lot of config options, so I've probably missed something.
Thanks in advance.
Identified which component was the largest chunk and applied dynamic loading to the imported components within as per the docs here https://router.vuejs.org/guide/advanced/lazy-loading.html
This reduced my build time in gitlab pipeline to around 9 minutes
I am stuck on a page where i am not able to display the charts on the page.
To make it simplify what I have done is, here is the code sandbox:
I see there an error in console about the data, I am not sure about it.
https://codesandbox.io/s/compassionate-snyder-bckoq
I want to display the chart like this (as an example), but I am not able to display on the code sandbox
Please help.
The format of series is not aligned with ApexCharts.
You need to transform the data to match with ApexChart format.
Please see the changes in the codesandbox.
https://codesandbox.io/s/small-dew-eztod?file=/src/components/HelloWorld.vue
// ApexCharts configuration: one category per date, one series per status.
options: {
// X axis labels
xaxis: {
// NOTE(review): ApexCharts documents xaxis.type as 'category' | 'datetime'
// | 'numeric'; 'date' is not a listed value. With explicit string
// categories like these, 'category' is likely what is intended — confirm.
type: 'date',
categories: ["2021-05-04", "2021-05-05", "2021-05-07"]
},
},
// Each series holds one data point per category, in category order.
series: [
{
name: "total",
data: [2, 2, 1],
},
{
name: "pending",
data: [0, 1, 0],
},
{
name: "approved",
data: [2, 1, 1],
},
{
name: "rejected",
data: [0, 0, 0],
},
],
Transform data to fit ApexChart
// Source data keyed by date; each value holds the per-status counts for
// that day.
const data = {
  "2021-05-04": {
    total: 2,
    pending: 0,
    approved: 2,
    rejected: 0,
  },
  "2021-05-05": {
    total: 2,
    pending: 1,
    approved: 1,
    rejected: 0,
  },
  "2021-05-07": {
    total: 1,
    pending: 0,
    approved: 1,
    rejected: 0,
  },
};

// X axis: one category per date key.
// (Object.keys already returns the array — no identity .map needed.)
const xaxis = {
  type: "date",
  categories: Object.keys(data), // ['2021-05-04', '2021-05-05', '2021-05-07']
};

// Build the ApexCharts series array: { name: status, data: [countPerDate...] }.
// Iteration order of Object.values/entries matches the insertion order of
// `data`, so each series' data array lines up with `xaxis.categories`.
const statusObj = [];
for (const dayCounts of Object.values(data)) {
  for (const [status, count] of Object.entries(dayCounts)) {
    // findIndex returns a real number index (-1 when absent) — the original
    // Object.keys(...).find(...) returned a string key and only worked
    // because the string "0" happens to be truthy.
    const seriesIndex = statusObj.findIndex((series) => series.name === status);
    if (seriesIndex !== -1) {
      // Series for this status already exists: append the day's count.
      statusObj[seriesIndex].data.push(count);
    } else {
      // First time this status is seen: start a new series.
      statusObj.push({ name: status, data: [count] });
    }
  }
}
Output:
xaxis {
type: 'date',
categories: [ '2021-05-04', '2021-05-05', '2021-05-07' ]
}
statusObj [
{ name: 'total', data: [ 2, 2, 1 ] },
{ name: 'pending', data: [ 0, 1, 0 ] },
{ name: 'approved', data: [ 2, 1, 1 ] },
{ name: 'rejected', data: [ 0, 0, 0 ] }
]
Is it possible to get the total amount of messages published to a vhost in RabbitMQ (during the last minute or something like that)? I have checked the RabbitMQ Management HTTP Stats documentation but it seems to work only for a specific object, such as queues/exchanges/channels.
In 3.6.10 I can see that http://localhost:15672/api/vhosts and http://localhost:15672/api/vhosts/${vhost} (e.g. %2F for /) do have:
message_stats: {
publish: 632,
publish_details: {
rate: 0
},
confirm: 0,
confirm_details: {
rate: 0
},
return_unroutable: 0,
return_unroutable_details: {
rate: 0
},
get: 0,
get_details: {
rate: 0
},
get_no_ack: 0,
get_no_ack_details: {
rate: 0
},
deliver: 998,
deliver_details: {
rate: 0
},
deliver_no_ack: 20,
deliver_no_ack_details: {
rate: 0
},
redeliver: 522,
redeliver_details: {
rate: 0
},
ack: 466,
ack_details: {
rate: 0
},
deliver_get: 1018,
deliver_get_details: {
rate: 0
}
}
if I try to override an existing indexed field, I do not get an error.
It should error, because it is not update()!
// Reproduction: a non-unique index on userID does not reject duplicates.
var loki = require('lokijs');
var db = new loki('test.json');
// `indices` builds a binary index for faster lookups — it does NOT enforce
// uniqueness (that requires ensureUniqueIndex, see the answer below).
var users = db.addCollection('users', { indices: ['userID']});
// NOTE(review): Collection.insert takes a single document or an array of
// documents; passing three separate object arguments may insert only the
// first — wrap them in an array to be safe. Confirm against the LokiJS docs.
users.insert(
{
'name': 'Anna',
'userID': 1
},
{
'name': 'Bernd',
'userID': 2
},
{
'name': 'Christa',
'userID': 3
});
db.save();
// Duplicate userID 2 is accepted silently, which is the reported problem.
users.insert({'name': 'Dieter','userID': 2}); // this should error!!
How can I make a unique index so that I get an error when trying to insert an existing userID?
the indices option creates an index on the field, which allows for faster retrieval because the index lives in a separate sorted array within the collection (so Loki can use binary-search instead of a full loop to fetch records). However, you're looking for a unique index, which is created with ensureUniqueIndex (check here, scroll down to Finding Documents, there's a section on unique indexes.). With that, you can use the collection method by(field, value) (which can even be curried if you only pass the field value), which uses the unique index to full potential (about 2x the speed of an indexed field). Remember that you need to explicitly call ensureUniqueIndex because unique indexes cannot be serialized and persisted.
update: once the ensureUniqueIndex method is called, the collection will throw an error if you try to insert a duplicate key record. If you have repository checked out you can take a look at spec/generic/unique.spec.js for an example ( here )
// Attempted fix: unique index + 'error' event handler + try/catch around insert.
var loki = require('lokijs');
var db = new loki('test.json');
var users = db.addCollection('users', { indices: ['userID']});
// Promote userID to a unique index; inserting a duplicate should now throw.
// Unique indexes are not serialized, so this call must be repeated after
// every database load.
users.ensureUniqueIndex('userID');
// Fallback: on a collection 'error' event, bump the userID and return the doc.
// NOTE(review): returning the mutated doc from the handler does not re-insert
// it — confirm the intended retry mechanism against the LokiJS docs.
users.on('error',function(obj){
console.log('error ... adding 1 to userID');
obj.userID = obj.userID+1;
return obj;
});
users.insert(
{
'name': 'Anna',
'userID': 1
});
users.insert(
{
'name': 'Bernd',
'userID': 2
});
users.insert(
{
'name': 'Christa',
'userID': 3
});
db.save();
console.log(users.data);
try {
users.insert({'name': 'Dieter','userID': 2}); // this should error!!
} catch(e){
var i = 2+1;
// NOTE(review): userID 3 collides with Christa's existing record, so this
// retry can throw as well.
users.insert({'name': 'Dieter','userID': i}); // this should error!!
}
db.save();
// Reload from disk to inspect what was actually persisted.
db2 = new loki('test.json');
db2.loadDatabase({}, function () {
var users2 = db2.getCollection('users')
console.log(users2.data);
});
Neither users.on('error', ...) nor try { users.insert... } catch(e){ // id+1 } handles the thrown error.
That's my console:
[ { name: 'Anna',
userID: 1,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 1 },
{ name: 'Bernd',
userID: 2,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 2 },
{ name: 'Christa',
userID: 3,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 3 } ]
Duplicate key for property userID: 2
[ { name: 'Anna',
userID: 1,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 1 },
{ name: 'Bernd',
userID: 2,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 2 },
{ name: 'Christa',
userID: 3,
meta: { revision: 0, created: 1436186694342, version: 0 },
'$loki': 3 },
{ name: 'Dieter',
userID: 2,
meta: { revision: 0, created: 0, version: 0 },
'$loki': 4 } ]