nav tabs on admin dashboard

2019-03-07 00:20:34 -06:00
parent f73d6ae228
commit e4f473f376
11661 changed files with 216240 additions and 1544253 deletions


@@ -6,8 +6,13 @@
class AggressiveMergingPlugin {
constructor(options) {
if(options !== undefined && typeof options !== "object" || Array.isArray(options)) {
throw new Error("Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/");
if (
(options !== undefined && typeof options !== "object") ||
Array.isArray(options)
) {
throw new Error(
"Argument should be an options object. To use defaults, pass in nothing.\nFor more info on options, see https://webpack.js.org/plugins/"
);
}
this.options = options || {};
}
@@ -16,99 +21,66 @@ class AggressiveMergingPlugin {
const options = this.options;
const minSizeReduce = options.minSizeReduce || 1.5;
function getParentsWeight(chunk) {
return chunk.parents.map((p) => {
return p.isInitial() ? options.entryChunkMultiplicator || 10 : 1;
}).reduce((a, b) => {
return a + b;
}, 0);
}
compiler.plugin("this-compilation", (compilation) => {
compilation.plugin("optimize-chunks-advanced", (chunks) => {
let combinations = [];
chunks.forEach((a, idx) => {
if(a.isInitial()) return;
for(let i = 0; i < idx; i++) {
const b = chunks[i];
if(b.isInitial()) continue;
combinations.push({
a,
b,
improvement: undefined
compiler.hooks.thisCompilation.tap(
"AggressiveMergingPlugin",
compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"AggressiveMergingPlugin",
chunks => {
let combinations = [];
chunks.forEach((a, idx) => {
if (a.canBeInitial()) return;
for (let i = 0; i < idx; i++) {
const b = chunks[i];
if (b.canBeInitial()) continue;
combinations.push({
a,
b,
improvement: undefined
});
}
});
for (const pair of combinations) {
const a = pair.b.size({
chunkOverhead: 0
});
const b = pair.a.size({
chunkOverhead: 0
});
const ab = pair.b.integratedSize(pair.a, {
chunkOverhead: 0
});
let newSize;
if (ab === false) {
pair.improvement = false;
return;
} else {
newSize = ab;
}
pair.improvement = (a + b) / newSize;
}
combinations = combinations.filter(pair => {
return pair.improvement !== false;
});
combinations.sort((a, b) => {
return b.improvement - a.improvement;
});
const pair = combinations[0];
if (!pair) return;
if (pair.improvement < minSizeReduce) return;
if (pair.b.integrate(pair.a, "aggressive-merge")) {
chunks.splice(chunks.indexOf(pair.a), 1);
return true;
}
}
});
combinations.forEach((pair) => {
const a = pair.b.size({
chunkOverhead: 0
});
const b = pair.a.size({
chunkOverhead: 0
});
const ab = pair.b.integratedSize(pair.a, {
chunkOverhead: 0
});
let newSize;
if(ab === false) {
pair.improvement = false;
return;
} else if(options.moveToParents) {
const aOnly = ab - b;
const bOnly = ab - a;
const common = a + b - ab;
newSize = common + getParentsWeight(pair.b) * aOnly + getParentsWeight(pair.a) * bOnly;
} else {
newSize = ab;
}
pair.improvement = (a + b) / newSize;
});
combinations = combinations.filter((pair) => {
return pair.improvement !== false;
});
combinations.sort((a, b) => {
return b.improvement - a.improvement;
});
const pair = combinations[0];
if(!pair) return;
if(pair.improvement < minSizeReduce) return;
if(options.moveToParents) {
const commonModules = pair.b.modules.filter((m) => {
return pair.a.modules.indexOf(m) >= 0;
});
const aOnlyModules = pair.b.modules.filter((m) => {
return commonModules.indexOf(m) < 0;
});
const bOnlyModules = pair.a.modules.filter((m) => {
return commonModules.indexOf(m) < 0;
});
aOnlyModules.forEach((m) => {
pair.b.removeModule(m);
m.removeChunk(pair.b);
pair.b.parents.forEach((c) => {
c.addModule(m);
m.addChunk(c);
});
});
bOnlyModules.forEach((m) => {
pair.a.removeModule(m);
m.removeChunk(pair.a);
pair.a.parents.forEach((c) => {
c.addModule(m);
m.addChunk(c);
});
});
}
if(pair.b.integrate(pair.a, "aggressive-merge")) {
chunks.splice(chunks.indexOf(pair.a), 1);
return true;
}
});
});
);
}
);
}
}
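
For reference, a minimal webpack configuration sketch using this plugin; the option name minSizeReduce comes from the constructor above (default 1.5), while the concrete value is purely illustrative:

// webpack.config.js (sketch)
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    // only merge two chunks when the merged size is at most half of the
    // sum of the separate sizes, i.e. (a + b) / newSize >= minSizeReduce
    new webpack.optimize.AggressiveMergingPlugin({ minSizeReduce: 2 })
  ]
};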


@@ -5,191 +5,290 @@
"use strict";
const identifierUtils = require("../util/identifier");
const { intersect } = require("../util/SetHelpers");
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json");
function moveModuleBetween(oldChunk, newChunk) {
return function(module) {
/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
const moveModuleBetween = (oldChunk, newChunk) => {
return module => {
oldChunk.moveModule(module, newChunk);
};
}
};
function isNotAEntryModule(entryModule) {
return function(module) {
const isNotAEntryModule = entryModule => {
return module => {
return entryModule !== module;
};
}
function copyWithReason(obj) {
const newObj = {};
Object.keys(obj).forEach((key) => {
newObj[key] = obj[key];
});
if(!newObj.reasons || newObj.reasons.indexOf("aggressive-splitted") < 0)
newObj.reasons = (newObj.reasons || []).concat("aggressive-splitted");
return newObj;
}
};
class AggressiveSplittingPlugin {
/**
* @param {AggressiveSplittingPluginOptions=} options options object
*/
constructor(options) {
this.options = options || {};
if(typeof this.options.minSize !== "number") this.options.minSize = 30 * 1024;
if(typeof this.options.maxSize !== "number") this.options.maxSize = 50 * 1024;
if(typeof this.options.chunkOverhead !== "number") this.options.chunkOverhead = 0;
if(typeof this.options.entryChunkMultiplicator !== "number") this.options.entryChunkMultiplicator = 1;
if (!options) options = {};
validateOptions(schema, options, "Aggressive Splitting Plugin");
this.options = options;
if (typeof this.options.minSize !== "number") {
this.options.minSize = 30 * 1024;
}
if (typeof this.options.maxSize !== "number") {
this.options.maxSize = 50 * 1024;
}
if (typeof this.options.chunkOverhead !== "number") {
this.options.chunkOverhead = 0;
}
if (typeof this.options.entryChunkMultiplicator !== "number") {
this.options.entryChunkMultiplicator = 1;
}
}
apply(compiler) {
compiler.plugin("this-compilation", (compilation) => {
compilation.plugin("optimize-chunks-advanced", (chunks) => {
// Precompute stuff
const nameToModuleMap = new Map();
compilation.modules.forEach(m => {
const name = identifierUtils.makePathsRelative(compiler.context, m.identifier(), compilation.cache);
nameToModuleMap.set(name, m);
compiler.hooks.thisCompilation.tap(
"AggressiveSplittingPlugin",
compilation => {
let needAdditionalSeal = false;
let newSplits;
let fromAggressiveSplittingSet;
let chunkSplitDataMap;
compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
newSplits = [];
fromAggressiveSplittingSet = new Set();
chunkSplitDataMap = new Map();
});
compilation.hooks.optimizeChunksAdvanced.tap(
"AggressiveSplittingPlugin",
chunks => {
// Precompute stuff
const nameToModuleMap = new Map();
const moduleToNameMap = new Map();
for (const m of compilation.modules) {
const name = identifierUtils.makePathsRelative(
compiler.context,
m.identifier(),
compilation.cache
);
nameToModuleMap.set(name, m);
moduleToNameMap.set(m, name);
}
const savedSplits = compilation.records && compilation.records.aggressiveSplits || [];
const usedSplits = compilation._aggressiveSplittingSplits ?
savedSplits.concat(compilation._aggressiveSplittingSplits) : savedSplits;
// Check used chunk ids
const usedIds = new Set();
for (const chunk of chunks) {
usedIds.add(chunk.id);
}
const minSize = this.options.minSize;
const maxSize = this.options.maxSize;
// 1. try to restore to recorded splitting
for(let j = 0; j < usedSplits.length; j++) {
const splitData = usedSplits[j];
const selectedModules = splitData.modules.map(name => nameToModuleMap.get(name));
const recordedSplits =
(compilation.records && compilation.records.aggressiveSplits) ||
[];
const usedSplits = newSplits
? recordedSplits.concat(newSplits)
: recordedSplits;
// Do the modules exist at all?
if(selectedModules.every(Boolean)) {
const minSize = this.options.minSize;
const maxSize = this.options.maxSize;
// Find all chunks containing all modules in the split
for(let i = 0; i < chunks.length; i++) {
const chunk = chunks[i];
const applySplit = splitData => {
// Cannot split if id is already taken
if (splitData.id !== undefined && usedIds.has(splitData.id)) {
return false;
}
// Cheap check if chunk is suitable at all
if(chunk.getNumberOfModules() < splitData.modules.length)
continue;
// Get module objects from names
const selectedModules = splitData.modules.map(name =>
nameToModuleMap.get(name)
);
// Check if all modules are in the chunk
if(selectedModules.every(m => chunk.containsModule(m))) {
// Do the modules exist at all?
if (!selectedModules.every(Boolean)) return false;
// Is chunk identical to the split or do we need to split it?
if(chunk.getNumberOfModules() > splitData.modules.length) {
// split the chunk into two parts
const newChunk = compilation.addChunk();
selectedModules.forEach(moveModuleBetween(chunk, newChunk));
chunk.split(newChunk);
chunk.name = null;
newChunk._fromAggressiveSplitting = true;
if(j < savedSplits.length)
newChunk._fromAggressiveSplittingIndex = j;
if(splitData.id !== null && splitData.id !== undefined) {
newChunk.id = splitData.id;
}
newChunk.origins = chunk.origins.map(copyWithReason);
chunk.origins = chunk.origins.map(copyWithReason);
return true;
} else { // chunk is identical to the split
if(j < savedSplits.length)
chunk._fromAggressiveSplittingIndex = j;
chunk.name = null;
if(splitData.id !== null && splitData.id !== undefined) {
chunk.id = splitData.id;
// Check if size matches (faster than waiting for hash)
const size = selectedModules.reduce(
(sum, m) => sum + m.size(),
0
);
if (size !== splitData.size) return false;
// get chunks with all modules
const selectedChunks = intersect(
selectedModules.map(m => new Set(m.chunksIterable))
);
// No relevant chunks found
if (selectedChunks.size === 0) return false;
// The found chunk is already the split or similar
if (
selectedChunks.size === 1 &&
Array.from(selectedChunks)[0].getNumberOfModules() ===
selectedModules.length
) {
const chunk = Array.from(selectedChunks)[0];
if (fromAggressiveSplittingSet.has(chunk)) return false;
fromAggressiveSplittingSet.add(chunk);
chunkSplitDataMap.set(chunk, splitData);
return true;
}
// split the chunk into two parts
const newChunk = compilation.addChunk();
newChunk.chunkReason = "aggressive splitted";
for (const chunk of selectedChunks) {
selectedModules.forEach(moveModuleBetween(chunk, newChunk));
chunk.split(newChunk);
chunk.name = null;
}
fromAggressiveSplittingSet.add(newChunk);
chunkSplitDataMap.set(newChunk, splitData);
if (splitData.id !== null && splitData.id !== undefined) {
newChunk.id = splitData.id;
}
return true;
};
// try to restore to recorded splitting
let changed = false;
for (let j = 0; j < usedSplits.length; j++) {
const splitData = usedSplits[j];
if (applySplit(splitData)) changed = true;
}
// for any chunk which isn't split yet, split it and create a new entry
// start with the biggest chunk
const sortedChunks = chunks.slice().sort((a, b) => {
const diff1 = b.modulesSize() - a.modulesSize();
if (diff1) return diff1;
const diff2 = a.getNumberOfModules() - b.getNumberOfModules();
if (diff2) return diff2;
const modulesA = Array.from(a.modulesIterable);
const modulesB = Array.from(b.modulesIterable);
modulesA.sort();
modulesB.sort();
const aI = modulesA[Symbol.iterator]();
const bI = modulesB[Symbol.iterator]();
// eslint-disable-next-line no-constant-condition
while (true) {
const aItem = aI.next();
const bItem = bI.next();
if (aItem.done) return 0;
const aModuleIdentifier = aItem.value.identifier();
const bModuleIdentifier = bItem.value.identifier();
if (aModuleIdentifier > bModuleIdentifier) return -1;
if (aModuleIdentifier < bModuleIdentifier) return 1;
}
});
for (const chunk of sortedChunks) {
if (fromAggressiveSplittingSet.has(chunk)) continue;
const size = chunk.modulesSize();
if (size > maxSize && chunk.getNumberOfModules() > 1) {
const modules = chunk
.getModules()
.filter(isNotAEntryModule(chunk.entryModule))
.sort((a, b) => {
a = a.identifier();
b = b.identifier();
if (a > b) return 1;
if (a < b) return -1;
return 0;
});
const selectedModules = [];
let selectedModulesSize = 0;
for (let k = 0; k < modules.length; k++) {
const module = modules[k];
const newSize = selectedModulesSize + module.size();
if (newSize > maxSize && selectedModulesSize >= minSize) {
break;
}
selectedModulesSize = newSize;
selectedModules.push(module);
}
if (selectedModules.length === 0) continue;
const splitData = {
modules: selectedModules
.map(m => moduleToNameMap.get(m))
.sort(),
size: selectedModulesSize
};
if (applySplit(splitData)) {
newSplits = (newSplits || []).concat(splitData);
changed = true;
}
}
}
if (changed) return true;
}
}
);
compilation.hooks.recordHash.tap(
"AggressiveSplittingPlugin",
records => {
// 4. save made splittings to records
const allSplits = new Set();
const invalidSplits = new Set();
// 2. for any other chunk which isn't split yet, split it
for(let i = 0; i < chunks.length; i++) {
const chunk = chunks[i];
const size = chunk.size(this.options);
if(size > maxSize && chunk.getNumberOfModules() > 1) {
const newChunk = compilation.addChunk();
const modules = chunk.getModules()
.filter(isNotAEntryModule(chunk.entryModule))
.sort((a, b) => {
a = a.identifier();
b = b.identifier();
if(a > b) return 1;
if(a < b) return -1;
return 0;
});
for(let k = 0; k < modules.length; k++) {
chunk.moveModule(modules[k], newChunk);
const newSize = newChunk.size(this.options);
const chunkSize = chunk.size(this.options);
// break early if it's fine
if(chunkSize < maxSize && newSize < maxSize && newSize >= minSize && chunkSize >= minSize)
break;
if(newSize > maxSize && k === 0) {
// break if there is a single module which is bigger than maxSize
break;
}
if(newSize > maxSize || chunkSize < minSize) {
// move it back
newChunk.moveModule(modules[k], chunk);
// check if it's fine now
if(newSize < maxSize && newSize >= minSize && chunkSize >= minSize)
break;
// Check if some splittings are invalid
// We remove invalid splittings and try again
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
if (splitData.hash && chunk.hash !== splitData.hash) {
// Split was successful, but hash doesn't equal
// We can throw away the split since it's useless now
invalidSplits.add(splitData);
}
}
}
if(newChunk.getNumberOfModules() > 0) {
chunk.split(newChunk);
chunk.name = null;
newChunk.origins = chunk.origins.map(copyWithReason);
chunk.origins = chunk.origins.map(copyWithReason);
compilation._aggressiveSplittingSplits = (compilation._aggressiveSplittingSplits || []).concat({
modules: newChunk.mapModules(m => identifierUtils.makePathsRelative(compiler.context, m.identifier(), compilation.cache))
});
return true;
if (invalidSplits.size > 0) {
records.aggressiveSplits = records.aggressiveSplits.filter(
splitData => !invalidSplits.has(splitData)
);
needAdditionalSeal = true;
} else {
chunks.splice(chunks.indexOf(newChunk), 1);
}
}
}
});
compilation.plugin("record-hash", (records) => {
// 3. save made splittings to records
const minSize = this.options.minSize;
if(!records.aggressiveSplits) records.aggressiveSplits = [];
compilation.chunks.forEach((chunk) => {
if(chunk.hasEntryModule()) return;
const size = chunk.size(this.options);
const incorrectSize = size < minSize;
const modules = chunk.mapModules(m => identifierUtils.makePathsRelative(compiler.context, m.identifier(), compilation.cache));
if(typeof chunk._fromAggressiveSplittingIndex === "undefined") {
if(incorrectSize) return;
chunk.recorded = true;
records.aggressiveSplits.push({
modules: modules,
hash: chunk.hash,
id: chunk.id
});
} else {
const splitData = records.aggressiveSplits[chunk._fromAggressiveSplittingIndex];
if(splitData.hash !== chunk.hash || incorrectSize) {
if(chunk._fromAggressiveSplitting) {
chunk._aggressiveSplittingInvalid = true;
splitData.invalid = true;
} else {
splitData.hash = chunk.hash;
// set hash and id values on all (new) splittings
for (const chunk of compilation.chunks) {
const splitData = chunkSplitDataMap.get(chunk);
if (splitData !== undefined) {
splitData.hash = chunk.hash;
splitData.id = chunk.id;
allSplits.add(splitData);
// set flag for stats
chunk.recorded = true;
}
}
// Also add all unused historical splits (after the used ones)
// They can still be used in some future compilation
const recordedSplits =
compilation.records && compilation.records.aggressiveSplits;
if (recordedSplits) {
for (const splitData of recordedSplits) {
if (!invalidSplits.has(splitData)) allSplits.add(splitData);
}
}
// record all splits
records.aggressiveSplits = Array.from(allSplits);
needAdditionalSeal = false;
}
}
});
records.aggressiveSplits = records.aggressiveSplits.filter((splitData) => {
return !splitData.invalid;
});
});
compilation.plugin("need-additional-seal", (callback) => {
const invalid = compilation.chunks.some((chunk) => {
return chunk._aggressiveSplittingInvalid;
});
if(invalid)
return true;
});
});
);
compilation.hooks.needAdditionalSeal.tap(
"AggressiveSplittingPlugin",
() => {
if (needAdditionalSeal) {
needAdditionalSeal = false;
return true;
}
}
);
}
);
}
}
module.exports = AggressiveSplittingPlugin;
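
A configuration sketch; minSize and maxSize are the options the constructor above defaults to 30 KiB and 50 KiB, and because splits are persisted through compilation.records, a fixed recordsPath is commonly set alongside the plugin so split chunks stay stable across builds:

// webpack.config.js (sketch)
const path = require("path");
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    new webpack.optimize.AggressiveSplittingPlugin({
      minSize: 30 * 1024, // don't produce chunks smaller than this
      maxSize: 50 * 1024  // split chunks larger than this
    })
  ],
  // splits are read from / written to the records file
  recordsPath: path.join(__dirname, "records.json")
};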


@@ -3,48 +3,63 @@
Author Tobias Koppers @sokra
*/
"use strict";
const sortByIndex = (a, b) => {
return a.index - b.index;
};
const sortByIndex2 = (a, b) => {
return a.index2 - b.index2;
};
class ChunkModuleIdRangePlugin {
constructor(options) {
this.options = options;
}
apply(compiler) {
const options = this.options;
compiler.plugin("compilation", (compilation) => {
compilation.plugin("module-ids", (modules) => {
const chunk = this.chunks.find((chunk) => chunk.name === options.name);
if(!chunk) throw new Error("ChunkModuleIdRangePlugin: Chunk with name '" + options.name + "' was not found");
let currentId = options.start;
compiler.hooks.compilation.tap("ChunkModuleIdRangePlugin", compilation => {
compilation.hooks.moduleIds.tap("ChunkModuleIdRangePlugin", modules => {
const chunk = compilation.chunks.find(
chunk => chunk.name === options.name
);
if (!chunk) {
throw new Error(
`ChunkModuleIdRangePlugin: Chunk with name '${
options.name
}"' was not found`
);
}
let chunkModules;
if(options.order) {
chunkModules = chunk.modules.slice();
switch(options.order) {
if (options.order) {
chunkModules = Array.from(chunk.modulesIterable);
switch (options.order) {
case "index":
chunkModules.sort((a, b) => {
return a.index - b.index;
});
chunkModules.sort(sortByIndex);
break;
case "index2":
chunkModules.sort((a, b) => {
return a.index2 - b.index2;
});
chunkModules.sort(sortByIndex2);
break;
default:
throw new Error("ChunkModuleIdRangePlugin: unexpected value of order");
throw new Error(
"ChunkModuleIdRangePlugin: unexpected value of order"
);
}
} else {
chunkModules = modules.filter((m) => {
return m.chunks.indexOf(chunk) >= 0;
chunkModules = modules.filter(m => {
return m.chunksIterable.has(chunk);
});
}
for(let i = 0; i < chunkModules.length; i++) {
let currentId = options.start || 0;
for (let i = 0; i < chunkModules.length; i++) {
const m = chunkModules[i];
if(m.id === null) {
if (m.id === null) {
m.id = currentId++;
}
if(options.end && currentId > options.end)
break;
if (options.end && currentId > options.end) break;
}
});
});
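
A usage sketch; the plugin is required directly from its location under lib/optimize (the exact import path is an assumption), and the option names name, order, start and end are taken from the code above:

// webpack.config.js (sketch)
const ChunkModuleIdRangePlugin = require("webpack/lib/optimize/ChunkModuleIdRangePlugin");

module.exports = {
  // ...
  plugins: [
    new ChunkModuleIdRangePlugin({
      name: "main",   // chunk whose modules receive ids from this range
      order: "index", // or "index2"; omit to filter by chunk membership instead
      start: 100,     // first id to assign (defaults to 0)
      end: 200        // stop assigning ids past this value
    })
  ]
};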


@@ -1,404 +0,0 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
let nextIdent = 0;
class CommonsChunkPlugin {
constructor(options) {
if(arguments.length > 1) {
throw new Error(`Deprecation notice: CommonsChunkPlugin now only takes a single argument. Either an options
object *or* the name of the chunk.
Example: if your old code looked like this:
new webpack.optimize.CommonsChunkPlugin('vendor', 'vendor.bundle.js')
You would change it to:
new webpack.optimize.CommonsChunkPlugin({ name: 'vendor', filename: 'vendor.bundle.js' })
The available options are:
name: string
names: string[]
filename: string
minChunks: number
chunks: string[]
children: boolean
async: boolean
minSize: number`);
}
const normalizedOptions = this.normalizeOptions(options);
this.chunkNames = normalizedOptions.chunkNames;
this.filenameTemplate = normalizedOptions.filenameTemplate;
this.minChunks = normalizedOptions.minChunks;
this.selectedChunks = normalizedOptions.selectedChunks;
this.children = normalizedOptions.children;
this.deepChildren = normalizedOptions.deepChildren;
this.async = normalizedOptions.async;
this.minSize = normalizedOptions.minSize;
this.ident = __filename + (nextIdent++);
}
normalizeOptions(options) {
if(Array.isArray(options)) {
return {
chunkNames: options,
};
}
if(typeof options === "string") {
return {
chunkNames: [options],
};
}
// options.children and options.chunk may not be used together
if(options.children && options.chunks) {
throw new Error("You can't and it does not make any sense to use \"children\" and \"chunk\" options together.");
}
/**
* options.async and options.filename are also not possible together
* as filename specifies how the chunk is called but "async" implies
* that webpack will take care of loading this file.
*/
if(options.async && options.filename) {
throw new Error(`You can not specify a filename if you use the "async" option.
You can however specify the name of the async chunk by passing the desired string as the "async" option.`);
}
/**
* Make sure this is either an array or undefined.
* "name" can be a string and
* "names" a string or an array
*/
const chunkNames = options.name || options.names ? [].concat(options.name || options.names) : undefined;
return {
chunkNames: chunkNames,
filenameTemplate: options.filename,
minChunks: options.minChunks,
selectedChunks: options.chunks,
children: options.children,
deepChildren: options.deepChildren,
async: options.async,
minSize: options.minSize
};
}
apply(compiler) {
compiler.plugin("this-compilation", (compilation) => {
compilation.plugin(["optimize-chunks", "optimize-extracted-chunks"], (chunks) => {
// only optimize once
if(compilation[this.ident]) return;
compilation[this.ident] = true;
/**
* Creates a list of "common" chunks based on the options.
* The list is made up of preexisting or newly created chunks.
* - If chunk has the name as specified in the chunkNames it is put in the list
* - If no chunk with the name as given in chunkNames exists a new chunk is created and added to the list
*
* These chunks are the "targets" for extracted modules.
*/
const targetChunks = this.getTargetChunks(chunks, compilation, this.chunkNames, this.children, this.async);
// iterate over all our new chunks
targetChunks.forEach((targetChunk, idx) => {
/**
* These chunks are subject to get "common" modules extracted and moved to the common chunk
*/
const affectedChunks = this.getAffectedChunks(compilation, chunks, targetChunk, targetChunks, idx, this.selectedChunks, this.async, this.children, this.deepChildren);
// bail if no chunk is affected
if(!affectedChunks) {
return;
}
// If we are async create an async chunk now
// override the "commonChunk" with the newly created async one and use it as commonChunk from now on
let asyncChunk;
if(this.async) {
// If async chunk is one of the affected chunks, just use it
asyncChunk = affectedChunks.filter(c => c.name === this.async)[0];
// Otherwise create a new one
if(!asyncChunk) {
asyncChunk = this.createAsyncChunk(
compilation,
targetChunks.length <= 1 || typeof this.async !== "string" ? this.async :
targetChunk.name ? `${this.async}-${targetChunk.name}` :
true,
targetChunk
);
}
targetChunk = asyncChunk;
}
/**
* Check which modules are "common" and could be extracted to a "common" chunk
*/
const extractableModules = this.getExtractableModules(this.minChunks, affectedChunks, targetChunk);
// If the minSize option is set check if the size extracted from the chunk is reached
// else bail out here.
// As all modules/commons are interlinked with each other, common modules would be extracted
// if we reach this mark at a later common chunk. (quirky I guess).
if(this.minSize) {
const modulesSize = this.calculateModulesSize(extractableModules);
// if too small, bail
if(modulesSize < this.minSize)
return;
}
// Remove modules that are moved to commons chunk from their original chunks
// return all chunks that are affected by having modules removed - we need them later (apparently)
const chunksWithExtractedModules = this.extractModulesAndReturnAffectedChunks(extractableModules, affectedChunks);
// connect all extracted modules with the common chunk
this.addExtractedModulesToTargetChunk(targetChunk, extractableModules);
// set filenameTemplate for chunk
if(this.filenameTemplate)
targetChunk.filenameTemplate = this.filenameTemplate;
// if we are async connect the blocks of the "reallyUsedChunk" - the ones that had modules removed -
// with the commonChunk and get the origins for the asyncChunk (remember "asyncChunk === commonChunk" at this moment).
// bail out
if(this.async) {
this.moveExtractedChunkBlocksToTargetChunk(chunksWithExtractedModules, targetChunk);
asyncChunk.origins = this.extractOriginsOfChunksWithExtractedModules(chunksWithExtractedModules);
return;
}
// we are not in "async" mode
// connect used chunks with commonChunk - shouldn't this be reallyUsedChunks here?
this.makeTargetChunkParentOfAffectedChunks(affectedChunks, targetChunk);
});
return true;
});
});
}
getTargetChunks(allChunks, compilation, chunkNames, children, asyncOption) {
const asyncOrNoSelectedChunk = children || asyncOption;
// we have specified chunk names
if(chunkNames) {
// map chunks by chunkName for quick access
const allChunksNameMap = allChunks.reduce((map, chunk) => {
if(chunk.name) {
map.set(chunk.name, chunk);
}
return map;
}, new Map());
// Ensure we have a chunk per specified chunk name.
// Reuse existing chunks if possible
return chunkNames.map(chunkName => {
if(allChunksNameMap.has(chunkName)) {
return allChunksNameMap.get(chunkName);
}
// add the filtered chunks to the compilation
return compilation.addChunk(chunkName);
});
}
// we don't have named chunks specified, so we just take all of them
if(asyncOrNoSelectedChunk) {
return allChunks;
}
/**
* No chunk name(s) was specified nor is this an async/children commons chunk
*/
throw new Error(`You did not specify any valid target chunk settings.
Take a look at the "name"/"names" or async/children option.`);
}
getAffectedUnnamedChunks(affectedChunks, targetChunk, rootChunk, asyncOption, deepChildrenOption) {
let chunks = targetChunk.chunks;
chunks && chunks.forEach((chunk) => {
if(chunk.isInitial()) {
return;
}
// If all the parents of a chunk are either
// a) the target chunk we started with
// b) themselves affected chunks
// we can assume that this chunk is an affected chunk too, as there is no way a chunk that
// isn't only depending on the target chunk is a parent of the chunk tested
if(asyncOption || chunk.parents.every((parentChunk) => parentChunk === rootChunk || affectedChunks.has(parentChunk))) {
// This check not only dedupes the affectedChunks but also guarantees we avoid endless loops
if(!affectedChunks.has(chunk)) {
// We mutate the affected chunks before going deeper, so the deeper levels and other branches
// have the information of this chunk being affected for their assertion if a chunk should
// not be affected
affectedChunks.add(chunk);
// We recurse down to all the children of the chunk, applying the same assumption.
// This guarantees that if a chunk should be an affected chunk,
// at the latest the last connection to the same chunk meets the
// condition to add it to the affected chunks.
if(deepChildrenOption === true) {
this.getAffectedUnnamedChunks(affectedChunks, chunk, rootChunk, asyncOption, deepChildrenOption);
}
}
}
});
}
getAffectedChunks(compilation, allChunks, targetChunk, targetChunks, currentIndex, selectedChunks, asyncOption, childrenOption, deepChildrenOption) {
const asyncOrNoSelectedChunk = childrenOption || asyncOption;
if(Array.isArray(selectedChunks)) {
return allChunks.filter(chunk => {
const notCommonChunk = chunk !== targetChunk;
const isSelectedChunk = selectedChunks.indexOf(chunk.name) > -1;
return notCommonChunk && isSelectedChunk;
});
}
if(asyncOrNoSelectedChunk) {
let affectedChunks = new Set();
this.getAffectedUnnamedChunks(affectedChunks, targetChunk, targetChunk, asyncOption, deepChildrenOption);
return Array.from(affectedChunks);
}
/**
* past this point only entry chunks are allowed to become commonChunks
*/
if(targetChunk.parents.length > 0) {
compilation.errors.push(new Error("CommonsChunkPlugin: While running in normal mode it's not allowed to use a non-entry chunk (" + targetChunk.name + ")"));
return;
}
/**
* If we find a "targetchunk" that is also a normal chunk (meaning it is probably specified as an entry)
* and the current target chunk comes after that and the found chunk has a runtime*
* make that chunk be an 'affected' chunk of the current target chunk.
*
* To understand what that means take a look at the "examples/chunkhash", this basically will
* result in the runtime to be extracted to the current target chunk.
*
* *runtime: the "runtime" is the "webpack"-block you may have seen in the bundles that resolves modules etc.
*/
return allChunks.filter((chunk) => {
const found = targetChunks.indexOf(chunk);
if(found >= currentIndex) return false;
return chunk.hasRuntime();
});
}
createAsyncChunk(compilation, asyncOption, targetChunk) {
const asyncChunk = compilation.addChunk(typeof asyncOption === "string" ? asyncOption : undefined);
asyncChunk.chunkReason = "async commons chunk";
asyncChunk.extraAsync = true;
asyncChunk.addParent(targetChunk);
targetChunk.addChunk(asyncChunk);
return asyncChunk;
}
// If minChunks is a function use that
// otherwise check if a module is used at least minChunks or 2 or usedChunks.length times
getModuleFilter(minChunks, targetChunk, usedChunksLength) {
if(typeof minChunks === "function") {
return minChunks;
}
const minCount = (minChunks || Math.max(2, usedChunksLength));
const isUsedAtLeastMinTimes = (module, count) => count >= minCount;
return isUsedAtLeastMinTimes;
}
getExtractableModules(minChunks, usedChunks, targetChunk) {
if(minChunks === Infinity) {
return [];
}
// count how many chunks contain a module
const commonModulesToCountMap = usedChunks.reduce((map, chunk) => {
for(const module of chunk.modulesIterable) {
const count = map.has(module) ? map.get(module) : 0;
map.set(module, count + 1);
}
return map;
}, new Map());
// filter by minChunks
const moduleFilterCount = this.getModuleFilter(minChunks, targetChunk, usedChunks.length);
// filter by condition
const moduleFilterCondition = (module, chunk) => {
if(!module.chunkCondition) {
return true;
}
return module.chunkCondition(chunk);
};
return Array.from(commonModulesToCountMap).filter(entry => {
const module = entry[0];
const count = entry[1];
// if the module passes both filters, keep it.
return moduleFilterCount(module, count) && moduleFilterCondition(module, targetChunk);
}).map(entry => entry[0]);
}
calculateModulesSize(modules) {
return modules.reduce((totalSize, module) => totalSize + module.size(), 0);
}
extractModulesAndReturnAffectedChunks(reallyUsedModules, usedChunks) {
return reallyUsedModules.reduce((affectedChunksSet, module) => {
for(const chunk of usedChunks) {
// removeChunk returns true if the chunk was contained and successfully removed
// false if the module did not have a connection to the chunk in question
if(module.removeChunk(chunk)) {
affectedChunksSet.add(chunk);
}
}
return affectedChunksSet;
}, new Set());
}
addExtractedModulesToTargetChunk(chunk, modules) {
for(const module of modules) {
chunk.addModule(module);
module.addChunk(chunk);
}
}
makeTargetChunkParentOfAffectedChunks(usedChunks, commonChunk) {
for(const chunk of usedChunks) {
// set commonChunk as new sole parent
chunk.parents = [commonChunk];
// add chunk to commonChunk
commonChunk.addChunk(chunk);
for(const entrypoint of chunk.entrypoints) {
entrypoint.insertChunk(commonChunk, chunk);
}
}
}
moveExtractedChunkBlocksToTargetChunk(chunks, targetChunk) {
for(const chunk of chunks) {
if(chunk === targetChunk) continue;
for(const block of chunk.blocks) {
if(block.chunks.indexOf(targetChunk) === -1) {
block.chunks.unshift(targetChunk);
}
targetChunk.addBlock(block);
}
}
}
extractOriginsOfChunksWithExtractedModules(chunks) {
const origins = [];
for(const chunk of chunks) {
for(const origin of chunk.origins) {
const newOrigin = Object.create(origin);
newOrigin.reasons = (origin.reasons || []).concat("async commons");
origins.push(newOrigin);
}
}
return origins;
}
}
module.exports = CommonsChunkPlugin;
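
For reference, the single-options-object form that the deprecation notice in this (now removed) plugin describes, using the options it lists:

// webpack.config.js (webpack <= 3 style)
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    new webpack.optimize.CommonsChunkPlugin({
      name: "vendor",
      filename: "vendor.bundle.js",
      // a module must be shared by at least this many chunks to be extracted;
      // when omitted it defaults to Math.max(2, number of affected chunks)
      minChunks: 2
    })
  ]
};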

File diff suppressed because it is too large.


@@ -1,15 +0,0 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class DedupePlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
compilation.warnings.push(new Error("DedupePlugin: This plugin was removed from webpack. Remove it from your configuration."));
});
}
}
module.exports = DedupePlugin;


@@ -4,37 +4,67 @@
*/
"use strict";
class EnsureChunkConditionsPlugin {
const GraphHelpers = require("../GraphHelpers");
class EnsureChunkConditionsPlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
const triesMap = new Map();
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], (chunks) => {
let changed = false;
chunks.forEach((chunk) => {
chunk.forEachModule((module) => {
if(!module.chunkCondition) return;
if(!module.chunkCondition(chunk)) {
let usedChunks = triesMap.get(module);
if(!usedChunks) triesMap.set(module, usedChunks = new Set());
usedChunks.add(chunk);
const newChunks = [];
chunk.parents.forEach((parent) => {
if(!usedChunks.has(parent)) {
parent.addModule(module);
module.addChunk(parent);
newChunks.push(parent);
compiler.hooks.compilation.tap(
"EnsureChunkConditionsPlugin",
compilation => {
const handler = chunks => {
let changed = false;
for (const module of compilation.modules) {
if (!module.chunkCondition) continue;
const sourceChunks = new Set();
const chunkGroups = new Set();
for (const chunk of module.chunksIterable) {
if (!module.chunkCondition(chunk)) {
sourceChunks.add(chunk);
for (const group of chunk.groupsIterable) {
chunkGroups.add(group);
}
});
module.rewriteChunkInReasons(chunk, newChunks);
chunk.removeModule(module);
changed = true;
}
}
});
});
if(changed) return true;
});
});
if (sourceChunks.size === 0) continue;
const targetChunks = new Set();
chunkGroupLoop: for (const chunkGroup of chunkGroups) {
// Can module be placed in a chunk of this group?
for (const chunk of chunkGroup.chunks) {
if (module.chunkCondition(chunk)) {
targetChunks.add(chunk);
continue chunkGroupLoop;
}
}
// We reached the entrypoint: fail
if (chunkGroup.isInitial()) {
throw new Error(
"Cannot fullfil chunk condition of " + module.identifier()
);
}
// Try placing in all parents
for (const group of chunkGroup.parentsIterable) {
chunkGroups.add(group);
}
}
for (const sourceChunk of sourceChunks) {
GraphHelpers.disconnectChunkAndModule(sourceChunk, module);
}
for (const targetChunk of targetChunks) {
GraphHelpers.connectChunkAndModule(targetChunk, module);
}
}
if (changed) return true;
};
compilation.hooks.optimizeChunksBasic.tap(
"EnsureChunkConditionsPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"EnsureChunkConditionsPlugin",
handler
);
}
);
}
}
module.exports = EnsureChunkConditionsPlugin;


@@ -5,30 +5,94 @@
"use strict";
class FlagIncludedChunksPlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
compilation.plugin("optimize-chunk-ids", (chunks) => {
chunks.forEach((chunkA) => {
chunks.forEach((chunkB) => {
// as we iterate the same iterables twice
// skip if we find ourselves
if(chunkA === chunkB) return;
compiler.hooks.compilation.tap("FlagIncludedChunksPlugin", compilation => {
compilation.hooks.optimizeChunkIds.tap(
"FlagIncludedChunksPlugin",
chunks => {
// prepare two bit integers for each module
// 2^31 is the max number represented as SMI in v8
// we want the bits distributed this way:
// the bit 2^31 is pretty rare and only one module should get it
// so it has a probability of 1 / modulesCount
// the first bit (2^0) is the easiest and every module could get it
// if it doesn't get a better bit
// from bit 2^n to 2^(n+1) there is a probability of p
// so 1 / modulesCount == p^31
// <=> p = sqrt31(1 / modulesCount)
// so we use a modulo of 1 / sqrt31(1 / modulesCount)
const moduleBits = new WeakMap();
const modulesCount = compilation.modules.length;
// instead of swapping A and B just bail
// as we loop twice the current A will be B and B then A
if(chunkA.getNumberOfModules() < chunkB.getNumberOfModules()) return;
// precalculate the modulo values for each bit
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31);
const modulos = Array.from(
{ length: 31 },
(x, i) => Math.pow(modulo, i) | 0
);
if(chunkB.getNumberOfModules() === 0) return;
// is chunkB in chunkA?
for(const m of chunkB.modulesIterable) {
if(!chunkA.containsModule(m)) return;
// iterate all modules to generate bit values
let i = 0;
for (const module of compilation.modules) {
let bit = 30;
while (i % modulos[bit] !== 0) {
bit--;
}
chunkA.ids.push(chunkB.id);
});
});
});
moduleBits.set(module, 1 << bit);
i++;
}
// iterate all chunks to generate bitmaps
const chunkModulesHash = new WeakMap();
for (const chunk of chunks) {
let hash = 0;
for (const module of chunk.modulesIterable) {
hash |= moduleBits.get(module);
}
chunkModulesHash.set(chunk, hash);
}
for (const chunkA of chunks) {
const chunkAHash = chunkModulesHash.get(chunkA);
const chunkAModulesCount = chunkA.getNumberOfModules();
if (chunkAModulesCount === 0) continue;
let bestModule = undefined;
for (const module of chunkA.modulesIterable) {
if (
bestModule === undefined ||
bestModule.getNumberOfChunks() > module.getNumberOfChunks()
)
bestModule = module;
}
loopB: for (const chunkB of bestModule.chunksIterable) {
// as we iterate the same iterables twice
// skip if we find ourselves
if (chunkA === chunkB) continue;
const chunkBModulesCount = chunkB.getNumberOfModules();
// ids for empty chunks are not included
if (chunkBModulesCount === 0) continue;
// instead of swapping A and B just bail
// as we loop twice the current A will be B and B then A
if (chunkAModulesCount > chunkBModulesCount) continue;
// is chunkA in chunkB?
// we do a cheap check for the hash value
const chunkBHash = chunkModulesHash.get(chunkB);
if ((chunkBHash & chunkAHash) !== chunkAHash) continue;
// compare all modules
for (const m of chunkA.modulesIterable) {
if (!chunkB.containsModule(m)) continue loopB;
}
chunkB.ids.push(chunkA.id);
}
}
}
);
});
}
}
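
A small worked example of the bit-distribution math described in the comment above, assuming modulesCount = 1000; it only evaluates the same modulo table the plugin builds, so the numbers are illustrative:

// sketch: derive the per-bit modulo table for 1000 modules
const modulesCount = 1000;
// p = 31st root of (1 / modulesCount); the modulo is its reciprocal
const modulo = 1 / Math.pow(1 / modulesCount, 1 / 31); // ~1.2496
const modulos = Array.from({ length: 31 }, (x, i) => Math.pow(modulo, i) | 0);
// modulos[0] is 1      -> every module qualifies for the lowest bit 2^0
// modulos[30] is ~800  -> roughly one module in 800 gets the highest bit 2^30
// each module is then assigned the highest bit whose modulo divides its index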


@@ -4,55 +4,75 @@
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
class LimitChunkCountPlugin {
/**
* @param {LimitChunkCountPluginOptions=} options options object
*/
constructor(options) {
if(options !== undefined && typeof options !== "object" || Array.isArray(options)) {
throw new Error("Argument should be an options object.\nFor more info on options, see https://webpack.js.org/plugins/");
}
this.options = options || {};
if (!options) options = {};
validateOptions(schema, options, "Limit Chunk Count Plugin");
this.options = options;
}
apply(compiler) {
const options = this.options;
compiler.plugin("compilation", (compilation) => {
compilation.plugin("optimize-chunks-advanced", (chunks) => {
const maxChunks = options.maxChunks;
if(!maxChunks) return;
if(maxChunks < 1) return;
if(chunks.length <= maxChunks) return;
compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"LimitChunkCountPlugin",
chunks => {
const maxChunks = options.maxChunks;
if (!maxChunks) return;
if (maxChunks < 1) return;
if (chunks.length <= maxChunks) return;
if(chunks.length > maxChunks) {
const sortedExtendedPairCombinations = chunks.reduce((combinations, a, idx) => {
// create combination pairs
for(let i = 0; i < idx; i++) {
const b = chunks[i];
combinations.push([b, a]);
}
return combinations;
}, []).map((pair) => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1], a, b];
}).filter((extendedPair) => {
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return extendedPair[1] !== false;
}).sort((a, b) => { // sadly javascript does an inplace sort here
// sort them by size
const diff = b[0] - a[0];
if(diff !== 0) return diff;
return a[1] - b[1];
});
const orderedChunks = chunks.slice().sort((a, b) => a.compareTo(b));
const sortedExtendedPairCombinations = orderedChunks
.reduce((combinations, a, idx) => {
// create combination pairs
for (let i = 0; i < idx; i++) {
const b = orderedChunks[i];
combinations.push([b, a]);
}
return combinations;
}, [])
.map(pair => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1], a, b];
})
.filter(extendedPair => {
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return extendedPair[1] !== false;
})
.sort((a, b) => {
// sadly javascript does an inplace sort here
// sort them by size
const diff1 = b[0] - a[0];
if (diff1 !== 0) return diff1;
const diff2 = a[1] - b[1];
if (diff2 !== 0) return diff2;
const diff3 = a[2].compareTo(b[2]);
if (diff3 !== 0) return diff3;
return a[3].compareTo(b[3]);
});
const pair = sortedExtendedPairCombinations[0];
if(pair && pair[2].integrate(pair[3], "limit")) {
if (pair && pair[2].integrate(pair[3], "limit")) {
chunks.splice(chunks.indexOf(pair[3]), 1);
return true;
}
}
});
);
});
}
}
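
A configuration sketch; maxChunks is the only option used by the code above, and the value is illustrative:

// webpack.config.js (sketch)
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    // repeatedly merge the chunk pair with the best size reduction
    // until no more than 5 chunks remain
    new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 5 })
  ]
};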


@@ -5,24 +5,74 @@
"use strict";
class MergeDuplicateChunksPlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
compilation.plugin("optimize-chunks-basic", (chunks) => {
const map = Object.create(null);
chunks.slice().forEach((chunk) => {
if(chunk.hasRuntime() || chunk.hasEntryModule()) return;
const ident = chunk.getModulesIdent();
const otherChunk = map[ident];
if(otherChunk) {
if(otherChunk.integrate(chunk, "duplicate"))
chunks.splice(chunks.indexOf(chunk), 1);
return;
compiler.hooks.compilation.tap(
"MergeDuplicateChunksPlugin",
compilation => {
compilation.hooks.optimizeChunksBasic.tap(
"MergeDuplicateChunksPlugin",
chunks => {
// remember already tested chunks for performance
const notDuplicates = new Set();
// for each chunk
for (const chunk of chunks) {
// track a Set of all chunks that could be duplicates
let possibleDuplicates;
for (const module of chunk.modulesIterable) {
if (possibleDuplicates === undefined) {
// when possibleDuplicates is not yet set,
// create a new Set from chunks of the current module
// including only chunks with the same number of modules
for (const dup of module.chunksIterable) {
if (
dup !== chunk &&
chunk.getNumberOfModules() === dup.getNumberOfModules() &&
!notDuplicates.has(dup)
) {
// delay allocating the new Set until here to reduce memory pressure
if (possibleDuplicates === undefined) {
possibleDuplicates = new Set();
}
possibleDuplicates.add(dup);
}
}
// when no chunk is possible we can break here
if (possibleDuplicates === undefined) break;
} else {
// validate existing possible duplicates
for (const dup of possibleDuplicates) {
// remove possible duplicate when module is not contained
if (!dup.containsModule(module)) {
possibleDuplicates.delete(dup);
}
}
// when all chunks have been removed we can break here
if (possibleDuplicates.size === 0) break;
}
}
// when we found duplicates
if (
possibleDuplicates !== undefined &&
possibleDuplicates.size > 0
) {
for (const otherChunk of possibleDuplicates) {
if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
// merge them
if (chunk.integrate(otherChunk, "duplicate")) {
chunks.splice(chunks.indexOf(otherChunk), 1);
}
}
}
// don't check already processed chunks twice
notDuplicates.add(chunk);
}
}
map[ident] = chunk;
});
});
});
);
}
);
}
}
module.exports = MergeDuplicateChunksPlugin;
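
The candidate-narrowing idea above can be read in isolation: seed a candidate set from the chunks of the first module, then intersect it with the chunks of every further module. A standalone sketch on plain data (the shapes here are invented for illustration, not webpack's Chunk API):

// chunks are modeled as { modules: Set }; moduleToChunks maps module -> Set of chunks
const findDuplicateChunks = (chunk, moduleToChunks) => {
  let candidates; // lazily allocated, like possibleDuplicates above
  for (const module of chunk.modules) {
    if (candidates === undefined) {
      candidates = new Set();
      for (const other of moduleToChunks.get(module)) {
        // only chunks with the same module count can be exact duplicates
        if (other !== chunk && other.modules.size === chunk.modules.size) {
          candidates.add(other);
        }
      }
    } else {
      // drop every candidate that misses the current module
      for (const other of candidates) {
        if (!other.modules.has(module)) candidates.delete(other);
      }
    }
    if (candidates.size === 0) return [];
  }
  return candidates === undefined ? [] : [...candidates];
};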


@@ -4,61 +4,78 @@
*/
"use strict";
const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/MinChunkSizePlugin.json");
/** @typedef {import("../../declarations/plugins/optimize/MinChunkSizePlugin").MinChunkSizePluginOptions} MinChunkSizePluginOptions */
class MinChunkSizePlugin {
/**
* @param {MinChunkSizePluginOptions} options options object
*/
constructor(options) {
if(typeof options !== "object" || Array.isArray(options)) {
throw new Error("Argument should be an options object.\nFor more info on options, see https://webpack.js.org/plugins/");
}
validateOptions(schema, options, "Min Chunk Size Plugin");
this.options = options;
}
apply(compiler) {
const options = this.options;
const minChunkSize = options.minChunkSize;
compiler.plugin("compilation", (compilation) => {
compilation.plugin("optimize-chunks-advanced", (chunks) => {
const equalOptions = {
chunkOverhead: 1,
entryChunkMultiplicator: 1
};
compiler.hooks.compilation.tap("MinChunkSizePlugin", compilation => {
compilation.hooks.optimizeChunksAdvanced.tap(
"MinChunkSizePlugin",
chunks => {
const equalOptions = {
chunkOverhead: 1,
entryChunkMultiplicator: 1
};
const sortedSizeFilteredExtendedPairCombinations = chunks.reduce((combinations, a, idx) => {
// create combination pairs
for(let i = 0; i < idx; i++) {
const b = chunks[i];
combinations.push([b, a]);
}
return combinations;
}, []).filter((pair) => {
// check if one of the chunks sizes is smaller than the minChunkSize
const p0SmallerThanMinChunkSize = pair[0].size(equalOptions) < minChunkSize;
const p1SmallerThanMinChunkSize = pair[1].size(equalOptions) < minChunkSize;
return p0SmallerThanMinChunkSize || p1SmallerThanMinChunkSize;
}).map((pair) => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1]];
}).filter((pair) => {
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return pair[1] !== false;
}).sort((a, b) => { // sadly javascript does an inplace sort here
// sort by size
const diff = b[0] - a[0];
if(diff !== 0) return diff;
return a[1] - b[1];
});
const sortedSizeFilteredExtendedPairCombinations = chunks
.reduce((combinations, a, idx) => {
// create combination pairs
for (let i = 0; i < idx; i++) {
const b = chunks[i];
combinations.push([b, a]);
}
return combinations;
}, [])
.filter(pair => {
// check if one of the chunks sizes is smaller than the minChunkSize
const p0SmallerThanMinChunkSize =
pair[0].size(equalOptions) < minChunkSize;
const p1SmallerThanMinChunkSize =
pair[1].size(equalOptions) < minChunkSize;
return p0SmallerThanMinChunkSize || p1SmallerThanMinChunkSize;
})
.map(pair => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1]];
})
.filter(pair => {
// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return pair[1] !== false;
})
.sort((a, b) => {
// sadly javascript does an inplace sort here
// sort by size
const diff = b[0] - a[0];
if (diff !== 0) return diff;
return a[1] - b[1];
});
if(sortedSizeFilteredExtendedPairCombinations.length === 0) return;
if (sortedSizeFilteredExtendedPairCombinations.length === 0) return;
const pair = sortedSizeFilteredExtendedPairCombinations[0];
const pair = sortedSizeFilteredExtendedPairCombinations[0];
pair[2].integrate(pair[3], "min-size");
chunks.splice(chunks.indexOf(pair[3]), 1);
return true;
});
pair[2].integrate(pair[3], "min-size");
chunks.splice(chunks.indexOf(pair[3]), 1);
return true;
}
);
});
}
}
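
A configuration sketch; minChunkSize is the threshold the code above compares chunk sizes against, and the value is illustrative:

// webpack.config.js (sketch)
const webpack = require("webpack");

module.exports = {
  // ...
  plugins: [
    // keep merging the chunk pair with the best size reduction
    // while any chunk is still smaller than minChunkSize
    new webpack.optimize.MinChunkSizePlugin({ minChunkSize: 10000 })
  ]
};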


@@ -8,221 +8,370 @@ const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency
const ModuleHotAcceptDependency = require("../dependencies/ModuleHotAcceptDependency");
const ModuleHotDeclineDependency = require("../dependencies/ModuleHotDeclineDependency");
const ConcatenatedModule = require("./ConcatenatedModule");
const HarmonyExportImportedSpecifierDependency = require("../dependencies/HarmonyExportImportedSpecifierDependency");
const HarmonyCompatibilityDependency = require("../dependencies/HarmonyCompatibilityDependency");
const StackedSetMap = require("../util/StackedSetMap");
function formatBailoutReason(msg) {
const formatBailoutReason = msg => {
return "ModuleConcatenation bailout: " + msg;
}
};
class ModuleConcatenationPlugin {
constructor(options) {
if(typeof options !== "object") options = {};
if (typeof options !== "object") options = {};
this.options = options;
}
apply(compiler) {
compiler.plugin("compilation", (compilation, params) => {
params.normalModuleFactory.plugin("parser", (parser, parserOptions) => {
parser.plugin("call eval", () => {
parser.state.module.meta.hasEval = true;
});
});
const bailoutReasonMap = new Map();
function setBailoutReason(module, reason) {
bailoutReasonMap.set(module, reason);
module.optimizationBailout.push(typeof reason === "function" ? (rs) => formatBailoutReason(reason(rs)) : formatBailoutReason(reason));
}
function getBailoutReason(module, requestShortener) {
const reason = bailoutReasonMap.get(module);
if(typeof reason === "function") return reason(requestShortener);
return reason;
}
compilation.plugin("optimize-chunk-modules", (chunks, modules) => {
const relevantModules = [];
const possibleInners = new Set();
for(const module of modules) {
// Only harmony modules are valid for optimization
if(!module.meta || !module.meta.harmonyModule || !module.dependencies.some(d => d instanceof HarmonyCompatibilityDependency)) {
setBailoutReason(module, "Module is not an ECMAScript module");
continue;
}
// Because of variable renaming we can't use modules with eval
if(module.meta && module.meta.hasEval) {
setBailoutReason(module, "Module uses eval()");
continue;
}
// Exports must be known (and not dynamic)
if(!Array.isArray(module.providedExports)) {
setBailoutReason(module, "Module exports are unknown");
continue;
}
// Using dependency variables is not possible as this wraps the code in a function
if(module.variables.length > 0) {
setBailoutReason(module, `Module uses injected variables (${module.variables.map(v => v.name).join(", ")})`);
continue;
}
// Hot Module Replacement needs its own module to work correctly
if(module.dependencies.some(dep => dep instanceof ModuleHotAcceptDependency || dep instanceof ModuleHotDeclineDependency)) {
setBailoutReason(module, "Module uses Hot Module Replacement");
continue;
}
relevantModules.push(module);
// Module must not be an entry point
if(module.getChunks().some(chunk => chunk.entryModule === module)) {
setBailoutReason(module, "Module is an entry point");
continue;
}
// Module must only be used by Harmony Imports
const nonHarmonyReasons = module.reasons.filter(reason => !(reason.dependency instanceof HarmonyImportDependency));
if(nonHarmonyReasons.length > 0) {
const importingModules = new Set(nonHarmonyReasons.map(r => r.module));
const importingModuleTypes = new Map(Array.from(importingModules).map(m => [m, new Set(nonHarmonyReasons.filter(r => r.module === m).map(r => r.dependency.type).sort())]));
setBailoutReason(module, (requestShortener) => {
const names = Array.from(importingModules).map(m => `${m.readableIdentifier(requestShortener)} (referenced with ${Array.from(importingModuleTypes.get(m)).join(", ")})`).sort();
return `Module is referenced from these modules with unsupported syntax: ${names.join(", ")}`;
});
continue;
}
possibleInners.add(module);
}
// sort by depth
// modules with lower depth are more likely suited as roots
// this improves performance, because modules already selected as inner are skipped
relevantModules.sort((a, b) => {
return a.depth - b.depth;
});
const concatConfigurations = [];
const usedAsInner = new Set();
for(const currentRoot of relevantModules) {
// when used by another configuration as inner:
// the other configuration is better and we can skip this one
if(usedAsInner.has(currentRoot))
continue;
// create a configuration with the root
const currentConfiguration = new ConcatConfiguration(currentRoot);
// cache failures to add modules
const failureCache = new Map();
// try to add all imports
for(const imp of this.getImports(currentRoot)) {
const problem = this.tryToAdd(currentConfiguration, imp, possibleInners, failureCache);
if(problem) {
failureCache.set(imp, problem);
currentConfiguration.addWarning(imp, problem);
}
}
if(!currentConfiguration.isEmpty()) {
concatConfigurations.push(currentConfiguration);
for(const module of currentConfiguration.modules) {
if(module !== currentConfiguration.rootModule)
usedAsInner.add(module);
}
}
}
// HACK: Sort configurations by length and start with the longest one
// to get the biggest groups possible. Used modules are marked with usedModules
// TODO: Allow to reuse existing configuration while trying to add dependencies.
// This would improve performance. O(n^2) -> O(n)
concatConfigurations.sort((a, b) => {
return b.modules.size - a.modules.size;
});
const usedModules = new Set();
for(const concatConfiguration of concatConfigurations) {
if(usedModules.has(concatConfiguration.rootModule))
continue;
const newModule = new ConcatenatedModule(concatConfiguration.rootModule, Array.from(concatConfiguration.modules));
concatConfiguration.sortWarnings();
for(const warning of concatConfiguration.warnings) {
newModule.optimizationBailout.push((requestShortener) => {
const reason = getBailoutReason(warning[0], requestShortener);
const reasonWithPrefix = reason ? ` (<- ${reason})` : "";
if(warning[0] === warning[1])
return formatBailoutReason(`Cannot concat with ${warning[0].readableIdentifier(requestShortener)}${reasonWithPrefix}`);
else
return formatBailoutReason(`Cannot concat with ${warning[0].readableIdentifier(requestShortener)} because of ${warning[1].readableIdentifier(requestShortener)}${reasonWithPrefix}`);
});
}
const chunks = concatConfiguration.rootModule.getChunks();
for(const m of concatConfiguration.modules) {
usedModules.add(m);
chunks.forEach(chunk => chunk.removeModule(m));
}
chunks.forEach(chunk => {
chunk.addModule(newModule);
newModule.addChunk(chunk);
if(chunk.entryModule === concatConfiguration.rootModule)
chunk.entryModule = newModule;
compiler.hooks.compilation.tap(
"ModuleConcatenationPlugin",
(compilation, { normalModuleFactory }) => {
const handler = (parser, parserOptions) => {
parser.hooks.call.for("eval").tap("ModuleConcatenationPlugin", () => {
// Because of variable renaming we can't use modules with eval.
parser.state.module.buildMeta.moduleConcatenationBailout = "eval()";
});
compilation.modules.push(newModule);
newModule.reasons.forEach(reason => reason.dependency.module = newModule);
newModule.dependencies.forEach(dep => {
if(dep.module) {
dep.module.reasons.forEach(reason => {
if(reason.dependency === dep)
reason.module = newModule;
});
};
normalModuleFactory.hooks.parser
.for("javascript/auto")
.tap("ModuleConcatenationPlugin", handler);
normalModuleFactory.hooks.parser
.for("javascript/dynamic")
.tap("ModuleConcatenationPlugin", handler);
normalModuleFactory.hooks.parser
.for("javascript/esm")
.tap("ModuleConcatenationPlugin", handler);
const bailoutReasonMap = new Map();
const setBailoutReason = (module, reason) => {
bailoutReasonMap.set(module, reason);
module.optimizationBailout.push(
typeof reason === "function"
? rs => formatBailoutReason(reason(rs))
: formatBailoutReason(reason)
);
};
const getBailoutReason = (module, requestShortener) => {
const reason = bailoutReasonMap.get(module);
if (typeof reason === "function") return reason(requestShortener);
return reason;
};
compilation.hooks.optimizeChunkModules.tap(
"ModuleConcatenationPlugin",
(chunks, modules) => {
const relevantModules = [];
const possibleInners = new Set();
for (const module of modules) {
// Only harmony modules are valid for optimization
if (
!module.buildMeta ||
module.buildMeta.exportsType !== "namespace" ||
!module.dependencies.some(
d => d instanceof HarmonyCompatibilityDependency
)
) {
setBailoutReason(module, "Module is not an ECMAScript module");
continue;
}
// Some expressions are not compatible with module concatenation
// because they may produce unexpected results. The plugin bails out
// if some were detected upfront.
if (
module.buildMeta &&
module.buildMeta.moduleConcatenationBailout
) {
setBailoutReason(
module,
`Module uses ${module.buildMeta.moduleConcatenationBailout}`
);
continue;
}
// Exports must be known (and not dynamic)
if (!Array.isArray(module.buildMeta.providedExports)) {
setBailoutReason(module, "Module exports are unknown");
continue;
}
// Using dependency variables is not possible as this wraps the code in a function
if (module.variables.length > 0) {
setBailoutReason(
module,
`Module uses injected variables (${module.variables
.map(v => v.name)
.join(", ")})`
);
continue;
}
// Hot Module Replacement needs its own module to work correctly
if (
module.dependencies.some(
dep =>
dep instanceof ModuleHotAcceptDependency ||
dep instanceof ModuleHotDeclineDependency
)
) {
setBailoutReason(module, "Module uses Hot Module Replacement");
continue;
}
relevantModules.push(module);
// Module must not be an entry point
if (module.isEntryModule()) {
setBailoutReason(module, "Module is an entry point");
continue;
}
// Module must be in any chunk (we don't want to do useless work)
if (module.getNumberOfChunks() === 0) {
setBailoutReason(module, "Module is not in any chunk");
continue;
}
// Module must only be used by Harmony Imports
const nonHarmonyReasons = module.reasons.filter(
reason =>
!reason.dependency ||
!(reason.dependency instanceof HarmonyImportDependency)
);
if (nonHarmonyReasons.length > 0) {
const importingModules = new Set(
nonHarmonyReasons.map(r => r.module).filter(Boolean)
);
const importingExplanations = new Set(
nonHarmonyReasons.map(r => r.explanation).filter(Boolean)
);
const importingModuleTypes = new Map(
Array.from(importingModules).map(
m => /** @type {[string, Set]} */ ([
m,
new Set(
nonHarmonyReasons
.filter(r => r.module === m)
.map(r => r.dependency.type)
.sort()
)
])
)
);
setBailoutReason(module, requestShortener => {
const names = Array.from(importingModules)
.map(
m =>
`${m.readableIdentifier(
requestShortener
)} (referenced with ${Array.from(
importingModuleTypes.get(m)
).join(", ")})`
)
.sort();
const explanations = Array.from(importingExplanations).sort();
if (names.length > 0 && explanations.length === 0) {
return `Module is referenced from these modules with unsupported syntax: ${names.join(
", "
)}`;
} else if (names.length === 0 && explanations.length > 0) {
return `Module is referenced by: ${explanations.join(
", "
)}`;
} else if (names.length > 0 && explanations.length > 0) {
return `Module is referenced from these modules with unsupported syntax: ${names.join(
", "
)} and by: ${explanations.join(", ")}`;
} else {
return "Module is referenced in a unsupported way";
}
});
continue;
}
possibleInners.add(module);
}
// sort by depth
// modules with lower depth are more likely suited as roots
// this improves performance, because modules already selected as inner are skipped
relevantModules.sort((a, b) => {
return a.depth - b.depth;
});
const concatConfigurations = [];
const usedAsInner = new Set();
for (const currentRoot of relevantModules) {
// when used by another configuration as inner:
// the other configuration is better and we can skip this one
if (usedAsInner.has(currentRoot)) continue;
// create a configuration with the root
const currentConfiguration = new ConcatConfiguration(currentRoot);
// cache failures to add modules
const failureCache = new Map();
// try to add all imports
for (const imp of this._getImports(compilation, currentRoot)) {
const problem = this._tryToAdd(
compilation,
currentConfiguration,
imp,
possibleInners,
failureCache
);
if (problem) {
failureCache.set(imp, problem);
currentConfiguration.addWarning(imp, problem);
}
}
if (!currentConfiguration.isEmpty()) {
concatConfigurations.push(currentConfiguration);
for (const module of currentConfiguration.getModules()) {
if (module !== currentConfiguration.rootModule) {
usedAsInner.add(module);
}
}
}
}
// HACK: Sort configurations by length and start with the longest one
// to get the biggest groups possible. Used modules are marked with usedModules
// TODO: Allow reusing an existing configuration while trying to add dependencies.
// This would improve performance. O(n^2) -> O(n)
concatConfigurations.sort((a, b) => {
return b.modules.size - a.modules.size;
});
const usedModules = new Set();
for (const concatConfiguration of concatConfigurations) {
if (usedModules.has(concatConfiguration.rootModule)) continue;
const modules = concatConfiguration.getModules();
const rootModule = concatConfiguration.rootModule;
const newModule = new ConcatenatedModule(
rootModule,
Array.from(modules),
ConcatenatedModule.createConcatenationList(
rootModule,
modules,
compilation
)
);
for (const warning of concatConfiguration.getWarningsSorted()) {
newModule.optimizationBailout.push(requestShortener => {
const reason = getBailoutReason(warning[0], requestShortener);
const reasonWithPrefix = reason ? ` (<- ${reason})` : "";
if (warning[0] === warning[1]) {
return formatBailoutReason(
`Cannot concat with ${warning[0].readableIdentifier(
requestShortener
)}${reasonWithPrefix}`
);
} else {
return formatBailoutReason(
`Cannot concat with ${warning[0].readableIdentifier(
requestShortener
)} because of ${warning[1].readableIdentifier(
requestShortener
)}${reasonWithPrefix}`
);
}
});
}
const chunks = concatConfiguration.rootModule.getChunks();
for (const m of modules) {
usedModules.add(m);
for (const chunk of chunks) {
chunk.removeModule(m);
}
}
for (const chunk of chunks) {
chunk.addModule(newModule);
newModule.addChunk(chunk);
if (chunk.entryModule === concatConfiguration.rootModule) {
chunk.entryModule = newModule;
}
}
compilation.modules.push(newModule);
for (const reason of newModule.reasons) {
if (reason.dependency.module === concatConfiguration.rootModule)
reason.dependency.module = newModule;
if (
reason.dependency.redirectedModule ===
concatConfiguration.rootModule
)
reason.dependency.redirectedModule = newModule;
}
// TODO: remove when LTS node version contains fixed v8 version
// @see https://github.com/webpack/webpack/pull/6613
// Turbofan does not correctly inline for-of loops with polymorphic input arrays.
// Work around issue by using a standard for loop and assigning dep.module.reasons
for (let i = 0; i < newModule.dependencies.length; i++) {
let dep = newModule.dependencies[i];
if (dep.module) {
let reasons = dep.module.reasons;
for (let j = 0; j < reasons.length; j++) {
let reason = reasons[j];
if (reason.dependency === dep) {
reason.module = newModule;
}
}
}
}
}
compilation.modules = compilation.modules.filter(
m => !usedModules.has(m)
);
}
);
}
);
}
_getImports(compilation, module) {
return new Set(
module.dependencies
// Get reference info only for harmony Dependencies
.map(dep => {
if (!(dep instanceof HarmonyImportDependency)) return null;
if (!compilation) return dep.getReference();
return compilation.getDependencyReference(module, dep);
})
// Reference is valid and has a module
// Dependencies are simple enough to concat them
.filter(
ref =>
ref &&
ref.module &&
(Array.isArray(ref.importedNames) ||
Array.isArray(ref.module.buildMeta.providedExports))
)
// Take the imported module
.map(ref => ref.module)
);
}
_tryToAdd(compilation, config, module, possibleModules, failureCache) {
const cacheEntry = failureCache.get(module);
if (cacheEntry) {
return cacheEntry;
}
// Already added?
if (config.has(module)) {
return null;
}
// Not possible to add?
if (!possibleModules.has(module)) {
failureCache.set(module, module); // cache failures for performance
return module;
}
// module must be in the same chunks
if (!config.rootModule.hasEqualsChunks(module)) {
failureCache.set(module, module); // cache failures for performance
return module;
}
@@ -234,33 +383,58 @@ class ModuleConcatenationPlugin {
testConfig.add(module);
// Every module which depends on the added module must be in the configuration too.
for (const reason of module.reasons) {
// Modules that are not used can be ignored
if (
reason.module.factoryMeta.sideEffectFree &&
reason.module.used === false
)
continue;
const problem = this._tryToAdd(
compilation,
testConfig,
reason.module,
possibleModules,
failureCache
);
if (problem) {
failureCache.set(module, problem); // cache failures for performance
return problem;
}
}
// Commit experimental changes
config.set(testConfig);
// Eagerly try to add imports too if possible
for (const imp of this._getImports(compilation, module)) {
const problem = this._tryToAdd(
compilation,
config,
imp,
possibleModules,
failureCache
);
if (problem) {
config.addWarning(imp, problem);
}
}
return null;
}
}
class ConcatConfiguration {
constructor(rootModule) {
constructor(rootModule, cloneFrom) {
this.rootModule = rootModule;
this.modules = new Set([rootModule]);
this.warnings = new Map();
if (cloneFrom) {
this.modules = cloneFrom.modules.createChild(5);
this.warnings = cloneFrom.warnings.createChild(5);
} else {
this.modules = new StackedSetMap();
this.modules.add(rootModule);
this.warnings = new StackedSetMap();
}
}
add(module) {
@@ -279,29 +453,30 @@ class ConcatConfiguration {
this.warnings.set(module, problem);
}
sortWarnings() {
this.warnings = new Map(Array.from(this.warnings).sort((a, b) => {
const ai = a[0].identifier();
const bi = b[0].identifier();
if(ai < bi) return -1;
if(ai > bi) return 1;
return 0;
}));
}
getWarningsSorted() {
return new Map(
this.warnings.asPairArray().sort((a, b) => {
const ai = a[0].identifier();
const bi = b[0].identifier();
if (ai < bi) return -1;
if (ai > bi) return 1;
return 0;
})
);
}
getModules() {
return this.modules.asSet();
}
clone() {
const clone = new ConcatConfiguration(this.rootModule);
for(const module of this.modules)
clone.add(module);
for(const pair of this.warnings)
clone.addWarning(pair[0], pair[1]);
return clone;
return new ConcatConfiguration(this.rootModule, this);
}
set(config) {
this.rootModule = config.rootModule;
this.modules = new Set(config.modules);
this.warnings = new Map(config.warnings);
this.modules = config.modules;
this.warnings = config.warnings;
}
}
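For reference, a minimal sketch (not part of this commit) of how the concatenation pass above is typically switched on and how the bailout messages it pushes can be surfaced; the entry path and output directory are placeholder values, and in webpack 4 the same pass is also exposed as optimization.concatenateModules.

// Hedged sketch: enable scope hoisting explicitly and print bailout reasons.
const path = require("path");
const webpack = require("webpack");

module.exports = {
  mode: "production",
  entry: "./src/index.js", // placeholder entry
  output: {
    path: path.resolve(__dirname, "dist"), // placeholder output directory
    filename: "[name].js"
  },
  plugins: [
    // Registers the plugin shown above.
    new webpack.optimize.ModuleConcatenationPlugin()
  ],
  stats: {
    // Prints the formatBailoutReason() messages collected per module.
    optimizationBailout: true
  }
};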


@@ -4,97 +4,130 @@
*/
"use strict";
// TODO webpack 5 remove this plugin
// It has been split into separate plugins for modules and chunks
class OccurrenceOrderPlugin {
constructor(preferEntry) {
if(preferEntry !== undefined && typeof preferEntry !== "boolean") {
throw new Error("Argument should be a boolean.\nFor more info on this plugin, see https://webpack.js.org/plugins/");
if (preferEntry !== undefined && typeof preferEntry !== "boolean") {
throw new Error(
"Argument should be a boolean.\nFor more info on this plugin, see https://webpack.js.org/plugins/"
);
}
this.preferEntry = preferEntry;
}
apply(compiler) {
const preferEntry = this.preferEntry;
compiler.plugin("compilation", (compilation) => {
compilation.plugin("optimize-module-order", (modules) => {
const occursInInitialChunksMap = new Map();
const occursInAllChunksMap = new Map();
compiler.hooks.compilation.tap("OccurrenceOrderPlugin", compilation => {
compilation.hooks.optimizeModuleOrder.tap(
"OccurrenceOrderPlugin",
modules => {
const occursInInitialChunksMap = new Map();
const occursInAllChunksMap = new Map();
const initialChunkChunkMap = new Map();
const entryCountMap = new Map();
modules.forEach(m => {
let initial = 0;
let entry = 0;
m.forEachChunk(c => {
if(c.isInitial()) initial++;
if(c.entryModule === m) entry++;
});
initialChunkChunkMap.set(m, initial);
entryCountMap.set(m, entry);
});
const initialChunkChunkMap = new Map();
const entryCountMap = new Map();
for (const m of modules) {
let initial = 0;
let entry = 0;
for (const c of m.chunksIterable) {
if (c.canBeInitial()) initial++;
if (c.entryModule === m) entry++;
}
initialChunkChunkMap.set(m, initial);
entryCountMap.set(m, entry);
}
const countOccursInEntry = (sum, r) => {
if(!r.module) return sum;
return sum + initialChunkChunkMap.get(r.module);
};
const countOccurs = (sum, r) => {
if(!r.module) return sum;
return sum + r.module.getNumberOfChunks();
};
const countOccursInEntry = (sum, r) => {
if (!r.module) {
return sum;
}
return sum + initialChunkChunkMap.get(r.module);
};
const countOccurs = (sum, r) => {
if (!r.module) {
return sum;
}
let factor = 1;
if (typeof r.dependency.getNumberOfIdOccurrences === "function") {
factor = r.dependency.getNumberOfIdOccurrences();
}
if (factor === 0) {
return sum;
}
return sum + factor * r.module.getNumberOfChunks();
};
if(preferEntry) {
modules.forEach(m => {
const result = m.reasons.reduce(countOccursInEntry, 0) + initialChunkChunkMap.get(m) + entryCountMap.get(m);
occursInInitialChunksMap.set(m, result);
if (preferEntry) {
for (const m of modules) {
const result =
m.reasons.reduce(countOccursInEntry, 0) +
initialChunkChunkMap.get(m) +
entryCountMap.get(m);
occursInInitialChunksMap.set(m, result);
}
}
const originalOrder = new Map();
let i = 0;
for (const m of modules) {
const result =
m.reasons.reduce(countOccurs, 0) +
m.getNumberOfChunks() +
entryCountMap.get(m);
occursInAllChunksMap.set(m, result);
originalOrder.set(m, i++);
}
modules.sort((a, b) => {
if (preferEntry) {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
}
const aOccurs = occursInAllChunksMap.get(a);
const bOccurs = occursInAllChunksMap.get(b);
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
compilation.hooks.optimizeChunkOrder.tap(
"OccurrenceOrderPlugin",
chunks => {
const occursInInitialChunksMap = new Map();
const originalOrder = new Map();
let i = 0;
for (const c of chunks) {
let occurs = 0;
for (const chunkGroup of c.groupsIterable) {
for (const parent of chunkGroup.parentsIterable) {
if (parent.isInitial()) occurs++;
}
}
occursInInitialChunksMap.set(c, occurs);
originalOrder.set(c, i++);
}
modules.forEach(m => {
const result = m.reasons.reduce(countOccurs, 0) + m.getNumberOfChunks() + entryCountMap.get(m);
occursInAllChunksMap.set(m, result);
});
modules.sort((a, b) => {
if(preferEntry) {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if(aEntryOccurs > bEntryOccurs) return -1;
if(aEntryOccurs < bEntryOccurs) return 1;
}
const aOccurs = occursInAllChunksMap.get(a);
const bOccurs = occursInAllChunksMap.get(b);
if(aOccurs > bOccurs) return -1;
if(aOccurs < bOccurs) return 1;
if(a.index > b.index) return 1;
if(a.index < b.index) return -1;
return 0;
});
});
compilation.plugin("optimize-chunk-order", (chunks) => {
const occursInInitialChunksMap = new Map();
chunks.forEach(c => {
const result = c.parents.reduce((sum, p) => {
if(p.isInitial()) return sum + 1;
return sum;
}, 0);
return occursInInitialChunksMap.set(c, result);
});
function occurs(c) {
return c.blocks.length;
}
chunks.sort((a, b) => {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if(aEntryOccurs > bEntryOccurs) return -1;
if(aEntryOccurs < bEntryOccurs) return 1;
const aOccurs = occurs(a);
const bOccurs = occurs(b);
if(aOccurs > bOccurs) return -1;
if(aOccurs < bOccurs) return 1;
return a.compareTo(b);
});
});
chunks.sort((a, b) => {
const aEntryOccurs = occursInInitialChunksMap.get(a);
const bEntryOccurs = occursInInitialChunksMap.get(b);
if (aEntryOccurs > bEntryOccurs) return -1;
if (aEntryOccurs < bEntryOccurs) return 1;
const aOccurs = a.getNumberOfGroups();
const bOccurs = b.getNumberOfGroups();
if (aOccurs > bOccurs) return -1;
if (aOccurs < bOccurs) return 1;
const orgA = originalOrder.get(a);
const orgB = originalOrder.get(b);
return orgA - orgB;
});
}
);
});
}
}
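A minimal sketch (not part of this commit) of how this ordering plugin is usually registered; the entry path is a placeholder, and the boolean corresponds to the preferEntry flag validated in the constructor above.

// Hedged sketch: request occurrence-order ids explicitly.
const webpack = require("webpack");

module.exports = {
  entry: "./src/index.js", // placeholder entry
  plugins: [
    // true = preferEntry: modules occurring in entry chunks sort first.
    new webpack.optimize.OccurrenceOrderPlugin(true)
  ]
};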


@@ -5,16 +5,37 @@
"use strict";
class RemoveEmptyChunksPlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], (chunks) => {
chunks.filter((chunk) => chunk.isEmpty() && !chunk.hasRuntime() && !chunk.hasEntryModule())
.forEach((chunk) => {
compiler.hooks.compilation.tap("RemoveEmptyChunksPlugin", compilation => {
const handler = chunks => {
for (let i = chunks.length - 1; i >= 0; i--) {
const chunk = chunks[i];
if (
chunk.isEmpty() &&
!chunk.hasRuntime() &&
!chunk.hasEntryModule()
) {
chunk.remove("empty");
chunks.splice(i, 1);
}
}
};
compilation.hooks.optimizeChunksBasic.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeChunksAdvanced.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"RemoveEmptyChunksPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksAdvanced.tap(
"RemoveEmptyChunksPlugin",
handler
);
});
}
}
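The removal criterion used by the handler above, pulled out as a standalone predicate for illustration; the chunk argument is assumed to expose the same isEmpty/hasRuntime/hasEntryModule methods.

// Hedged sketch: the condition a chunk must meet to be dropped.
const isRemovableChunk = chunk =>
  chunk.isEmpty() && !chunk.hasRuntime() && !chunk.hasEntryModule();

// In a webpack 4 config this pass can be toggled via optimization.removeEmptyChunks,
// which is enabled by default.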


@@ -4,61 +4,123 @@
*/
"use strict";
function hasModule(chunk, module, checkedChunks) {
if(chunk.containsModule(module)) return [chunk];
if(chunk.parents.length === 0) return false;
return allHaveModule(chunk.parents.filter((c) => {
return !checkedChunks.has(c);
}), module, checkedChunks);
}
const Queue = require("../util/Queue");
const { intersect } = require("../util/SetHelpers");
function allHaveModule(someChunks, module, checkedChunks) {
if(!checkedChunks) checkedChunks = new Set();
var chunks = new Set();
for(var i = 0; i < someChunks.length; i++) {
checkedChunks.add(someChunks[i]);
var subChunks = hasModule(someChunks[i], module, checkedChunks);
if(!subChunks) return false;
for(var index = 0; index < subChunks.length; index++) {
var item = subChunks[index];
chunks.add(item);
const getParentChunksWithModule = (currentChunk, module) => {
const chunks = [];
const stack = new Set(currentChunk.parentsIterable);
for (const chunk of stack) {
if (chunk.containsModule(module)) {
chunks.push(chunk);
} else {
for (const parent of chunk.parentsIterable) {
stack.add(parent);
}
}
}
return chunks;
}
};
class RemoveParentModulesPlugin {
apply(compiler) {
compiler.plugin("compilation", (compilation) => {
compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], (chunks) => {
for(var index = 0; index < chunks.length; index++) {
var chunk = chunks[index];
if(chunk.parents.length === 0) continue;
compiler.hooks.compilation.tap("RemoveParentModulesPlugin", compilation => {
const handler = (chunks, chunkGroups) => {
const queue = new Queue();
const availableModulesMap = new WeakMap();
// TODO consider Map when performance has improved https://gist.github.com/sokra/b36098368da7b8f6792fd7c85fca6311
var cache = Object.create(null);
var modules = chunk.getModules();
for(var i = 0; i < modules.length; i++) {
var module = modules[i];
for (const chunkGroup of compilation.entrypoints.values()) {
// initialize available modules for chunks without parents
availableModulesMap.set(chunkGroup, new Set());
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
var dId = module.getChunkIdsIdent();
var parentChunksWithModule;
if(dId === null) {
parentChunksWithModule = allHaveModule(chunk.parents, module);
} else if(dId in cache) {
parentChunksWithModule = cache[dId];
} else {
parentChunksWithModule = cache[dId] = allHaveModule(chunk.parents, module);
while (queue.length > 0) {
const chunkGroup = queue.dequeue();
let availableModules = availableModulesMap.get(chunkGroup);
let changed = false;
for (const parent of chunkGroup.parentsIterable) {
const availableModulesInParent = availableModulesMap.get(parent);
if (availableModulesInParent !== undefined) {
// If we know the available modules in parent: process these
if (availableModules === undefined) {
// if we have not own info yet: create new entry
availableModules = new Set(availableModulesInParent);
for (const chunk of parent.chunks) {
for (const m of chunk.modulesIterable) {
availableModules.add(m);
}
}
availableModulesMap.set(chunkGroup, availableModules);
changed = true;
} else {
for (const m of availableModules) {
if (
!parent.containsModule(m) &&
!availableModulesInParent.has(m)
) {
availableModules.delete(m);
changed = true;
}
}
}
}
}
if (changed) {
// if something changed: enqueue our children
for (const child of chunkGroup.childrenIterable) {
queue.enqueue(child);
}
}
}
// now we have available modules for every chunk
for (const chunk of chunks) {
const availableModulesSets = Array.from(
chunk.groupsIterable,
chunkGroup => availableModulesMap.get(chunkGroup)
);
if (availableModulesSets.some(s => s === undefined)) continue; // No info about this chunk group
const availableModules =
availableModulesSets.length === 1
? availableModulesSets[0]
: intersect(availableModulesSets);
const numberOfModules = chunk.getNumberOfModules();
const toRemove = new Set();
if (numberOfModules < availableModules.size) {
for (const m of chunk.modulesIterable) {
if (availableModules.has(m)) {
toRemove.add(m);
}
}
} else {
for (const m of availableModules) {
if (chunk.containsModule(m)) {
toRemove.add(m);
}
}
}
for (const module of toRemove) {
module.rewriteChunkInReasons(
chunk,
getParentChunksWithModule(chunk, module)
);
chunk.removeModule(module);
}
}
};
compilation.hooks.optimizeChunksBasic.tap(
"RemoveParentModulesPlugin",
handler
);
compilation.hooks.optimizeExtractedChunksBasic.tap(
"RemoveParentModulesPlugin",
handler
);
});
}
}
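A minimal sketch (not part of this commit) of the set intersection the handler above relies on, assuming the intersect helper from ../util/SetHelpers takes an array of Sets and returns the elements common to all of them.

// Hedged sketch: one possible shape of the intersect helper.
const intersect = sets => {
  const list = Array.from(sets);
  if (list.length === 0) return new Set();
  // Start from the smallest set so membership checks stay cheap.
  list.sort((a, b) => a.size - b.size);
  const [first, ...rest] = list;
  const result = new Set();
  for (const item of first) {
    if (rest.every(set => set.has(item))) result.add(item);
  }
  return result;
};

// Example: modules available in every parent chunk group.
// intersect([new Set([1, 2]), new Set([2, 3])]) -> Set { 2 }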


@@ -1,9 +0,0 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const UglifyJsPlugin = require("uglifyjs-webpack-plugin");
module.exports = UglifyJsPlugin;
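The hunk header above shows this alias file being removed. A hedged webpack 4 sketch (not part of this commit) of configuring the minimizer directly with uglifyjs-webpack-plugin instead; the cache/parallel/sourceMap values are illustrative.

// Hedged sketch: configure the minimizer explicitly instead of the removed alias.
const UglifyJsPlugin = require("uglifyjs-webpack-plugin");

module.exports = {
  mode: "production",
  optimization: {
    minimize: true,
    minimizer: [
      new UglifyJsPlugin({
        cache: true,     // reuse results across builds
        parallel: true,  // minify in parallel worker processes
        sourceMap: true  // keep source maps through minification
      })
    ]
  }
};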