composer and npm

This commit is contained in:
Henry Whitaker
2020-08-25 00:59:44 +01:00
parent 6726d93cc6
commit c8f853dc84
2504 changed files with 88530 additions and 41367 deletions

View File

@@ -16,8 +16,89 @@ const HarmonyImportSpecifierDependency = require("../dependencies/HarmonyImportS
* @typedef {Object} ExportInModule
* @property {Module} module the module
* @property {string} exportName the name of the export
* @property {boolean} checked if the export is conditional
*/
/**
* @typedef {Object} ReexportInfo
* @property {Map<string, ExportInModule[]>} static
* @property {Map<Module, Set<string>>} dynamic
*/
/**
* @param {ReexportInfo} info info object
* @param {string} exportName name of export
* @returns {ExportInModule | undefined} static export
*/
/**
 * Resolve the single unambiguous target for an export name, if one exists.
 * Static mappings take precedence; dynamic reexports are only used when
 * exactly one of them could provide the export.
 * @param {ReexportInfo} info info object
 * @param {string} exportName name of export
 * @returns {ExportInModule | undefined} static export
 */
const getMappingFromInfo = (info, exportName) => {
	const fromStatic = info.static.get(exportName);
	if (fromStatic !== undefined) {
		// Several static targets for the same name => ambiguous
		return fromStatic.length === 1 ? fromStatic[0] : undefined;
	}
	// Collect dynamic reexports that do not ignore this export name
	const candidates = [];
	for (const [module, ignored] of info.dynamic) {
		if (!ignored.has(exportName)) candidates.push(module);
	}
	if (candidates.length !== 1) return undefined;
	// Dynamic targets may not actually provide the export => checked
	return {
		module: candidates[0],
		exportName,
		checked: true
	};
};
/**
* @param {ReexportInfo} info info object
* @param {string} exportName name of export of source module
* @param {Module} module the target module
* @param {string} innerExportName name of export of target module
* @param {boolean} checked true, if existence of target module is checked
*/
/**
 * Record a static reexport mapping, merging with an already-recorded
 * mapping to the same module/export instead of duplicating it.
 * @param {ReexportInfo} info info object
 * @param {string} exportName name of export of source module
 * @param {Module} module the target module
 * @param {string} innerExportName name of export of target module
 * @param {boolean} checked true, if existence of target module is checked
 */
const addStaticReexport = (
	info,
	exportName,
	module,
	innerExportName,
	checked
) => {
	const existing = info.static.get(exportName);
	if (existing === undefined) {
		info.static.set(exportName, [
			{ module, exportName: innerExportName, checked }
		]);
		return;
	}
	const match = existing.find(
		m => m.module === module && m.exportName === innerExportName
	);
	if (match !== undefined) {
		// Same target already known: it stays "checked" only if both were
		match.checked = match.checked && checked;
		return;
	}
	existing.push({ module, exportName: innerExportName, checked });
};
/**
* @param {ReexportInfo} info info object
* @param {Module} module the reexport module
* @param {Set<string>} ignored ignore list
* @returns {void}
*/
/**
 * Record a dynamic reexport from a module. When the module was already
 * recorded, the stored ignore list is intersected with the new one, so a
 * key stays ignored only if every recorded reexport ignores it.
 * @param {ReexportInfo} info info object
 * @param {Module} module the reexport module
 * @param {Set<string>} ignored ignore list
 * @returns {void}
 */
const addDynamicReexport = (info, module, ignored) => {
	const current = info.dynamic.get(module);
	if (current === undefined) {
		// First dynamic reexport from this module: copy the ignore list
		info.dynamic.set(module, new Set(ignored));
		return;
	}
	// Intersect the ignore lists
	for (const key of [...current]) {
		if (!ignored.has(key)) current.delete(key);
	}
};
class SideEffectsFlagPlugin {
apply(compiler) {
compiler.hooks.normalModuleFactory.tap("SideEffectsFlagPlugin", nmf => {
@@ -52,7 +133,7 @@ class SideEffectsFlagPlugin {
compilation.hooks.optimizeDependencies.tap(
"SideEffectsFlagPlugin",
modules => {
/** @type {Map<Module, Map<string, ExportInModule>>} */
/** @type {Map<Module, ReexportInfo>} */
const reexportMaps = new Map();
// Capture reexports of sideEffectFree modules
@@ -69,16 +150,66 @@ class SideEffectsFlagPlugin {
) {
if (module.factoryMeta.sideEffectFree) {
const mode = dep.getMode(true);
if (mode.type === "safe-reexport") {
let map = reexportMaps.get(module);
if (!map) {
reexportMaps.set(module, (map = new Map()));
if (
mode.type === "safe-reexport" ||
mode.type === "checked-reexport" ||
mode.type === "dynamic-reexport" ||
mode.type === "reexport-non-harmony-default" ||
mode.type === "reexport-non-harmony-default-strict" ||
mode.type === "reexport-named-default"
) {
let info = reexportMaps.get(module);
if (!info) {
reexportMaps.set(
module,
(info = {
static: new Map(),
dynamic: new Map()
})
);
}
for (const pair of mode.map) {
map.set(pair[0], {
module: mode.module,
exportName: pair[1]
});
const targetModule = dep._module;
switch (mode.type) {
case "safe-reexport":
for (const [key, id] of mode.map) {
if (id) {
addStaticReexport(
info,
key,
targetModule,
id,
false
);
}
}
break;
case "checked-reexport":
for (const [key, id] of mode.map) {
if (id) {
addStaticReexport(
info,
key,
targetModule,
id,
true
);
}
}
break;
case "dynamic-reexport":
addDynamicReexport(info, targetModule, mode.ignored);
break;
case "reexport-non-harmony-default":
case "reexport-non-harmony-default-strict":
case "reexport-named-default":
addStaticReexport(
info,
mode.name,
targetModule,
"default",
false
);
break;
}
}
}
@@ -87,17 +218,68 @@ class SideEffectsFlagPlugin {
}
// Flatten reexports
for (const map of reexportMaps.values()) {
for (const pair of map) {
let mapping = pair[1];
while (mapping) {
const innerMap = reexportMaps.get(mapping.module);
if (!innerMap) break;
const newMapping = innerMap.get(mapping.exportName);
if (newMapping) {
map.set(pair[0], newMapping);
for (const info of reexportMaps.values()) {
const dynamicReexports = info.dynamic;
info.dynamic = new Map();
for (const reexport of dynamicReexports) {
let [targetModule, ignored] = reexport;
for (;;) {
const innerInfo = reexportMaps.get(targetModule);
if (!innerInfo) break;
for (const [key, reexports] of innerInfo.static) {
if (ignored.has(key)) continue;
for (const { module, exportName, checked } of reexports) {
addStaticReexport(info, key, module, exportName, checked);
}
}
mapping = newMapping;
// Follow dynamic reexport if there is only one
if (innerInfo.dynamic.size !== 1) {
// When there is more than one, we don't know which one
break;
}
ignored = new Set(ignored);
for (const [innerModule, innerIgnored] of innerInfo.dynamic) {
for (const key of innerIgnored) {
if (ignored.has(key)) continue;
// This reexport ends here
addStaticReexport(info, key, targetModule, key, true);
ignored.add(key);
}
targetModule = innerModule;
}
}
// Update reexport as all other cases has been handled
addDynamicReexport(info, targetModule, ignored);
}
}
for (const info of reexportMaps.values()) {
const staticReexports = info.static;
info.static = new Map();
for (const [key, reexports] of staticReexports) {
for (let mapping of reexports) {
for (;;) {
const innerInfo = reexportMaps.get(mapping.module);
if (!innerInfo) break;
const newMapping = getMappingFromInfo(
innerInfo,
mapping.exportName
);
if (!newMapping) break;
mapping = newMapping;
}
addStaticReexport(
info,
key,
mapping.module,
mapping.exportName,
mapping.checked
);
}
}
}
@@ -105,17 +287,18 @@ class SideEffectsFlagPlugin {
// Update imports along the reexports from sideEffectFree modules
for (const pair of reexportMaps) {
const module = pair[0];
const map = pair[1];
const info = pair[1];
let newReasons = undefined;
for (let i = 0; i < module.reasons.length; i++) {
const reason = module.reasons[i];
const dep = reason.dependency;
if (
dep instanceof HarmonyExportImportedSpecifierDependency ||
(dep instanceof HarmonyImportSpecifierDependency &&
!dep.namespaceObjectAsContext)
(dep instanceof HarmonyExportImportedSpecifierDependency ||
(dep instanceof HarmonyImportSpecifierDependency &&
!dep.namespaceObjectAsContext)) &&
dep._id
) {
const mapping = map.get(dep._id);
const mapping = getMappingFromInfo(info, dep._id);
if (mapping) {
dep.redirectedModule = mapping.module;
dep.redirectedId = mapping.exportName;

View File

@@ -75,12 +75,15 @@ const compareEntries = (a, b) => {
const bSizeReduce = b.size * (b.chunks.size - 1);
const diffSizeReduce = aSizeReduce - bSizeReduce;
if (diffSizeReduce) return diffSizeReduce;
// 4. by number of modules (to be able to compare by identifier)
// 4. by cache group index
const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
if (indexDiff) return indexDiff;
// 5. by number of modules (to be able to compare by identifier)
const modulesA = a.modules;
const modulesB = b.modules;
const diff = modulesA.size - modulesB.size;
if (diff) return diff;
// 5. by module identifiers
// 6. by module identifiers
modulesA.sort();
modulesB.sort();
const aI = modulesA[Symbol.iterator]();
@@ -114,6 +117,7 @@ module.exports = class SplitChunksPlugin {
options.chunks || "all"
),
minSize: options.minSize || 0,
enforceSizeThreshold: options.enforceSizeThreshold || 0,
maxSize: options.maxSize || 0,
minChunks: options.minChunks || 1,
maxAsyncRequests: options.maxAsyncRequests || 1,
@@ -286,6 +290,7 @@ module.exports = class SplitChunksPlugin {
),
enforce: option.enforce,
minSize: option.minSize,
enforceSizeThreshold: option.enforceSizeThreshold,
maxSize: option.maxSize,
minChunks: option.minChunks,
maxAsyncRequests: option.maxAsyncRequests,
@@ -458,8 +463,8 @@ module.exports = class SplitChunksPlugin {
* @typedef {Object} ChunksInfoItem
* @property {SortableSet} modules
* @property {TODO} cacheGroup
* @property {number} cacheGroupIndex
* @property {string} name
* @property {boolean} validateSize
* @property {number} size
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
@@ -473,6 +478,7 @@ module.exports = class SplitChunksPlugin {
/**
* @param {TODO} cacheGroup the current cache group
* @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {Chunk[]} selectedChunks chunks selected for this module
* @param {string} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module
@@ -480,6 +486,7 @@ module.exports = class SplitChunksPlugin {
*/
const addModuleToChunksInfoMap = (
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
@@ -507,8 +514,8 @@ module.exports = class SplitChunksPlugin {
(info = {
modules: new SortableSet(undefined, sortByIdentifier),
cacheGroup,
cacheGroupIndex,
name,
validateSize: cacheGroup.minSize > 0,
size: 0,
chunks: new Set(),
reuseableChunks: new Set(),
@@ -517,9 +524,7 @@ module.exports = class SplitChunksPlugin {
);
}
info.modules.add(module);
if (info.validateSize) {
info.size += module.size();
}
info.size += module.size();
if (!info.chunksKeys.has(selectedChunksKey)) {
info.chunksKeys.add(selectedChunksKey);
for (const chunk of selectedChunks) {
@@ -544,22 +549,31 @@ module.exports = class SplitChunksPlugin {
combinationsCache.set(chunksKey, combs);
}
let cacheGroupIndex = 0;
for (const cacheGroupSource of cacheGroups) {
const minSize =
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize;
const enforceSizeThreshold =
cacheGroupSource.enforceSizeThreshold !== undefined
? cacheGroupSource.enforceSizeThreshold
: cacheGroupSource.enforce
? 0
: this.options.enforceSizeThreshold;
const cacheGroup = {
key: cacheGroupSource.key,
priority: cacheGroupSource.priority || 0,
chunksFilter:
cacheGroupSource.chunksFilter || this.options.chunksFilter,
minSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize,
minSize,
minSizeForMaxSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: this.options.minSize,
enforceSizeThreshold,
maxSize:
cacheGroupSource.maxSize !== undefined
? cacheGroupSource.maxSize
@@ -596,7 +610,9 @@ module.exports = class SplitChunksPlugin {
cacheGroupSource.automaticNameDelimiter !== undefined
? cacheGroupSource.automaticNameDelimiter
: this.options.automaticNameDelimiter,
reuseExistingChunk: cacheGroupSource.reuseExistingChunk
reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
_validateSize: minSize > 0,
_conditionalEnforce: enforceSizeThreshold > 0
};
// For all combination of chunk selection
for (const chunkCombination of combs) {
@@ -613,18 +629,23 @@ module.exports = class SplitChunksPlugin {
addModuleToChunksInfoMap(
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
);
}
cacheGroupIndex++;
}
}
// Filter items where size < minSize
for (const pair of chunksInfoMap) {
const info = pair[1];
if (info.validateSize && info.size < info.cacheGroup.minSize) {
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(pair[0]);
}
}
@@ -684,24 +705,30 @@ module.exports = class SplitChunksPlugin {
}
// Check if maxRequests condition can be fulfilled
const usedChunks = Array.from(item.chunks).filter(chunk => {
const selectedChunks = Array.from(item.chunks).filter(chunk => {
// skip if we address ourself
return (
(!chunkName || chunk.name !== chunkName) && chunk !== newChunk
);
});
const enforced =
item.cacheGroup._conditionalEnforce &&
item.size >= item.cacheGroup.enforceSizeThreshold;
// Skip when no chunk selected
if (usedChunks.length === 0) continue;
if (selectedChunks.length === 0) continue;
let validChunks = usedChunks;
const usedChunks = new Set(selectedChunks);
// Check if maxRequests condition can be fulfilled
if (
Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests)
!enforced &&
(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests))
) {
validChunks = validChunks.filter(chunk => {
// respect max requests when not enforced
for (const chunk of usedChunks) {
// respect max requests
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
: chunk.canBeInitial()
@@ -710,26 +737,33 @@ module.exports = class SplitChunksPlugin {
item.cacheGroup.maxAsyncRequests
)
: item.cacheGroup.maxAsyncRequests;
return (
!isFinite(maxRequests) || getRequests(chunk) < maxRequests
);
});
if (
isFinite(maxRequests) &&
getRequests(chunk) >= maxRequests
) {
usedChunks.delete(chunk);
}
}
}
validChunks = validChunks.filter(chunk => {
outer: for (const chunk of usedChunks) {
for (const module of item.modules) {
if (chunk.containsModule(module)) return true;
if (chunk.containsModule(module)) continue outer;
}
return false;
});
usedChunks.delete(chunk);
}
if (validChunks.length < usedChunks.length) {
if (validChunks.length >= item.cacheGroup.minChunks) {
// Were some (invalid) chunks removed from usedChunks?
// => readd all modules to the queue, as things could have been changed
if (usedChunks.size < selectedChunks.length) {
if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
validChunks,
getKey(validChunks),
item.cacheGroupIndex,
chunksArr,
getKey(usedChunks),
module
);
}
@@ -819,28 +853,24 @@ module.exports = class SplitChunksPlugin {
// remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) {
if (isOverlap(info.chunks, item.chunks)) {
if (info.validateSize) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size !== oldSize) {
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
continue;
}
if (info.modules.size !== oldSize) {
info.size = getModulesSize(info.modules);
if (info.size < info.cacheGroup.minSize) {
chunksInfoMap.delete(key);
}
}
} else {
// only update the modules
for (const module of item.modules) {
info.modules.delete(module);
info.size = getModulesSize(info.modules);
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(key);
}
if (info.modules.size === 0) {
chunksInfoMap.delete(key);