From 3bd962a6d7f61239c020e2dbbeb7341e5b842dd1 Mon Sep 17 00:00:00 2001
From: WXL <wl_5969728@163.com>
Date: Tue, 21 Apr 2026 11:46:41 +0800
Subject: [PATCH] Improve JSDoc type annotations in SplitChunksPlugin
---
node_modules/webpack/lib/optimize/SplitChunksPlugin.js | 182 ++++++++++++++++++++++++++++++++------------
 1 file changed, 131 insertions(+), 51 deletions(-)
diff --git a/node_modules/webpack/lib/optimize/SplitChunksPlugin.js b/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
index 229adb2..10884bd 100644
--- a/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
+++ b/node_modules/webpack/lib/optimize/SplitChunksPlugin.js
@@ -22,7 +22,6 @@
const MinMaxSizeWarning = require("./MinMaxSizeWarning");
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */
-/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
/** @typedef {import("../config/defaults").OutputNormalizedWithDefaults} OutputOptions */
@@ -31,33 +30,42 @@
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */
+/** @typedef {import("../Module").SourceType} SourceType */
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */
/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
+/** @typedef {import("../util/deterministicGrouping").Sizes} Sizes */
/**
+ * Defines the chunk filter fn callback.
* @callback ChunkFilterFn
* @param {Chunk} chunk
* @returns {boolean | undefined}
*/
+/** @typedef {number} Priority */
+/** @typedef {number} Size */
+/** @typedef {number} CountOfChunk */
+/** @typedef {number} CountOfRequest */
+
/**
+ * Defines the combine size function callback.
* @callback CombineSizeFunction
- * @param {number} a
- * @param {number} b
- * @returns {number}
+ * @param {Size} a
+ * @param {Size} b
+ * @returns {Size}
*/
-/** @typedef {string} SourceType */
/** @typedef {SourceType[]} SourceTypes */
/** @typedef {SourceType[]} DefaultSizeTypes */
-/** @typedef {Record<SourceType, number>} SplitChunksSizes */
+/** @typedef {Record<SourceType, Size>} SplitChunksSizes */
/**
+ * Defines the cache group source type used by this module.
* @typedef {object} CacheGroupSource
* @property {string} key
- * @property {number=} priority
+ * @property {Priority=} priority
* @property {GetNameFn=} getName
* @property {ChunkFilterFn=} chunksFilter
* @property {boolean=} enforce
@@ -67,9 +75,9 @@
* @property {SplitChunksSizes} enforceSizeThreshold
* @property {SplitChunksSizes} maxAsyncSize
* @property {SplitChunksSizes} maxInitialSize
- * @property {number=} minChunks
- * @property {number=} maxAsyncRequests
- * @property {number=} maxInitialRequests
+ * @property {CountOfChunk=} minChunks
+ * @property {CountOfRequest=} maxAsyncRequests
+ * @property {CountOfRequest=} maxInitialRequests
* @property {TemplatePath=} filename
* @property {string=} idHint
* @property {string=} automaticNameDelimiter
@@ -78,9 +86,10 @@
*/
/**
+ * Defines the cache group type used by this module.
* @typedef {object} CacheGroup
* @property {string} key
- * @property {number} priority
+ * @property {Priority} priority
* @property {GetNameFn=} getName
* @property {ChunkFilterFn} chunksFilter
* @property {SplitChunksSizes} minSize
@@ -89,9 +98,9 @@
* @property {SplitChunksSizes} enforceSizeThreshold
* @property {SplitChunksSizes} maxAsyncSize
* @property {SplitChunksSizes} maxInitialSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
* @property {TemplatePath=} filename
* @property {string} idHint
* @property {string} automaticNameDelimiter
@@ -104,6 +113,7 @@
*/
/**
+ * Defines the fallback cache group type used by this module.
* @typedef {object} FallbackCacheGroup
* @property {ChunkFilterFn} chunksFilter
* @property {SplitChunksSizes} minSize
@@ -113,12 +123,16 @@
*/
/**
+ * Defines the cache groups context type used by this module.
* @typedef {object} CacheGroupsContext
* @property {ModuleGraph} moduleGraph
* @property {ChunkGraph} chunkGraph
*/
+/** @typedef {(module: Module) => OptimizationSplitChunksCacheGroup | OptimizationSplitChunksCacheGroup[] | void} RawGetCacheGroups */
+
/**
+ * Defines the get cache groups callback.
* @callback GetCacheGroups
* @param {Module} module
* @param {CacheGroupsContext} context
@@ -126,6 +140,7 @@
*/
/**
+ * Defines the get name fn callback.
* @callback GetNameFn
* @param {Module} module
* @param {Chunk[]} chunks
@@ -134,6 +149,7 @@
*/
/**
+ * Defines the split chunks options type used by this module.
* @typedef {object} SplitChunksOptions
* @property {ChunkFilterFn} chunksFilter
* @property {DefaultSizeTypes} defaultSizeTypes
@@ -143,9 +159,9 @@
* @property {SplitChunksSizes} enforceSizeThreshold
* @property {SplitChunksSizes} maxInitialSize
* @property {SplitChunksSizes} maxAsyncSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
* @property {boolean} hidePathInfo
* @property {TemplatePath=} filename
* @property {string} automaticNameDelimiter
@@ -155,15 +171,18 @@
* @property {FallbackCacheGroup} fallbackCacheGroup
*/
+/** @typedef {Set<Chunk>} ChunkSet */
+
/**
+ * Defines the chunks info item type used by this module.
* @typedef {object} ChunksInfoItem
* @property {SortableSet<Module>} modules
* @property {CacheGroup} cacheGroup
* @property {number} cacheGroupIndex
* @property {string=} name
- * @property {Record<SourceType, number>} sizes
- * @property {Set<Chunk>} chunks
- * @property {Set<Chunk>} reusableChunks
+ * @property {SplitChunksSizes} sizes
+ * @property {ChunkSet} chunks
+ * @property {ChunkSet} reusableChunks
* @property {Set<bigint | Chunk>} chunksKeys
*/
@@ -178,6 +197,7 @@
const getKeyCache = new WeakMap();
/**
+ * Returns hashed filename.
* @param {string} name a filename to hash
* @param {OutputOptions} outputOptions hash function used
* @returns {string} hashed filename
@@ -194,8 +214,9 @@
};
/**
+ * Returns the number of requests.
* @param {Chunk} chunk the chunk
- * @returns {number} the number of requests
+ * @returns {CountOfRequest} the number of requests
*/
const getRequests = (chunk) => {
let requests = 0;
@@ -206,16 +227,18 @@
};
/**
+ * Returns result.
* @template {object} T
* @template {object} R
* @param {T} obj obj an object
- * @param {function(T[keyof T], keyof T): T[keyof T]} fn fn
+ * @param {(obj: T[keyof T], key: keyof T) => T[keyof T]} fn fn
* @returns {T} result
*/
const mapObject = (obj, fn) => {
+ /** @type {T} */
const newObj = Object.create(null);
for (const key of Object.keys(obj)) {
- newObj[key] = fn(
+ newObj[/** @type {keyof T} */ (key)] = fn(
obj[/** @type {keyof T} */ (key)],
/** @type {keyof T} */
(key)
@@ -225,6 +248,7 @@
};
/**
+ * Checks whether this object is overlap.
* @template T
* @param {Set<T>} a set
* @param {Set<T>} b other set
@@ -240,6 +264,7 @@
const compareModuleIterables = compareIterables(compareModulesByIdentifier);
/**
+ * Compares the provided values and returns their ordering.
* @param {ChunksInfoItem} a item
* @param {ChunksInfoItem} b item
* @returns {number} compare result
@@ -271,29 +296,33 @@
};
/**
+ * Initial chunk filter.
* @param {Chunk} chunk the chunk
* @returns {boolean} true, if the chunk is an entry chunk
*/
const INITIAL_CHUNK_FILTER = (chunk) => chunk.canBeInitial();
/**
+ * Async chunk filter.
* @param {Chunk} chunk the chunk
* @returns {boolean} true, if the chunk is an async chunk
*/
const ASYNC_CHUNK_FILTER = (chunk) => !chunk.canBeInitial();
/**
+ * Returns always true.
* @param {Chunk} _chunk the chunk
* @returns {boolean} always true
*/
const ALL_CHUNK_FILTER = (_chunk) => true;
/**
+ * Returns normalized representation.
* @param {OptimizationSplitChunksSizes | undefined} value the sizes
* @param {DefaultSizeTypes} defaultSizeTypes the default size types
* @returns {SplitChunksSizes} normalized representation
*/
const normalizeSizes = (value, defaultSizeTypes) => {
if (typeof value === "number") {
- /** @type {Record<string, number>} */
+ /** @type {SplitChunksSizes} */
const o = {};
for (const sizeType of defaultSizeTypes) o[sizeType] = value;
return o;
@@ -304,6 +333,7 @@
};
/**
+ * Merges the provided values into a single result.
* @param {...(SplitChunksSizes | undefined)} sizes the sizes
* @returns {SplitChunksSizes} the merged sizes
*/
@@ -317,25 +347,27 @@
};
/**
+ * Checks whether this object contains the size.
* @param {SplitChunksSizes} sizes the sizes
* @returns {boolean} true, if there are sizes > 0
*/
const hasNonZeroSizes = (sizes) => {
- for (const key of Object.keys(sizes)) {
+ for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
if (sizes[key] > 0) return true;
}
return false;
};
/**
+ * Returns the combine sizes.
* @param {SplitChunksSizes} a first sizes
* @param {SplitChunksSizes} b second sizes
* @param {CombineSizeFunction} combine a function to combine sizes
* @returns {SplitChunksSizes} the combine sizes
*/
const combineSizes = (a, b, combine) => {
- const aKeys = new Set(Object.keys(a));
- const bKeys = new Set(Object.keys(b));
+ const aKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(a)));
+ const bKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(b)));
/** @type {SplitChunksSizes} */
const result = {};
for (const key of aKeys) {
@@ -350,12 +382,13 @@
};
/**
+ * Checks true if there are sizes and all existing sizes are at least minSize.
* @param {SplitChunksSizes} sizes the sizes
* @param {SplitChunksSizes} minSize the min sizes
* @returns {boolean} true if there are sizes and all existing sizes are at least `minSize`
*/
const checkMinSize = (sizes, minSize) => {
- for (const key of Object.keys(minSize)) {
+ for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
const size = sizes[key];
if (size === undefined || size === 0) continue;
if (size < minSize[key]) return false;
@@ -364,13 +397,16 @@
};
/**
+ * Checks min size reduction.
* @param {SplitChunksSizes} sizes the sizes
* @param {SplitChunksSizes} minSizeReduction the min sizes
- * @param {number} chunkCount number of chunks
+ * @param {CountOfChunk} chunkCount number of chunks
* @returns {boolean} true if there are sizes and all existing sizes are at least `minSizeReduction`
*/
const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
- for (const key of Object.keys(minSizeReduction)) {
+ for (const key of /** @type {SourceType[]} */ (
+ Object.keys(minSizeReduction)
+ )) {
const size = sizes[key];
if (size === undefined || size === 0) continue;
if (size * chunkCount < minSizeReduction[key]) return false;
@@ -379,13 +415,15 @@
};
/**
+ * Gets violating min sizes.
* @param {SplitChunksSizes} sizes the sizes
* @param {SplitChunksSizes} minSize the min sizes
* @returns {undefined | SourceTypes} list of size types that are below min size
*/
const getViolatingMinSizes = (sizes, minSize) => {
+ /** @type {SourceTypes | undefined} */
let list;
- for (const key of Object.keys(minSize)) {
+ for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
const size = sizes[key];
if (size === undefined || size === 0) continue;
if (size < minSize[key]) {
@@ -397,18 +435,20 @@
};
/**
+ * Returns the total size.
* @param {SplitChunksSizes} sizes the sizes
- * @returns {number} the total size
+ * @returns {Size} the total size
*/
const totalSize = (sizes) => {
let size = 0;
- for (const key of Object.keys(sizes)) {
+ for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
size += sizes[key];
}
return size;
};
/**
+ * Returns a function to get the name of the chunk.
* @param {OptimizationSplitChunksCacheGroup["name"]} name the chunk name
* @returns {GetNameFn | undefined} a function to get the name of the chunk
*/
@@ -422,6 +462,7 @@
};
/**
+ * Normalizes chunks filter.
* @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option
* @returns {ChunkFilterFn | undefined} the chunk filter function
*/
@@ -444,7 +485,8 @@
};
/**
- * @param {undefined | GetCacheGroups | Record<string, false | string | RegExp | OptimizationSplitChunksGetCacheGroups | OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
+ * Normalizes cache groups.
+ * @param {undefined | GetCacheGroups | Record<string, false | string | RegExp | RawGetCacheGroups | OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
* @param {DefaultSizeTypes} defaultSizeTypes the default size types
* @returns {GetCacheGroups} a function to get the cache groups
*/
@@ -468,6 +510,7 @@
}
});
} else if (typeof option === "function") {
+ /** @type {WeakMap<OptimizationSplitChunksCacheGroup, CacheGroupSource>} */
const cache = new WeakMap();
handlers.push((module, context, results) => {
const result = option(module);
@@ -503,6 +546,7 @@
}
}
/**
+ * Returns the matching cache groups.
* @param {Module} module the current module
* @param {CacheGroupsContext} context the current context
* @returns {CacheGroupSource[]} the matching cache groups
@@ -520,7 +564,10 @@
return () => null;
};
+/** @typedef {(module: Module, context: CacheGroupsContext) => boolean} CheckTestFn */
+
/**
+ * Checks true, if the module should be selected.
* @param {OptimizationSplitChunksCacheGroup["test"]} test test option
* @param {Module} module the module
* @param {CacheGroupsContext} context context object
@@ -543,7 +590,10 @@
return false;
};
+/** @typedef {(type: string) => boolean} CheckModuleTypeFn */
+
/**
+ * Checks module type.
* @param {OptimizationSplitChunksCacheGroup["type"]} test type option
* @param {Module} module the module
* @returns {boolean} true, if the module should be selected
@@ -564,7 +614,10 @@
return false;
};
+/** @typedef {(layer: string | null) => boolean} CheckModuleLayerFn */
+
/**
+ * Checks module layer.
* @param {OptimizationSplitChunksCacheGroup["layer"]} test type option
* @param {Module} module the module
* @returns {boolean} true, if the module should be selected
@@ -586,6 +639,7 @@
};
/**
+ * Creates a cache group source.
* @param {OptimizationSplitChunksCacheGroup} options the group options
* @param {string} key key of cache group
* @param {DefaultSizeTypes} defaultSizeTypes the default size types
@@ -637,6 +691,7 @@
module.exports = class SplitChunksPlugin {
/**
+ * Creates an instance of SplitChunksPlugin.
* @param {OptimizationSplitChunksOptions=} options plugin options
*/
constructor(options = {}) {
@@ -726,6 +781,7 @@
}
/**
+ * Returns the cache group (cached).
* @param {CacheGroupSource} cacheGroupSource source
* @returns {CacheGroup} the cache group (cached)
*/
@@ -817,7 +873,7 @@
}
/**
- * Apply the plugin
+ * Applies the plugin by registering its hooks on the compiler.
* @param {Compiler} compiler the compiler instance
* @returns {void}
*/
@@ -858,7 +914,8 @@
index <<= ONE;
}
/**
- * @param {Iterable<Chunk>} chunks list of chunks
+ * Returns key of the chunks.
+ * @param {Iterable<Chunk, undefined, undefined>} chunks list of chunks
* @returns {bigint | Chunk} key of the chunks
*/
const getKey = (chunks) => {
@@ -878,6 +935,7 @@
return key;
};
/**
+ * Returns stringified key.
* @param {bigint | Chunk} key key of the chunks
* @returns {string} stringified key
*/
@@ -887,9 +945,9 @@
};
const getChunkSetsInGraph = memoize(() => {
- /** @type {Map<bigint, Set<Chunk>>} */
+ /** @type {Map<bigint, ChunkSet>} */
const chunkSetsInGraph = new Map();
- /** @type {Set<Chunk>} */
+ /** @type {ChunkSet} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const chunks = chunkGraph.getModuleChunksIterable(module);
@@ -906,11 +964,13 @@
});
/**
+ * Group chunks by exports.
* @param {Module} module the module
* @returns {Iterable<Chunk[]>} groups of chunks with equal exports
*/
const groupChunksByExports = (module) => {
const exportsInfo = moduleGraph.getExportsInfo(module);
+ /** @type {Map<string, Chunk[]>} */
const groupedByUsedExports = new Map();
for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
const key = exportsInfo.getUsageKey(chunk.runtime);
@@ -927,10 +987,12 @@
/** @type {Map<Module, Iterable<Chunk[]>>} */
const groupedByExportsMap = new Map();
+ /** @typedef {Map<bigint | Chunk, ChunkSet>} ChunkSetsInGraph */
+
const getExportsChunkSetsInGraph = memoize(() => {
- /** @type {Map<bigint | Chunk, Set<Chunk>>} */
+ /** @type {ChunkSetsInGraph} */
const chunkSetsInGraph = new Map();
- /** @type {Set<Chunk>} */
+ /** @type {ChunkSet} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const groupedChunks = [...groupChunksByExports(module)];
@@ -949,13 +1011,14 @@
return { chunkSetsInGraph, singleChunkSets };
});
- /** @typedef {Map<number, Set<Chunk>[]>} ChunkSetsByCount */
+ /** @typedef {Map<CountOfChunk, ChunkSet[]>} ChunkSetsByCount */
// group these set of chunks by count
// to allow to check less sets via isSubset
// (only smaller sets can be subset)
/**
- * @param {IterableIterator<Set<Chunk>>} chunkSets set of sets of chunks
+ * Group chunk sets by count.
+ * @param {IterableIterator<ChunkSet>} chunkSets set of sets of chunks
* @returns {ChunkSetsByCount} map of sets of chunks by count
*/
const groupChunkSetsByCount = (chunkSets) => {
@@ -983,13 +1046,14 @@
)
);
- /** @typedef {(Set<Chunk> | Chunk)[]} Combinations */
+ /** @typedef {(ChunkSet | Chunk)[]} Combinations */
// Create a list of possible combinations
/**
- * @param {Map<bigint | Chunk, Set<Chunk>>} chunkSets chunk sets
- * @param {Set<Chunk>} singleChunkSets single chunks sets
- * @param {Map<number, Set<Chunk>[]>} chunkSetsByCount chunk sets by count
+ * Creates a get combinations.
+ * @param {ChunkSetsInGraph} chunkSets chunk sets
+ * @param {ChunkSet} singleChunkSets single chunks sets
+ * @param {ChunkSetsByCount} chunkSetsByCount chunk sets by count
* @returns {(key: bigint | Chunk) => Combinations} combinations
*/
const createGetCombinations = (
@@ -1009,7 +1073,7 @@
return result;
}
const chunksSet =
- /** @type {Set<Chunk>} */
+ /** @type {ChunkSet} */
(chunkSets.get(key));
/** @type {Combinations} */
const array = [chunksSet];
@@ -1043,6 +1107,7 @@
});
/**
+ * Returns combinations by key.
* @param {bigint | Chunk} key key
* @returns {Combinations} combinations by key
*/
@@ -1058,6 +1123,7 @@
);
});
/**
+ * Gets exports combinations.
* @param {bigint | Chunk} key key
* @returns {Combinations} exports combinations by key
*/
@@ -1065,24 +1131,27 @@
getExportsCombinationsFactory()(key);
/**
+ * Defines the selected chunks result type used by this module.
* @typedef {object} SelectedChunksResult
* @property {Chunk[]} chunks the list of chunks
* @property {bigint | Chunk} key a key of the list
*/
- /** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFn, SelectedChunksResult>>} */
+ /** @typedef {WeakMap<ChunkFilterFn, SelectedChunksResult>} ChunkMap */
+ /** @type {WeakMap<ChunkSet | Chunk, ChunkMap>} */
const selectedChunksCacheByChunksSet = new WeakMap();
/**
* get list and key by applying the filter function to the list
* It is cached for performance reasons
- * @param {Set<Chunk> | Chunk} chunks list of chunks
+ * @param {ChunkSet | Chunk} chunks list of chunks
* @param {ChunkFilterFn} chunkFilter filter function for chunks
* @returns {SelectedChunksResult} list and key
*/
const getSelectedChunks = (chunks, chunkFilter) => {
let entry = selectedChunksCacheByChunksSet.get(chunks);
if (entry === undefined) {
+ /** @type {ChunkMap} */
entry = new WeakMap();
selectedChunksCacheByChunksSet.set(chunks, entry);
}
@@ -1119,6 +1188,7 @@
const chunksInfoMap = new Map();
/**
+ * Adds module to chunks info map.
* @param {CacheGroup} cacheGroup the current cache group
* @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {Chunk[]} selectedChunks chunks selected for this module
@@ -1269,7 +1339,7 @@
const getCombsByUsedExports = memoize(() => {
// fill the groupedByExportsMap
getExportsChunkSetsInGraph();
- /** @type {Set<Set<Chunk> | Chunk>} */
+ /** @type {Set<ChunkSet | Chunk>} */
const set = new Set();
const groupedByUsedExports =
/** @type {Iterable<Chunk[]>} */
@@ -1321,6 +1391,7 @@
logger.time("queue");
/**
+ * Removes modules with source type.
* @param {ChunksInfoItem} info entry
* @param {SourceTypes} sourceTypes source types to be removed
*/
@@ -1337,6 +1408,7 @@
};
/**
+ * Removes min size violating modules.
* @param {ChunksInfoItem} info entry
* @returns {boolean} true, if entry become empty
*/
@@ -1367,6 +1439,7 @@
}
/**
+ * Defines the max size queue item type used by this module.
* @typedef {object} MaxSizeQueueItem
* @property {SplitChunksSizes} minSize
* @property {SplitChunksSizes} maxAsyncSize
@@ -1380,7 +1453,9 @@
while (chunksInfoMap.size > 0) {
// Find best matching entry
+ /** @type {undefined | string} */
let bestEntryKey;
+ /** @type {undefined | ChunksInfoItem} */
let bestEntry;
for (const pair of chunksInfoMap) {
const key = pair[0];
@@ -1459,6 +1534,7 @@
item.cacheGroup._conditionalEnforce &&
checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
+ /** @type {Set<Chunk>} */
const usedChunks = new Set(item.chunks);
// Check if maxRequests condition can be fulfilled
@@ -1521,6 +1597,7 @@
usedChunks.size === 1
) {
const [chunk] = usedChunks;
+ /** @type {SplitChunksSizes} */
const chunkSizes = Object.create(null);
for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
if (!item.modules.has(module)) {
@@ -1703,7 +1780,9 @@
if (Object.keys(maxSize).length === 0) {
continue;
}
- for (const key of Object.keys(maxSize)) {
+ for (const key of /** @type {SourceType[]} */ (
+ Object.keys(maxSize)
+ )) {
const maxSizeValue = maxSize[key];
const minSizeValue = minSize[key];
if (
@@ -1749,6 +1828,7 @@
return key;
},
getSize(module) {
+ /** @type {Sizes} */
const size = Object.create(null);
for (const key of module.getSourceTypes()) {
size[key] = module.size(key);
--
Gitblit v1.9.3