| | |
| | | const MinMaxSizeWarning = require("./MinMaxSizeWarning"); |
| | | |
| | | /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */ |
| | | /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */ |
| | | /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */ |
| | | /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */ |
| | | /** @typedef {import("../config/defaults").OutputNormalizedWithDefaults} OutputOptions */ |
| | |
| | | /** @typedef {import("../ChunkGroup")} ChunkGroup */ |
| | | /** @typedef {import("../Compiler")} Compiler */ |
| | | /** @typedef {import("../Module")} Module */ |
| | | /** @typedef {import("../Module").SourceType} SourceType */ |
| | | /** @typedef {import("../ModuleGraph")} ModuleGraph */ |
| | | /** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */ |
| | | /** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */ |
| | | /** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */ |
| | | /** @typedef {import("../util/deterministicGrouping").Sizes} Sizes */ |
| | | |
| | | /** |
| | | * Defines the chunk filter fn callback. |
| | | * @callback ChunkFilterFn |
| | | * @param {Chunk} chunk |
| | | * @returns {boolean | undefined} |
| | | */ |
| | | |
| | | /** @typedef {number} Priority */ |
| | | /** @typedef {number} Size */ |
| | | /** @typedef {number} CountOfChunk */ |
| | | /** @typedef {number} CountOfRequest */ |
| | | |
/**
 * Defines the combine size function callback.
 * @callback CombineSizeFunction
 * @param {Size} a
 * @param {Size} b
 * @returns {Size}
 */
| | | |
/** @typedef {SourceType[]} SourceTypes */
| | | /** @typedef {SourceType[]} DefaultSizeTypes */ |
/** @typedef {Record<SourceType, Size>} SplitChunksSizes */
| | | |
| | | /** |
| | | * Defines the cache group source type used by this module. |
| | | * @typedef {object} CacheGroupSource |
| | | * @property {string} key |
 * @property {Priority=} priority
| | | * @property {GetNameFn=} getName |
| | | * @property {ChunkFilterFn=} chunksFilter |
| | | * @property {boolean=} enforce |
| | |
| | | * @property {SplitChunksSizes} enforceSizeThreshold |
| | | * @property {SplitChunksSizes} maxAsyncSize |
| | | * @property {SplitChunksSizes} maxInitialSize |
 * @property {CountOfChunk=} minChunks
 * @property {CountOfRequest=} maxAsyncRequests
 * @property {CountOfRequest=} maxInitialRequests
| | | * @property {TemplatePath=} filename |
| | | * @property {string=} idHint |
| | | * @property {string=} automaticNameDelimiter |
| | |
| | | */ |
| | | |
| | | /** |
| | | * Defines the cache group type used by this module. |
| | | * @typedef {object} CacheGroup |
| | | * @property {string} key |
 * @property {Priority} priority
| | | * @property {GetNameFn=} getName |
| | | * @property {ChunkFilterFn} chunksFilter |
| | | * @property {SplitChunksSizes} minSize |
| | |
| | | * @property {SplitChunksSizes} enforceSizeThreshold |
| | | * @property {SplitChunksSizes} maxAsyncSize |
| | | * @property {SplitChunksSizes} maxInitialSize |
 * @property {CountOfChunk} minChunks
 * @property {CountOfRequest} maxAsyncRequests
 * @property {CountOfRequest} maxInitialRequests
| | | * @property {TemplatePath=} filename |
| | | * @property {string} idHint |
| | | * @property {string} automaticNameDelimiter |
| | |
| | | */ |
| | | |
| | | /** |
| | | * Defines the fallback cache group type used by this module. |
| | | * @typedef {object} FallbackCacheGroup |
| | | * @property {ChunkFilterFn} chunksFilter |
| | | * @property {SplitChunksSizes} minSize |
| | |
| | | */ |
| | | |
| | | /** |
| | | * Defines the cache groups context type used by this module. |
| | | * @typedef {object} CacheGroupsContext |
| | | * @property {ModuleGraph} moduleGraph |
| | | * @property {ChunkGraph} chunkGraph |
| | | */ |
| | | |
| | | /** @typedef {(module: Module) => OptimizationSplitChunksCacheGroup | OptimizationSplitChunksCacheGroup[] | void} RawGetCacheGroups */ |
| | | |
| | | /** |
| | | * Defines the get cache groups callback. |
| | | * @callback GetCacheGroups |
| | | * @param {Module} module |
| | | * @param {CacheGroupsContext} context |
| | |
| | | */ |
| | | |
| | | /** |
| | | * Defines the get name fn callback. |
| | | * @callback GetNameFn |
| | | * @param {Module} module |
| | | * @param {Chunk[]} chunks |
| | |
| | | */ |
| | | |
| | | /** |
| | | * Defines the split chunks options type used by this module. |
| | | * @typedef {object} SplitChunksOptions |
| | | * @property {ChunkFilterFn} chunksFilter |
| | | * @property {DefaultSizeTypes} defaultSizeTypes |
| | |
| | | * @property {SplitChunksSizes} enforceSizeThreshold |
| | | * @property {SplitChunksSizes} maxInitialSize |
| | | * @property {SplitChunksSizes} maxAsyncSize |
 * @property {CountOfChunk} minChunks
 * @property {CountOfRequest} maxAsyncRequests
 * @property {CountOfRequest} maxInitialRequests
| | | * @property {boolean} hidePathInfo |
| | | * @property {TemplatePath=} filename |
| | | * @property {string} automaticNameDelimiter |
| | |
| | | * @property {FallbackCacheGroup} fallbackCacheGroup |
| | | */ |
| | | |
| | | /** @typedef {Set<Chunk>} ChunkSet */ |
| | | |
| | | /** |
| | | * Defines the chunks info item type used by this module. |
| | | * @typedef {object} ChunksInfoItem |
| | | * @property {SortableSet<Module>} modules |
| | | * @property {CacheGroup} cacheGroup |
| | | * @property {number} cacheGroupIndex |
| | | * @property {string=} name |
 * @property {SplitChunksSizes} sizes
 * @property {ChunkSet} chunks
 * @property {ChunkSet} reusableChunks
| | | * @property {Set<bigint | Chunk>} chunksKeys |
| | | */ |
| | | |
| | |
| | | const getKeyCache = new WeakMap(); |
| | | |
| | | /** |
| | | * Returns hashed filename. |
| | | * @param {string} name a filename to hash |
| | | * @param {OutputOptions} outputOptions hash function used |
| | | * @returns {string} hashed filename |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Returns the number of requests. |
| | | * @param {Chunk} chunk the chunk |
 * @returns {CountOfRequest} the number of requests
| | | */ |
| | | const getRequests = (chunk) => { |
| | | let requests = 0; |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Returns result. |
| | | * @template {object} T |
| | | * @template {object} R |
| | | * @param {T} obj obj an object |
| | | * @param {function(T[keyof T], keyof T): T[keyof T]} fn fn |
| | | * @param {(obj: T[keyof T], key: keyof T) => T[keyof T]} fn fn |
| | | * @returns {T} result |
| | | */ |
| | | const mapObject = (obj, fn) => { |
| | | /** @type {T} */ |
| | | const newObj = Object.create(null); |
| | | for (const key of Object.keys(obj)) { |
| | | newObj[key] = fn( |
| | | newObj[/** @type {keyof T} */ (key)] = fn( |
| | | obj[/** @type {keyof T} */ (key)], |
| | | /** @type {keyof T} */ |
| | | (key) |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Checks whether this object is overlap. |
| | | * @template T |
| | | * @param {Set<T>} a set |
| | | * @param {Set<T>} b other set |
| | |
| | | const compareModuleIterables = compareIterables(compareModulesByIdentifier); |
| | | |
| | | /** |
| | | * Compares the provided values and returns their ordering. |
| | | * @param {ChunksInfoItem} a item |
| | | * @param {ChunksInfoItem} b item |
| | | * @returns {number} compare result |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Initial chunk filter. |
| | | * @param {Chunk} chunk the chunk |
| | | * @returns {boolean} true, if the chunk is an entry chunk |
| | | */ |
| | | const INITIAL_CHUNK_FILTER = (chunk) => chunk.canBeInitial(); |
| | | /** |
| | | * Async chunk filter. |
| | | * @param {Chunk} chunk the chunk |
| | | * @returns {boolean} true, if the chunk is an async chunk |
| | | */ |
| | | const ASYNC_CHUNK_FILTER = (chunk) => !chunk.canBeInitial(); |
| | | /** |
| | | * Returns always true. |
| | | * @param {Chunk} _chunk the chunk |
| | | * @returns {boolean} always true |
| | | */ |
| | | const ALL_CHUNK_FILTER = (_chunk) => true; |
| | | |
| | | /** |
| | | * Returns normalized representation. |
| | | * @param {OptimizationSplitChunksSizes | undefined} value the sizes |
| | | * @param {DefaultSizeTypes} defaultSizeTypes the default size types |
| | | * @returns {SplitChunksSizes} normalized representation |
| | | */ |
| | | const normalizeSizes = (value, defaultSizeTypes) => { |
| | | if (typeof value === "number") { |
/** @type {SplitChunksSizes} */
| | | const o = {}; |
| | | for (const sizeType of defaultSizeTypes) o[sizeType] = value; |
| | | return o; |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Merges the provided values into a single result. |
| | | * @param {...(SplitChunksSizes | undefined)} sizes the sizes |
| | | * @returns {SplitChunksSizes} the merged sizes |
| | | */ |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Checks whether this object contains the size. |
| | | * @param {SplitChunksSizes} sizes the sizes |
| | | * @returns {boolean} true, if there are sizes > 0 |
| | | */ |
| | | const hasNonZeroSizes = (sizes) => { |
| | | for (const key of Object.keys(sizes)) { |
| | | for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) { |
| | | if (sizes[key] > 0) return true; |
| | | } |
| | | return false; |
| | | }; |
| | | |
| | | /** |
| | | * Returns the combine sizes. |
| | | * @param {SplitChunksSizes} a first sizes |
| | | * @param {SplitChunksSizes} b second sizes |
| | | * @param {CombineSizeFunction} combine a function to combine sizes |
| | | * @returns {SplitChunksSizes} the combine sizes |
| | | */ |
| | | const combineSizes = (a, b, combine) => { |
| | | const aKeys = new Set(Object.keys(a)); |
| | | const bKeys = new Set(Object.keys(b)); |
| | | const aKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(a))); |
| | | const bKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(b))); |
| | | /** @type {SplitChunksSizes} */ |
| | | const result = {}; |
| | | for (const key of aKeys) { |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Checks true if there are sizes and all existing sizes are at least minSize. |
| | | * @param {SplitChunksSizes} sizes the sizes |
| | | * @param {SplitChunksSizes} minSize the min sizes |
| | | * @returns {boolean} true if there are sizes and all existing sizes are at least `minSize` |
| | | */ |
| | | const checkMinSize = (sizes, minSize) => { |
| | | for (const key of Object.keys(minSize)) { |
| | | for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) { |
| | | const size = sizes[key]; |
| | | if (size === undefined || size === 0) continue; |
| | | if (size < minSize[key]) return false; |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Checks min size reduction. |
| | | * @param {SplitChunksSizes} sizes the sizes |
| | | * @param {SplitChunksSizes} minSizeReduction the min sizes |
| | | * @param {number} chunkCount number of chunks |
| | | * @param {CountOfChunk} chunkCount number of chunks |
| | | * @returns {boolean} true if there are sizes and all existing sizes are at least `minSizeReduction` |
| | | */ |
| | | const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => { |
| | | for (const key of Object.keys(minSizeReduction)) { |
| | | for (const key of /** @type {SourceType[]} */ ( |
| | | Object.keys(minSizeReduction) |
| | | )) { |
| | | const size = sizes[key]; |
| | | if (size === undefined || size === 0) continue; |
| | | if (size * chunkCount < minSizeReduction[key]) return false; |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Gets violating min sizes. |
| | | * @param {SplitChunksSizes} sizes the sizes |
| | | * @param {SplitChunksSizes} minSize the min sizes |
| | | * @returns {undefined | SourceTypes} list of size types that are below min size |
| | | */ |
| | | const getViolatingMinSizes = (sizes, minSize) => { |
| | | /** @type {SourceTypes | undefined} */ |
| | | let list; |
| | | for (const key of Object.keys(minSize)) { |
| | | for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) { |
| | | const size = sizes[key]; |
| | | if (size === undefined || size === 0) continue; |
| | | if (size < minSize[key]) { |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Returns the total size. |
| | | * @param {SplitChunksSizes} sizes the sizes |
| | | * @returns {number} the total size |
| | | * @returns {Size} the total size |
| | | */ |
| | | const totalSize = (sizes) => { |
| | | let size = 0; |
| | | for (const key of Object.keys(sizes)) { |
| | | for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) { |
| | | size += sizes[key]; |
| | | } |
| | | return size; |
| | | }; |
| | | |
| | | /** |
| | | * Returns a function to get the name of the chunk. |
| | | * @param {OptimizationSplitChunksCacheGroup["name"]} name the chunk name |
| | | * @returns {GetNameFn | undefined} a function to get the name of the chunk |
| | | */ |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Normalizes chunks filter. |
| | | * @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option |
| | | * @returns {ChunkFilterFn | undefined} the chunk filter function |
| | | */ |
| | |
| | | }; |
| | | |
| | | /** |
 * Normalizes cache groups.
 * @param {undefined | GetCacheGroups | Record<string, false | string | RegExp | RawGetCacheGroups | OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
| | | * @param {DefaultSizeTypes} defaultSizeTypes the default size types |
| | | * @returns {GetCacheGroups} a function to get the cache groups |
| | | */ |
| | |
| | | } |
| | | }); |
| | | } else if (typeof option === "function") { |
| | | /** @type {WeakMap<OptimizationSplitChunksCacheGroup, CacheGroupSource>} */ |
| | | const cache = new WeakMap(); |
| | | handlers.push((module, context, results) => { |
| | | const result = option(module); |
| | |
| | | } |
| | | } |
| | | /** |
| | | * Returns the matching cache groups. |
| | | * @param {Module} module the current module |
| | | * @param {CacheGroupsContext} context the current context |
| | | * @returns {CacheGroupSource[]} the matching cache groups |
| | |
| | | return () => null; |
| | | }; |
| | | |
| | | /** @typedef {(module: Module, context: CacheGroupsContext) => boolean} CheckTestFn */ |
| | | |
| | | /** |
| | | * Checks true, if the module should be selected. |
| | | * @param {OptimizationSplitChunksCacheGroup["test"]} test test option |
| | | * @param {Module} module the module |
| | | * @param {CacheGroupsContext} context context object |
| | |
| | | return false; |
| | | }; |
| | | |
| | | /** @typedef {(type: string) => boolean} CheckModuleTypeFn */ |
| | | |
| | | /** |
| | | * Checks module type. |
| | | * @param {OptimizationSplitChunksCacheGroup["type"]} test type option |
| | | * @param {Module} module the module |
| | | * @returns {boolean} true, if the module should be selected |
| | |
| | | return false; |
| | | }; |
| | | |
| | | /** @typedef {(layer: string | null) => boolean} CheckModuleLayerFn */ |
| | | |
| | | /** |
| | | * Checks module layer. |
| | | * @param {OptimizationSplitChunksCacheGroup["layer"]} test type option |
| | | * @param {Module} module the module |
| | | * @returns {boolean} true, if the module should be selected |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Creates a cache group source. |
| | | * @param {OptimizationSplitChunksCacheGroup} options the group options |
| | | * @param {string} key key of cache group |
| | | * @param {DefaultSizeTypes} defaultSizeTypes the default size types |
| | |
| | | |
| | | module.exports = class SplitChunksPlugin { |
| | | /** |
| | | * Creates an instance of SplitChunksPlugin. |
| | | * @param {OptimizationSplitChunksOptions=} options plugin options |
| | | */ |
| | | constructor(options = {}) { |
| | |
| | | } |
| | | |
| | | /** |
| | | * Returns the cache group (cached). |
| | | * @param {CacheGroupSource} cacheGroupSource source |
| | | * @returns {CacheGroup} the cache group (cached) |
| | | */ |
| | |
| | | } |
| | | |
| | | /** |
 * Applies the plugin by registering its hooks on the compiler.
| | | * @param {Compiler} compiler the compiler instance |
| | | * @returns {void} |
| | | */ |
| | |
| | | index <<= ONE; |
| | | } |
| | | /** |
 * Returns key of the chunks.
 * @param {Iterable<Chunk>} chunks list of chunks
| | | * @returns {bigint | Chunk} key of the chunks |
| | | */ |
| | | const getKey = (chunks) => { |
| | |
| | | return key; |
| | | }; |
| | | /** |
| | | * Returns stringified key. |
| | | * @param {bigint | Chunk} key key of the chunks |
| | | * @returns {string} stringified key |
| | | */ |
| | |
| | | }; |
| | | |
| | | const getChunkSetsInGraph = memoize(() => { |
/** @type {Map<bigint, ChunkSet>} */
| | | const chunkSetsInGraph = new Map(); |
/** @type {ChunkSet} */
| | | const singleChunkSets = new Set(); |
| | | for (const module of compilation.modules) { |
| | | const chunks = chunkGraph.getModuleChunksIterable(module); |
| | |
| | | }); |
| | | |
| | | /** |
| | | * Group chunks by exports. |
| | | * @param {Module} module the module |
| | | * @returns {Iterable<Chunk[]>} groups of chunks with equal exports |
| | | */ |
| | | const groupChunksByExports = (module) => { |
| | | const exportsInfo = moduleGraph.getExportsInfo(module); |
| | | /** @type {Map<string, Chunk[]>} */ |
| | | const groupedByUsedExports = new Map(); |
| | | for (const chunk of chunkGraph.getModuleChunksIterable(module)) { |
| | | const key = exportsInfo.getUsageKey(chunk.runtime); |
| | |
| | | /** @type {Map<Module, Iterable<Chunk[]>>} */ |
| | | const groupedByExportsMap = new Map(); |
| | | |
| | | /** @typedef {Map<bigint | Chunk, ChunkSet>} ChunkSetsInGraph */ |
| | | |
| | | const getExportsChunkSetsInGraph = memoize(() => { |
/** @type {ChunkSetsInGraph} */
| | | const chunkSetsInGraph = new Map(); |
/** @type {ChunkSet} */
| | | const singleChunkSets = new Set(); |
| | | for (const module of compilation.modules) { |
| | | const groupedChunks = [...groupChunksByExports(module)]; |
| | |
| | | return { chunkSetsInGraph, singleChunkSets }; |
| | | }); |
| | | |
/** @typedef {Map<CountOfChunk, ChunkSet[]>} ChunkSetsByCount */
| | | |
| | | // group these set of chunks by count |
| | | // to allow to check less sets via isSubset |
| | | // (only smaller sets can be subset) |
| | | /** |
 * Group chunk sets by count.
 * @param {IterableIterator<ChunkSet>} chunkSets set of sets of chunks
| | | * @returns {ChunkSetsByCount} map of sets of chunks by count |
| | | */ |
| | | const groupChunkSetsByCount = (chunkSets) => { |
| | |
| | | ) |
| | | ); |
| | | |
/** @typedef {(ChunkSet | Chunk)[]} Combinations */
| | | |
| | | // Create a list of possible combinations |
| | | /** |
 * Creates a get combinations.
 * @param {ChunkSetsInGraph} chunkSets chunk sets
 * @param {ChunkSet} singleChunkSets single chunks sets
 * @param {ChunkSetsByCount} chunkSetsByCount chunk sets by count
| | | * @returns {(key: bigint | Chunk) => Combinations} combinations |
| | | */ |
| | | const createGetCombinations = ( |
| | |
| | | return result; |
| | | } |
| | | const chunksSet = |
/** @type {ChunkSet} */
| | | (chunkSets.get(key)); |
| | | /** @type {Combinations} */ |
| | | const array = [chunksSet]; |
| | |
| | | }); |
| | | |
| | | /** |
| | | * Returns combinations by key. |
| | | * @param {bigint | Chunk} key key |
| | | * @returns {Combinations} combinations by key |
| | | */ |
| | |
| | | ); |
| | | }); |
| | | /** |
| | | * Gets exports combinations. |
| | | * @param {bigint | Chunk} key key |
| | | * @returns {Combinations} exports combinations by key |
| | | */ |
| | |
| | | getExportsCombinationsFactory()(key); |
| | | |
| | | /** |
| | | * Defines the selected chunks result type used by this module. |
| | | * @typedef {object} SelectedChunksResult |
| | | * @property {Chunk[]} chunks the list of chunks |
| | | * @property {bigint | Chunk} key a key of the list |
| | | */ |
| | | |
/** @typedef {WeakMap<ChunkFilterFn, SelectedChunksResult>} ChunkMap */
/** @type {WeakMap<ChunkSet | Chunk, ChunkMap>} */
| | | const selectedChunksCacheByChunksSet = new WeakMap(); |
| | | |
| | | /** |
| | | * get list and key by applying the filter function to the list |
| | | * It is cached for performance reasons |
 * @param {ChunkSet | Chunk} chunks list of chunks
| | | * @param {ChunkFilterFn} chunkFilter filter function for chunks |
| | | * @returns {SelectedChunksResult} list and key |
| | | */ |
| | | const getSelectedChunks = (chunks, chunkFilter) => { |
| | | let entry = selectedChunksCacheByChunksSet.get(chunks); |
| | | if (entry === undefined) { |
| | | /** @type {ChunkMap} */ |
| | | entry = new WeakMap(); |
| | | selectedChunksCacheByChunksSet.set(chunks, entry); |
| | | } |
| | |
| | | const chunksInfoMap = new Map(); |
| | | |
| | | /** |
| | | * Adds module to chunks info map. |
| | | * @param {CacheGroup} cacheGroup the current cache group |
| | | * @param {number} cacheGroupIndex the index of the cache group of ordering |
| | | * @param {Chunk[]} selectedChunks chunks selected for this module |
| | |
| | | const getCombsByUsedExports = memoize(() => { |
| | | // fill the groupedByExportsMap |
| | | getExportsChunkSetsInGraph(); |
/** @type {Set<ChunkSet | Chunk>} */
| | | const set = new Set(); |
| | | const groupedByUsedExports = |
| | | /** @type {Iterable<Chunk[]>} */ |
| | |
| | | logger.time("queue"); |
| | | |
| | | /** |
| | | * Removes modules with source type. |
| | | * @param {ChunksInfoItem} info entry |
| | | * @param {SourceTypes} sourceTypes source types to be removed |
| | | */ |
| | |
| | | }; |
| | | |
| | | /** |
| | | * Removes min size violating modules. |
| | | * @param {ChunksInfoItem} info entry |
| | | * @returns {boolean} true, if entry become empty |
| | | */ |
| | |
| | | } |
| | | |
| | | /** |
| | | * Defines the max size queue item type used by this module. |
| | | * @typedef {object} MaxSizeQueueItem |
| | | * @property {SplitChunksSizes} minSize |
| | | * @property {SplitChunksSizes} maxAsyncSize |
| | |
| | | |
| | | while (chunksInfoMap.size > 0) { |
| | | // Find best matching entry |
| | | /** @type {undefined | string} */ |
| | | let bestEntryKey; |
| | | /** @type {undefined | ChunksInfoItem} */ |
| | | let bestEntry; |
| | | for (const pair of chunksInfoMap) { |
| | | const key = pair[0]; |
| | |
| | | item.cacheGroup._conditionalEnforce && |
| | | checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold); |
| | | |
| | | /** @type {Set<Chunk>} */ |
| | | const usedChunks = new Set(item.chunks); |
| | | |
| | | // Check if maxRequests condition can be fulfilled |
| | |
| | | usedChunks.size === 1 |
| | | ) { |
| | | const [chunk] = usedChunks; |
| | | /** @type {SplitChunksSizes} */ |
| | | const chunkSizes = Object.create(null); |
| | | for (const module of chunkGraph.getChunkModulesIterable(chunk)) { |
| | | if (!item.modules.has(module)) { |
| | |
| | | if (Object.keys(maxSize).length === 0) { |
| | | continue; |
| | | } |
| | | for (const key of Object.keys(maxSize)) { |
| | | for (const key of /** @type {SourceType[]} */ ( |
| | | Object.keys(maxSize) |
| | | )) { |
| | | const maxSizeValue = maxSize[key]; |
| | | const minSizeValue = minSize[key]; |
| | | if ( |
| | |
| | | return key; |
| | | }, |
| | | getSize(module) { |
| | | /** @type {Sizes} */ |
| | | const size = Object.create(null); |
| | | for (const key of module.getSourceTypes()) { |
| | | size[key] = module.size(key); |