Update npm packages (73 packages including @jqhtml 2.3.36)

Update npm registry domain from privatenpm.hanson.xyz to npm.internal.hanson.xyz

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
root
2026-02-20 11:31:28 +00:00
parent d01a6179aa
commit b5eb27a827
1690 changed files with 47348 additions and 16848 deletions

57
node_modules/webpack/lib/util/AppendOnlyStackedSet.js generated vendored Normal file
View File

@@ -0,0 +1,57 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Ivan Kopeykin @vankop
*/
"use strict";
/**
 * A set-like collection optimized for append-only use across a hierarchy of
 * scopes. Elements live in a stack of plain `Set`s; `createChild` copies the
 * array of set *references*, so sets that existed at child-creation time are
 * shared — elements appended to them later are visible to the child too.
 * @template T
 */
class AppendOnlyStackedSet {
	/**
	 * @param {Set<T>[]} sets an optional array of sets
	 */
	constructor(sets = []) {
		/** @type {Set<T>[]} all sets consulted by `has` */
		this._sets = sets;
		/** @type {Set<T> | undefined} the write target for `add`; once created it is always a member of `_sets` */
		this._current = undefined;
	}

	/**
	 * Adds an element.
	 * @param {T} el element
	 */
	add(el) {
		if (!this._current) {
			// lazily create the write target and register it for lookups
			this._current = new Set();
			this._sets.push(this._current);
		}
		this._current.add(el);
	}

	/**
	 * Checks membership in this set and all inherited (shared) sets.
	 * @param {T} el element
	 * @returns {boolean} result
	 */
	has(el) {
		for (const set of this._sets) {
			if (set.has(el)) return true;
		}
		return false;
	}

	/**
	 * Removes all elements.
	 * NOTE(review): the current set is emptied in place, so children created
	 * earlier that share it also stop seeing its elements — presumably the
	 * intended semantics; confirm against callers.
	 */
	clear() {
		if (this._current) {
			// Bug fix: keep the (now empty) current set registered in _sets.
			// Previously _sets was reset to [] while _current stayed the write
			// target, so elements added after clear() were invisible to has().
			this._current.clear();
			this._sets = [this._current];
		} else {
			this._sets = [];
		}
	}

	/**
	 * Creates a child set that inherits (shares by reference) all current sets.
	 * @returns {AppendOnlyStackedSet<T>} child
	 */
	createChild() {
		return new AppendOnlyStackedSet(this._sets.length ? [...this._sets] : []);
	}
}
module.exports = AppendOnlyStackedSet;

View File

@@ -6,6 +6,7 @@
"use strict";
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
/** @typedef {string | typeof Hash} HashFunction */
class Hash {
/* istanbul ignore next */

View File

@@ -11,6 +11,7 @@
* @returns {T | undefined} last item
*/
const last = (set) => {
/** @type {T | undefined} */
let last;
for (const item of set) last = item;
return last;

View File

@@ -56,6 +56,7 @@ class LazyBucketSortedSet {
this._keys = new SortableSet(undefined, comparator);
/** @type {Map<K, Entry<T, K>>} */
this._map = new Map();
/** @type {Set<T>} */
this._unsortedItems = new Set();
this.size = 0;
}

View File

@@ -222,6 +222,7 @@ class LazySet {
*/
static deserialize({ read }) {
const count = read();
/** @type {T[]} */
const items = [];
for (let i = 0; i < count; i++) {
items.push(read());

View File

@@ -35,6 +35,7 @@ class ParallelismFactorCalculator {
a < b ? -1 : 1
);
const parallelism = segments.map(() => 0);
/** @type {number[]} */
const rangeStartIndices = [];
for (let i = 0; i < this._rangePoints.length; i += 2) {
const start = this._rangePoints[i];

View File

@@ -88,6 +88,7 @@ class SortableSet extends Set {
return;
}
/** @type {T[]} */
const sortedArray = [...this].sort(sortFn);
super.clear();
for (let i = 0; i < sortedArray.length; i += 1) {

View File

@@ -62,6 +62,7 @@ class StringXor {
const valueLen = value.length;
if (valueLen < len) {
const newValue = (this._value = Buffer.allocUnsafe(len));
/** @type {number} */
let i;
for (i = 0; i < valueLen; i++) {
newValue[i] = value[i] ^ str.charCodeAt(i);

View File

@@ -64,6 +64,11 @@ const compileSearch = (funcName, predicate, reversed, extraArgs, earlyOut) => {
return code.join("");
};
/**
* @template T
* @typedef {(items: T[], start: number, compareFn?: number | ((item: T, needle: number) => number), l?: number, h?: number) => number} Search
*/
/**
* This helper functions generate code for two binary search functions:
* A(): Performs a binary search on an array using the comparison operator specified.
@@ -74,7 +79,7 @@ const compileSearch = (funcName, predicate, reversed, extraArgs, earlyOut) => {
* @param {boolean} reversed Whether the search should be reversed.
* @param {SearchPredicateSuffix} suffix The suffix to be used in the function name.
* @param {boolean=} earlyOut Whether the search should return as soon as a match is found.
* @returns {(items: T[], start: number, compareFn?: number | ((item: T, needle: number) => number), l?: number, h?: number) => number} The compiled binary search function.
* @returns {Search<T>} The compiled binary search function.
*/
const compileBoundsSearch = (predicate, reversed, suffix, earlyOut) => {
const arg1 = compileSearch("A", `x${predicate}y`, reversed, ["y"], earlyOut);
@@ -106,6 +111,14 @@ return dispatchBinarySearch";
return result();
};
const fns = {
ge: compileBoundsSearch(">=", false, "GE"),
gt: compileBoundsSearch(">", false, "GT"),
lt: compileBoundsSearch("<", true, "LT"),
le: compileBoundsSearch("<=", true, "LE"),
eq: compileBoundsSearch("-", true, "EQ", true)
};
/**
* These functions are used to perform binary searches on arrays.
* @example
@@ -120,10 +133,4 @@ return dispatchBinarySearch";
* const index2 = le(arr, 5); // index2 === 4
* ```
*/
module.exports = {
ge: compileBoundsSearch(">=", false, "GE"),
gt: compileBoundsSearch(">", false, "GT"),
lt: compileBoundsSearch("<", true, "LT"),
le: compileBoundsSearch("<=", true, "LE"),
eq: compileBoundsSearch("-", true, "EQ", true)
};
module.exports = fns;

View File

@@ -23,7 +23,7 @@
* @param {IdRanges | undefined} ranges cumulative range of ids for each of allIds
* @param {ModuleGraph} moduleGraph moduleGraph
* @param {Dependency} dependency dependency
* @returns {{trimmedIds: string[], trimmedRange: Range}} computed trimmed ids and cumulative range of those ids
* @returns {{ trimmedIds: string[], trimmedRange: Range }} computed trimmed ids and cumulative range of those ids
*/
module.exports.getTrimmedIdsAndRange = (
untrimmedIds,

View File

@@ -7,7 +7,8 @@
/** @type {WeakMap<EXPECTED_OBJECT, WeakMap<EXPECTED_OBJECT, EXPECTED_OBJECT>>} */
const mergeCache = new WeakMap();
/** @type {WeakMap<EXPECTED_OBJECT, Map<string, Map<string | number | boolean, EXPECTED_OBJECT>>>} */
/** @typedef {Map<string, Map<string | number | boolean, EXPECTED_OBJECT>>} InnerPropertyCache */
/** @type {WeakMap<EXPECTED_OBJECT, InnerPropertyCache>} */
const setPropertyCache = new WeakMap();
const DELETE = Symbol("DELETE");
const DYNAMIC_INFO = Symbol("cleverMerge dynamic info");
@@ -406,6 +407,7 @@ const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
} else {
// = first.base + (first.byProperty + second.byProperty)
// need to merge first and second byValues
/** @type {Map<string, T & O>} */
const newByValues = new Map(firstEntry.byValues);
for (const [key, value] of /** @type {ByValues} */ (
secondEntry.byValues
@@ -444,7 +446,9 @@ const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
byValues: secondEntry.byValues
};
}
/** @type {O[keyof O] | T[keyof T] | (T[keyof T] & O[keyof O]) | (T[keyof T] | undefined)[] | (O[keyof O] | undefined)[] | (O[keyof O] | T[keyof T] | undefined)[] | undefined} */
let newBase;
/** @type {Map<string, (T & O) | O[keyof O] | (O[keyof O] | undefined)[] | ((T & O) | undefined)[] | (T & O & O[keyof O]) | ((T & O) | O[keyof O] | undefined)[] | undefined>} */
const intermediateByValues = new Map(firstEntry.byValues);
for (const [key, value] of intermediateByValues) {
intermediateByValues.set(
@@ -485,6 +489,7 @@ const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
`${firstEntry.byProperty} and ${secondEntry.byProperty} for a single property is not supported`
);
}
/** @type {Map<string, (T & O) | O[keyof O] | (O[keyof O] | undefined)[] | (T & O & O[keyof O]) | ((T & O) | undefined)[] | ((T & O) | O[keyof O] | undefined)[] | undefined>} */
const newByValues = new Map(intermediateByValues);
for (const [key, value] of /** @type {ByValues} */ (
secondEntry.byValues

View File

@@ -5,6 +5,7 @@
"use strict";
const { getFullModuleName } = require("../ids/IdHelpers");
const { compareRuntime } = require("./runtime");
/** @typedef {import("../Chunk")} Chunk */
@@ -13,6 +14,7 @@ const { compareRuntime } = require("./runtime");
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGraph").ModuleId} ModuleId */
/** @typedef {import("../ChunkGroup")} ChunkGroup */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency").DependencyLocation} DependencyLocation */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../dependencies/HarmonyImportSideEffectDependency")} HarmonyImportSideEffectDependency */
@@ -316,6 +318,19 @@ const compareModulesByIdOrIdentifier = (chunkGraph, a, b) => {
return compareIds(a.identifier(), b.identifier());
};
/**
* Compare modules by their full name. This differs from comparing by identifier in that the values have been normalized to be relative to the compiler context.
* @param {{ context: string, root: object }} compiler the compiler, used for context and cache
* @param {Module} a module
* @param {Module} b module
* @returns {-1 | 0 | 1} compare result
*/
const compareModulesByFullName = (compiler, a, b) => {
const aName = getFullModuleName(a, compiler.context, compiler.root);
const bName = getFullModuleName(b, compiler.context, compiler.root);
return compareIds(aName, bName);
};
/**
* @param {ChunkGraph} chunkGraph the chunk graph
* @param {Chunk} a chunk
@@ -519,7 +534,7 @@ const sortWithSourceOrder = (
dependencySourceOrderMap,
onDependencyReSort
) => {
/** @type {{dep: Dependency, main: number, sub: number}[]} */
/** @type {{ dep: Dependency, main: number, sub: number }[]} */
const withSourceOrder = [];
/** @type {number[]} */
const positions = [];
@@ -590,6 +605,11 @@ module.exports.compareIterables = compareIterables;
module.exports.compareLocations = compareLocations;
/** @type {ParameterizedComparator<Compiler, Module>} */
module.exports.compareModulesByFullName = createCachedParameterizedComparator(
compareModulesByFullName
);
/** @type {ParameterizedComparator<ChunkGraph, Module>} */
module.exports.compareModulesById =
createCachedParameterizedComparator(compareModulesById);

View File

@@ -252,10 +252,10 @@ const itemsToRegexp = (itemsArr) => {
// special case for 2 items with common suffix
if (finishedItems.length === 0 && items.size === 2) {
/** @type {Iterator<string>} */
/** @type {SetIterator<string>} */
const it = items[Symbol.iterator]();
const a = it.next().value;
const b = it.next().value;
const a = /** @type {string} */ (it.next().value);
const b = /** @type {string} */ (it.next().value);
if (a.length > 0 && b.length > 0 && a.slice(-1) === b.slice(-1)) {
return `${itemsToRegexp([a.slice(0, -1), b.slice(0, -1)])}${quoteMeta(
a.slice(-1)

View File

@@ -110,7 +110,7 @@ function findNewName(oldName, usedNamed1, usedNamed2, extraInfo) {
// Remove uncool stuff
extraInfo = extraInfo.replace(
/\.+\/|(\/index)?\.([a-zA-Z0-9]{1,4})($|\s|\?)|\s*\+\s*\d+\s*modules/g,
/\.+\/|(?:\/index)?\.[a-zA-Z0-9]{1,4}(?:$|\s|\?)|\s*\+\s*\d+\s*modules/g,
""
);
@@ -199,9 +199,10 @@ const RESERVED_NAMES = new Set(
);
/** @typedef {{ usedNames: UsedNames, alreadyCheckedScopes: ScopeSet }} ScopeInfo */
/** @typedef {Map<string, ScopeInfo>} UsedNamesInScopeInfo */
/**
* @param {Map<string, ScopeInfo>} usedNamesInScopeInfo used names in scope info
* @param {UsedNamesInScopeInfo} usedNamesInScopeInfo used names in scope info
* @param {string} module module identifier
* @param {string} id export id
* @returns {ScopeInfo} info

View File

@@ -20,7 +20,7 @@ const preserveCamelCase = (string) => {
for (let i = 0; i < result.length; i++) {
const character = result[i];
if (isLastCharLower && /[\p{Lu}]/u.test(character)) {
if (isLastCharLower && /\p{Lu}/u.test(character)) {
result = `${result.slice(0, i)}-${result.slice(i)}`;
isLastCharLower = false;
isLastLastCharUpper = isLastCharUpper;
@@ -29,7 +29,7 @@ const preserveCamelCase = (string) => {
} else if (
isLastCharUpper &&
isLastLastCharUpper &&
/[\p{Ll}]/u.test(character)
/\p{Ll}/u.test(character)
) {
result = `${result.slice(0, i - 1)}-${result.slice(i - 1)}`;
isLastLastCharUpper = isLastCharUpper;
@@ -84,6 +84,7 @@ module.exports.camelCase = (input) => {
* @returns {string[]} results
*/
module.exports.cssExportConvention = (input, convention) => {
/** @type {Set<string>} */
const set = new Set();
if (typeof convention === "function") {
set.add(convention(input));

View File

@@ -100,6 +100,7 @@ const subtractSizeFrom = (total, size) => {
* @returns {Sizes} total size
*/
const sumSize = (nodes) => {
/** @type {Sizes} */
const sum = Object.create(null);
for (const node of nodes) {
addSizeTo(sum, node.size);
@@ -145,7 +146,7 @@ const isTooSmall = (size, minSize) => {
* @returns {Types} set of types that are too small
*/
const getTooSmallTypes = (size, minSize) => {
/** @typedef {Types} */
/** @type {Types} */
const types = new Set();
for (const key of Object.keys(size)) {
const s = size[key];
@@ -223,9 +224,13 @@ class Group {
* @returns {Node<T>[] | undefined} removed nodes
*/
popNodes(filter) {
/** @type {Node<T>[]} */
const newNodes = [];
/** @type {Similarities} */
const newSimilarities = [];
/** @type {Node<T>[]} */
const resultNodes = [];
/** @type {undefined | Node<T>} */
let lastNode;
for (let i = 0; i < this.nodes.length; i++) {
const node = this.nodes[i];
@@ -260,6 +265,7 @@ const getSimilarities = (nodes) => {
// calculate similarities between lexically adjacent nodes
/** @type {Similarities} */
const similarities = [];
/** @type {undefined | Node<T>} */
let last;
for (const node of nodes) {
if (last !== undefined) {
@@ -399,6 +405,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
// going minSize from left and right
// at least one node need to be included otherwise we get stuck
let left = 1;
/** @type {Sizes} */
const leftSize = Object.create(null);
addSizeTo(leftSize, group.nodes[0].size);
while (left < group.nodes.length && isTooSmall(leftSize, minSize)) {
@@ -406,6 +413,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
left++;
}
let right = group.nodes.length - 2;
/** @type {Sizes} */
const rightSize = Object.create(null);
addSizeTo(rightSize, group.nodes[group.nodes.length - 1].size);
while (right >= 0 && isTooSmall(rightSize, minSize)) {
@@ -425,6 +433,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
if (left - 1 > right) {
// We try to remove some problematic nodes to "fix" that
/** @type {Sizes} */
let prevSize;
if (right < group.nodes.length - left) {
subtractSizeFrom(rightSize, group.nodes[right + 1].size);
@@ -490,6 +499,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
// create two new groups for left and right area
// and queue them up
/** @type {Node<T>[]} */
const rightNodes = [group.nodes[right + 1]];
/** @type {Similarities} */
const rightSimilarities = [];
@@ -501,6 +511,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
}
queue.push(new Group(rightNodes, rightSimilarities));
/** @type {Node<T>[]} */
const leftNodes = [group.nodes[0]];
/** @type {Similarities} */
const leftSimilarities = [];
@@ -523,6 +534,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
});
// give every group a name
/** @type {Set<string>} */
const usedNames = new Set();
for (let i = 0; i < result.length; i++) {
const group = result[i];

View File

@@ -10,16 +10,10 @@ const urlUtils = require("url");
const { isAbsolute, join } = require("./fs");
/** @typedef {import("./fs").InputFileSystem} InputFileSystem */
/**
* @typedef {(input: string | Buffer<ArrayBufferLike>, resourcePath: string, fs: InputFileSystem) => Promise<{source: string | Buffer<ArrayBufferLike>, sourceMap: string | RawSourceMap | undefined, fileDependencies: string[]}>} SourceMapExtractorFunction
*/
/** @typedef {string | Buffer<ArrayBufferLike>} StringOrBuffer */
/** @typedef {(input: StringOrBuffer, resourcePath: string, fs: InputFileSystem) => Promise<{ source: StringOrBuffer, sourceMap: string | RawSourceMap | undefined, fileDependencies: string[] }>} SourceMapExtractorFunction */
/** @typedef {import("webpack-sources").RawSourceMap} RawSourceMap */
/**
* @typedef {(resourcePath: string) => Promise<string | Buffer<ArrayBufferLike>>} ReadResource
*/
/** @typedef {(resourcePath: string) => Promise<StringOrBuffer>} ReadResource */
/**
* @typedef {object} SourceMappingURL
@@ -52,6 +46,7 @@ const sourceMappingURLRegex = new RegExp(
*/
function getSourceMappingURL(code) {
const lines = code.split(/^/m);
/** @type {RegExpMatchArray | null | undefined} */
let match;
for (let i = lines.length - 1; i >= 0; i--) {
@@ -104,13 +99,14 @@ function isURL(value) {
* @param {ReadResource} readResource read resource function
* @param {string[]} possibleRequests array of possible file paths
* @param {string} errorsAccumulator accumulated error messages
* @returns {Promise<{path: string, data?: string}>} source content promise
* @returns {Promise<{ path: string, data?: string }>} source content promise
*/
async function fetchPathsFromURL(
readResource,
possibleRequests,
errorsAccumulator = ""
) {
/** @type {StringOrBuffer} */
let result;
try {
@@ -146,7 +142,7 @@ async function fetchPathsFromURL(
* @param {string} url source URL
* @param {string=} sourceRoot source root directory
* @param {boolean=} skipReading whether to skip reading file content
* @returns {Promise<{sourceURL: string, sourceContent?: string | Buffer<ArrayBufferLike>}>} source content promise
* @returns {Promise<{ sourceURL: string, sourceContent?: StringOrBuffer }>} source content promise
*/
async function fetchFromURL(
readResource,
@@ -181,9 +177,11 @@ async function fetchFromURL(
if (isAbsolute(url)) {
let sourceURL = path.normalize(url);
/** @type {undefined | StringOrBuffer} */
let sourceContent;
if (!skipReading) {
/** @type {string[]} */
const possibleRequests = [sourceURL];
if (url.startsWith("/")) {
@@ -203,6 +201,7 @@ async function fetchFromURL(
// 4. Relative path
const sourceURL = getAbsolutePath(context, url, sourceRoot || "");
/** @type {undefined | StringOrBuffer} */
let sourceContent;
if (!skipReading) {
@@ -214,10 +213,10 @@ async function fetchFromURL(
/**
* Extract source map from code content
* @param {string | Buffer<ArrayBufferLike>} stringOrBuffer The input code content as string or buffer
* @param {StringOrBuffer} stringOrBuffer The input code content as string or buffer
* @param {string} resourcePath The path to the resource file
* @param {ReadResource} readResource The read resource function
* @returns {Promise<{source: string | Buffer<ArrayBufferLike>, sourceMap: string | RawSourceMap | undefined}>} Promise resolving to extracted source map information
* @returns {Promise<{ source: StringOrBuffer, sourceMap: string | RawSourceMap | undefined }>} Promise resolving to extracted source map information
*/
async function extractSourceMap(stringOrBuffer, resourcePath, readResource) {
const input =

34
node_modules/webpack/lib/util/fs.js generated vendored
View File

@@ -8,7 +8,9 @@
const path = require("path");
/** @typedef {import("../../declarations/WebpackOptions").WatchOptions} WatchOptions */
/** @typedef {import("../FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */
/** @typedef {import("watchpack").Entry} Entry */
/** @typedef {import("watchpack").OnlySafeTimeEntry} OnlySafeTimeEntry */
/** @typedef {import("watchpack").ExistenceOnlyTimeEntry} ExistenceOnlyTimeEntry */
/**
* @template T
@@ -83,7 +85,7 @@ const path = require("path");
/** @typedef {(err: NodeJS.ErrnoException | null, result?: number) => void} NumberCallback */
/** @typedef {(err: NodeJS.ErrnoException | Error | null, result?: JsonObject) => void} ReadJsonCallback */
/** @typedef {Map<string, FileSystemInfoEntry | "ignore">} TimeInfoEntries */
/** @typedef {Map<string, Entry | OnlySafeTimeEntry | ExistenceOnlyTimeEntry | null | "ignore">} TimeInfoEntries */
/** @typedef {Set<string>} Changes */
/** @typedef {Set<string>} Removals */
@@ -201,7 +203,7 @@ const path = require("path");
/**
* @typedef {{
* (path: PathLike, options?: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined; } | BufferEncoding | null): string[],
* (path: PathLike, options?: { encoding: BufferEncoding | null, withFileTypes?: false | undefined, recursive?: boolean | undefined } | BufferEncoding | null): string[],
* (path: PathLike, options: { encoding: "buffer", withFileTypes?: false | undefined, recursive?: boolean | undefined } | "buffer"): Buffer[],
* (path: PathLike, options?: (ObjectEncodingOptions & { withFileTypes?: false | undefined, recursive?: boolean | undefined }) | BufferEncoding | null): string[] | Buffer[],
* (path: PathLike, options: ObjectEncodingOptions & { withFileTypes: true, recursive?: boolean | undefined }): Dirent[],
@@ -225,8 +227,8 @@ const path = require("path");
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined,
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats,
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats,
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats,
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined,
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats,
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined,
* }} StatSync
*/
@@ -246,8 +248,8 @@ const path = require("path");
* (path: PathLike, options: StatSyncOptions & { bigint: true, throwIfNoEntry: false }): IBigIntStats | undefined,
* (path: PathLike, options?: StatSyncOptions & { bigint?: false | undefined }): IStats,
* (path: PathLike, options: StatSyncOptions & { bigint: true }): IBigIntStats,
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats,
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined,
* (path: PathLike, options: StatSyncOptions & { bigint: boolean, throwIfNoEntry?: false | undefined }): IStats | IBigIntStats,
* (path: PathLike, options?: StatSyncOptions): IStats | IBigIntStats | undefined,
* }} LStatSync
*/
@@ -256,7 +258,7 @@ const path = require("path");
* (path: PathLike, options: EncodingOption, callback: StringCallback): void,
* (path: PathLike, options: BufferEncodingOption, callback: BufferCallback): void,
* (path: PathLike, options: EncodingOption, callback: StringOrBufferCallback): void,
* (path: PathLike, callback: StringCallback): void;
* (path: PathLike, callback: StringCallback): void,
* }} RealPath
*/
@@ -324,21 +326,14 @@ const path = require("path");
/**
* @typedef {{
* (file: PathLike, options: MakeDirectoryOptions & { recursive: true }, callback: StringCallback): void,
* (file: PathLike, options: Mode | (MakeDirectoryOptions & { recursive?: false | undefined; }) | null | undefined, callback: NoParamCallback): void,
* (file: PathLike, options: Mode | (MakeDirectoryOptions & { recursive?: false | undefined }) | null | undefined, callback: NoParamCallback): void,
* (file: PathLike, options: Mode | MakeDirectoryOptions | null | undefined, callback: StringCallback): void,
* (file: PathLike, callback: NoParamCallback): void,
* }} Mkdir
*/
/**
* @typedef {{ maxRetries?: number | undefined, recursive?: boolean | undefined, retryDelay?: number | undefined }} RmDirOptions
*/
/**
* @typedef {{
* (file: PathLike, callback: NoParamCallback): void,
* (file: PathLike, options: RmDirOptions, callback: NoParamCallback): void,
* }} Rmdir
* @typedef {{ (file: PathLike, callback: NoParamCallback): void }} Rmdir
*/
/**
@@ -405,7 +400,7 @@ const path = require("path");
*/
/**
* @typedef {FSImplementation & { write: (...args: EXPECTED_ANY[]) => EXPECTED_ANY; close?: (...args: EXPECTED_ANY[]) => EXPECTED_ANY }} CreateWriteStreamFSImplementation
* @typedef {FSImplementation & { write: (...args: EXPECTED_ANY[]) => EXPECTED_ANY, close?: (...args: EXPECTED_ANY[]) => EXPECTED_ANY }} CreateWriteStreamFSImplementation
*/
/**
@@ -422,7 +417,7 @@ const path = require("path");
/**
* @typedef {{
* (file: PathLike, flags: OpenMode | undefined, mode: Mode | undefined | null, callback: NumberCallback): void,
* (file: PathLike, flags: OpenMode | undefined, mode: Mode | undefined | null, callback: NumberCallback): void,
* (file: PathLike, flags: OpenMode | undefined, callback: NumberCallback): void,
* (file: PathLike, callback: NumberCallback): void,
* }} Open
@@ -613,6 +608,7 @@ const readJson = (fs, p, callback) => {
}
fs.readFile(p, (err, buf) => {
if (err) return callback(err);
/** @type {JsonObject} */
let data;
try {
data = JSON.parse(/** @type {Buffer} */ (buf).toString("utf8"));

View File

@@ -7,6 +7,7 @@
const Hash = require("../Hash");
const { digest, update } = require("./hash-digest");
/** @type {number} */
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
@@ -17,8 +18,11 @@ class BatchedHash extends Hash {
*/
constructor(hash) {
super();
/** @type {undefined | string} */
this.string = undefined;
/** @type {undefined | Encoding} */
this.encoding = undefined;
/** @type {Hash} */
this.hash = hash;
}

View File

@@ -15,7 +15,7 @@ const BULK_SIZE = 3;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
/** @type {{[key: string]: Map<string, string>}} */
/** @type {{ [key: string]: Map<string, string> }} */
const digestCaches = {};
class BulkUpdateHash extends Hash {
@@ -25,14 +25,20 @@ class BulkUpdateHash extends Hash {
*/
constructor(hashOrFactory, hashKey) {
super();
/** @type {undefined | string} */
this.hashKey = hashKey;
if (typeof hashOrFactory === "function") {
/** @type {undefined | HashFactory} */
this.hashFactory = hashOrFactory;
/** @type {undefined | Hash} */
this.hash = undefined;
} else {
/** @type {undefined | HashFactory} */
this.hashFactory = undefined;
/** @type {undefined | Hash} */
this.hash = hashOrFactory;
}
/** @type {string} */
this.buffer = "";
}
@@ -103,6 +109,7 @@ class BulkUpdateHash extends Hash {
* @returns {string | Buffer} digest
*/
digest(encoding) {
/** @type {undefined | Map<string, string | Buffer>} */
let digestCache;
const buffer = this.buffer;
if (this.hash === undefined) {

View File

@@ -49,6 +49,7 @@ const encode = (buffer, base) => {
value = (value << EIGHT) | BigInt(buffer[i]);
}
// Convert to baseX string efficiently using array
/** @type {string[]} */
const digits = [];
if (value === ZERO) return ENCODE_TABLE[base][0];
while (value > ZERO) {

View File

@@ -13,6 +13,14 @@ const Hash = require("../Hash");
// ~3 makes sure that it's always a block of 4 chars, so avoid partially encoded bytes for base64
const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;
/**
* @typedef {object} WasmExports
* @property {WebAssembly.Memory} memory
* @property {() => void} init
* @property {(length: number) => void} update
* @property {(length: number) => void} final
*/
class WasmHash extends Hash {
/**
* @param {WebAssembly.Instance} instance wasm instance
@@ -23,13 +31,19 @@ class WasmHash extends Hash {
constructor(instance, instancesPool, chunkSize, digestSize) {
super();
const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
const exports = /** @type {WasmExports} */ (instance.exports);
exports.init();
/** @type {WasmExports} */
this.exports = exports;
/** @type {Buffer} */
this.mem = Buffer.from(exports.memory.buffer, 0, 65536);
/** @type {number} */
this.buffered = 0;
/** @type {WebAssembly.Instance[]} */
this.instancesPool = instancesPool;
/** @type {number} */
this.chunkSize = chunkSize;
/** @type {number} */
this.digestSize = digestSize;
}
@@ -85,6 +99,7 @@ class WasmHash extends Hash {
*/
_updateWithShortString(data, encoding) {
const { exports, buffered, mem, chunkSize } = this;
/** @type {number} */
let endPos;
if (data.length < 70) {
// eslint-disable-next-line unicorn/text-encoding-identifier-case
@@ -212,5 +227,6 @@ const create = (wasmModule, instancesPool, chunkSize, digestSize) => {
);
};
create.MAX_SHORT_STRING = MAX_SHORT_STRING;
module.exports = create;
module.exports.MAX_SHORT_STRING = MAX_SHORT_STRING;

View File

@@ -6,7 +6,7 @@
const path = require("path");
const WINDOWS_ABS_PATH_REGEXP = /^[a-zA-Z]:[\\/]/;
const WINDOWS_ABS_PATH_REGEXP = /^[a-z]:[\\/]/i;
const SEGMENTS_SPLIT_REGEXP = /([|!])/;
const WINDOWS_PATH_SEPARATOR_REGEXP = /\\/g;
@@ -165,7 +165,8 @@ const makeCacheable = (realFn) => {
* @returns {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} cacheable function with context
*/
const makeCacheableWithContext = (fn) => {
/** @type {WeakMap<AssociatedObjectForCache, Map<string, Map<string, string>>>} */
/** @typedef {Map<string, Map<string, string>>} InnerCache */
/** @type {WeakMap<AssociatedObjectForCache, InnerCache>} */
const cache = new WeakMap();
/** @type {MakeCacheableWithContextResult & { bindCache: BindCacheForContext, bindContextCache: BindContextCacheForContext }} */
@@ -178,6 +179,7 @@ const makeCacheableWithContext = (fn) => {
cache.set(associatedObjectForCache, innerCache);
}
/** @type {undefined | string} */
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
@@ -196,6 +198,7 @@ const makeCacheableWithContext = (fn) => {
/** @type {BindCacheForContext} */
cachedFn.bindCache = (associatedObjectForCache) => {
/** @type {undefined | InnerCache} */
let innerCache;
if (associatedObjectForCache) {
innerCache = cache.get(associatedObjectForCache);
@@ -213,6 +216,7 @@ const makeCacheableWithContext = (fn) => {
* @returns {string} the returned relative path
*/
const boundFn = (context, identifier) => {
/** @type {undefined | string} */
let cachedResult;
let innerSubCache = innerCache.get(context);
if (innerSubCache === undefined) {
@@ -234,6 +238,7 @@ const makeCacheableWithContext = (fn) => {
/** @type {BindContextCacheForContext} */
cachedFn.bindContextCache = (context, associatedObjectForCache) => {
/** @type {undefined | Map<string, string>} */
let innerSubCache;
if (associatedObjectForCache) {
let innerCache = cache.get(associatedObjectForCache);

View File

@@ -16,7 +16,7 @@ module.exports.createMagicCommentContext = () =>
codeGeneration: { strings: false, wasm: false }
});
module.exports.webpackCommentRegExp = new RegExp(
/(^|\W)webpack[A-Z]{1,}[A-Za-z]{1,}:/
/(^|\W)webpack[A-Z][A-Za-z]+:/
);
// regexp to match at least one "magic comment"

View File

@@ -5,7 +5,7 @@
"use strict";
const SAFE_IDENTIFIER = /^[_a-zA-Z$][_a-zA-Z$0-9]*$/;
const SAFE_IDENTIFIER = /^[_a-z$][_a-z$0-9]*$/i;
const RESERVED_IDENTIFIER = new Set([
"break",
"case",

View File

@@ -168,6 +168,7 @@ register(
deserialize({ read }) {
const source = new ReplaceSource(read(), read());
const len = read();
/** @type {number[]} */
const startEndBuffer = [];
for (let i = 0; i < len; i++) {
startEndBuffer.push(read(), read());

View File

@@ -10,7 +10,8 @@ const SortableSet = require("./SortableSet");
/** @typedef {import("../Compilation")} Compilation */
/** @typedef {import("../Entrypoint").EntryOptions} EntryOptions */
/** @typedef {string | SortableSet<string> | undefined} RuntimeSpec */
/** @typedef {SortableSet<string>} RuntimeSpecSortableSet */
/** @typedef {string | RuntimeSpecSortableSet | undefined} RuntimeSpec */
/** @typedef {RuntimeSpec | boolean} RuntimeCondition */
/**
@@ -20,7 +21,9 @@ const SortableSet = require("./SortableSet");
* @returns {RuntimeSpec} runtime
*/
const getEntryRuntime = (compilation, name, options) => {
/** @type {EntryOptions["dependOn"]} */
let dependOn;
/** @type {EntryOptions["runtime"]} */
let runtime;
if (options) {
({ dependOn, runtime } = options);
@@ -194,6 +197,7 @@ const mergeRuntime = (a, b) => {
return a;
} else if (typeof a === "string") {
if (typeof b === "string") {
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet();
set.add(a);
set.add(b);
@@ -201,16 +205,19 @@ const mergeRuntime = (a, b) => {
} else if (b.has(a)) {
return b;
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet(b);
set.add(a);
return set;
}
if (typeof b === "string") {
if (a.has(b)) return a;
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet(a);
set.add(b);
return set;
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet(a);
for (const item of b) set.add(item);
if (set.size === a.size) return a;
@@ -271,14 +278,17 @@ const mergeRuntimeOwned = (a, b) => {
if (typeof b === "string") {
return b;
}
/** @type {RuntimeSpecSortableSet} */
return new SortableSet(b);
} else if (typeof a === "string") {
if (typeof b === "string") {
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet();
set.add(a);
set.add(b);
return set;
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet(b);
set.add(a);
return set;
@@ -315,6 +325,7 @@ const intersectRuntime = (a, b) => {
if (a.has(b)) return b;
return;
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet();
for (const item of b) {
if (a.has(item)) set.add(item);
@@ -354,10 +365,12 @@ const subtractRuntime = (a, b) => {
if (item !== b) return item;
}
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet(a);
set.delete(b);
return set;
}
/** @type {RuntimeSpecSortableSet} */
const set = new SortableSet();
for (const item of a) {
if (!b.has(item)) set.add(item);
@@ -394,6 +407,7 @@ const filterRuntime = (runtime, filter) => {
if (typeof runtime === "string") return filter(runtime);
let some = false;
let every = true;
/** @type {RuntimeSpec} */
let result;
for (const r of runtime) {
const v = filter(r);

View File

@@ -266,6 +266,7 @@ module.exports.parseRange = (str) => {
const items = [];
const r = /[-0-9A-Za-z]\s+/g;
var start = 0;
/** @type {RegExpExecArray | null} */
var match;
while ((match = r.exec(str))) {
const end = match.index + 1;

View File

@@ -9,7 +9,7 @@ const memoize = require("./memoize");
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_END_OPERATION_TYPE} MEASURE_END_OPERATION */
/** @typedef {import("../serialization/BinaryMiddleware").MEASURE_START_OPERATION_TYPE} MEASURE_START_OPERATION */
/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/Hash").HashFunction} HashFunction */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/**
@@ -111,7 +111,7 @@ module.exports = {
/**
* @template D, S, C
* @param {IntermediateFileSystem} fs filesystem
* @param {string | Hash} hashFunction hash function to use
* @param {HashFunction} hashFunction hash function to use
* @returns {Serializer<D, S, C>} file serializer
*/
createFileSerializer: (fs, hashFunction) => {