Enhance refactor commands with controller-aware Route() updates and fix code quality violations

Add semantic token highlighting for 'that' variable and comment file references in VS Code extension
Add Phone_Text_Input and Currency_Input components with formatting utilities
Implement client widgets, form standardization, and soft delete functionality
Add modal scroll lock and update documentation
Implement comprehensive modal system with form integration and validation
Fix modal component instantiation using jQuery plugin API
Implement modal system with responsive sizing, queuing, and validation support
Implement form submission with validation, error handling, and loading states
Implement country/state selectors with dynamic data loading and Bootstrap styling
Revert Rsx::Route() highlighting in Blade/PHP files
Target specific PHP scopes for Rsx::Route() highlighting in Blade
Expand injection selector for Rsx::Route() highlighting
Add custom syntax highlighting for Rsx::Route() and Rsx.Route() calls
Update jqhtml packages to v2.2.165
Add bundle path validation for common mistakes (development mode only)
Create Ajax_Select_Input widget and Rsx_Reference_Data controller
Create Country_Select_Input widget with default country support
Initialize Tom Select on Select_Input widgets
Add Tom Select bundle for enhanced select dropdowns
Implement ISO 3166 geographic data system for country/region selection
Implement widget-based form system with disabled state support

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
root
2025-10-30 06:21:56 +00:00
parent e678b987c2
commit f6ac36c632
5683 changed files with 5854736 additions and 22329 deletions

View File

@@ -23,9 +23,9 @@ module.exports.equals = (a, b) => {
/**
* Partition an array by calling a predicate function on each value.
* @template T
* @param {Array<T>} arr Array of values to be partitioned
* @param {T[]} arr Array of values to be partitioned
* @param {(value: T) => boolean} fn Partition function which partitions based on truthiness of result.
* @returns {[Array<T>, Array<T>]} returns the values of `arr` partitioned into two new arrays based on fn predicate.
* @returns {[T[], T[]]} returns the values of `arr` partitioned into two new arrays based on fn predicate.
*/
module.exports.groupBy = (
// eslint-disable-next-line default-param-last
@@ -34,9 +34,9 @@ module.exports.groupBy = (
) =>
arr.reduce(
/**
* @param {[Array<T>, Array<T>]} groups An accumulator storing already partitioned values returned from previous call.
* @param {[T[], T[]]} groups An accumulator storing already partitioned values returned from previous call.
* @param {T} value The value of the current element
* @returns {[Array<T>, Array<T>]} returns an array of partitioned groups accumulator resulting from calling a predicate on the current value.
* @returns {[T[], T[]]} returns an array of partitioned groups accumulator resulting from calling a predicate on the current value.
*/
(groups, value) => {
groups[fn(value) ? 0 : 1].push(value);

View File

@@ -21,6 +21,7 @@ let inHandleResult = 0;
* @callback Callback
* @param {(WebpackError | null)=} err
* @param {(T | null)=} result
* @returns {void}
*/
/**

View File

@@ -5,14 +5,31 @@
"use strict";
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
class Hash {
/* istanbul ignore next */
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @abstract
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
const AbstractMethodError = require("../AbstractMethodError");
@@ -24,8 +41,21 @@ class Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @abstract
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
const AbstractMethodError = require("../AbstractMethodError");

View File

@@ -24,7 +24,7 @@ const merge = (targetSet, toMerge) => {
/**
* @template T
* @param {Set<Iterable<T>>} targetSet set where iterables should be added
* @param {Array<LazySet<T>>} toDeepMerge lazy sets to be flattened
* @param {LazySet<T>[]} toDeepMerge lazy sets to be flattened
* @returns {void}
*/
const flatten = (targetSet, toDeepMerge) => {
@@ -59,7 +59,7 @@ class LazySet {
this._set = new Set(iterable);
/** @type {Set<Iterable<T>>} */
this._toMerge = new Set();
/** @type {Array<LazySet<T>>} */
/** @type {LazySet<T>[]} */
this._toDeepMerge = [];
this._needMerge = false;
this._deopt = false;

View File

@@ -5,8 +5,6 @@
"use strict";
new Map().entries();
/**
* The StackedCacheMap is a data structure designed as an alternative to a Map
* in situations where you need to handle multiple item additions and

View File

@@ -112,9 +112,14 @@ class TupleSet {
* @returns {Iterator<[T, V, ...EXPECTED_ANY]>} iterator
*/
[Symbol.iterator]() {
/**
* @template T, V
* @typedef {MapIterator<[T, InnerMap<T, V> | Set<V>]>} IteratorStack
*/
// This is difficult to type because we can have a map inside a map inside a map, etc. where the end is a set (each key is an argument)
// But in basic use we only have 2 arguments in our methods, so we have `Map<K, Set<V>>`
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>[]} */
/** @type {IteratorStack<T, V>[]} */
const iteratorStack = [];
/** @type {[T?, V?, ...EXPECTED_ANY]} */
const tuple = [];
@@ -122,7 +127,7 @@ class TupleSet {
let currentSetIterator;
/**
* @param {MapIterator<[T, InnerMap<T, V> | Set<V>]>} it iterator
* @param {IteratorStack<T, V>} it iterator
* @returns {boolean} result
*/
const next = (it) => {
@@ -131,7 +136,7 @@ class TupleSet {
if (iteratorStack.length === 0) return false;
tuple.pop();
return next(
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>} */
/** @type {IteratorStack<T, V>} */
(iteratorStack.pop())
);
}
@@ -155,7 +160,7 @@ class TupleSet {
tuple.pop();
if (
!next(
/** @type {MapIterator<[T, InnerMap<T, V> | Set<V>]>} */
/** @type {IteratorStack<T, V>} */
(iteratorStack.pop())
)
) {

View File

@@ -5,7 +5,6 @@
"use strict";
/** @typedef {import("./fs").InputFileSystem} InputFileSystem */
/** @typedef {(error: Error|null, result?: Buffer) => void} ErrorFirstCallback */
const backSlashCharCode = "\\".charCodeAt(0);

View File

@@ -25,7 +25,7 @@ const isWeakKey = (thing) => typeof thing === "object" && thing !== null;
/**
* @template {unknown[]} T
* @typedef {T extends readonly (infer ElementType)[] ? ElementType : never} ArrayElement
* @typedef {T extends ReadonlyArray<infer ElementType> ? ElementType : never} ArrayElement
*/
/**

View File

@@ -10,6 +10,8 @@
/** @typedef {import("../ModuleGraph")} ModuleGraph */
/** @typedef {import("../javascript/JavascriptParser").Range} Range */
/** @typedef {Range[]} IdRanges */
/**
* @summary Get the subset of ids and their corresponding range in an id chain that should be re-rendered by webpack.
* Only those in the chain that are actually referring to namespaces or imports should be re-rendered.
@@ -18,7 +20,7 @@
* because minifiers treat quoted accessors differently. e.g. import { a } from "./module"; a["b"] vs a.b
* @param {string[]} untrimmedIds chained ids
* @param {Range} untrimmedRange range encompassing allIds
* @param {Range[] | undefined} ranges cumulative range of ids for each of allIds
* @param {IdRanges | undefined} ranges cumulative range of ids for each of allIds
* @param {ModuleGraph} moduleGraph moduleGraph
* @param {Dependency} dependency dependency
* @returns {{trimmedIds: string[], trimmedRange: Range}} computed trimmed ids and cumulative range of those ids

View File

@@ -92,7 +92,7 @@ const cachedSetProperty = (obj, property, value) => {
* @template T
* @typedef {object} ObjectParsedPropertyEntry
* @property {T[keyof T] | undefined} base base value
* @property {string | undefined} byProperty the name of the selector property
* @property {`by${string}` | undefined} byProperty the name of the selector property
* @property {ByValues | undefined} byValues value depending on selector property, merged with base
*/
@@ -105,7 +105,7 @@ const cachedSetProperty = (obj, property, value) => {
/**
* @template {object} T
* @typedef {{ byProperty: string, fn: DynamicFunction }} ParsedObjectDynamic
* @typedef {{ byProperty: `by${string}`, fn: DynamicFunction }} ParsedObjectDynamic
*/
/**
@@ -169,7 +169,7 @@ const parseObject = (obj) => {
for (const key of Object.keys(obj)) {
const entry = getInfo(/** @type {keyof T} */ (key));
if (entry.byProperty === undefined) {
entry.byProperty = byProperty;
entry.byProperty = /** @type {`by${string}`} */ (byProperty);
entry.byValues = new Map();
} else if (entry.byProperty !== byProperty) {
throw new Error(
@@ -196,7 +196,7 @@ const parseObject = (obj) => {
} else if (typeof byObj === "function") {
if (dynamicInfo === undefined) {
dynamicInfo = {
byProperty: key,
byProperty: /** @type {`by${string}`} */ (key),
fn: byObj
};
} else {
@@ -222,17 +222,16 @@ const parseObject = (obj) => {
/**
* @template {object} T
* @param {ParsedObjectStatic<T>} info static properties (key is property name)
* @param {{ byProperty: string, fn: DynamicFunction } | undefined} dynamicInfo dynamic part
* @param {{ byProperty: `by${string}`, fn: DynamicFunction } | undefined} dynamicInfo dynamic part
* @returns {T} the object
*/
const serializeObject = (info, dynamicInfo) => {
const obj = /** @type {T} */ ({});
const obj = /** @type {EXPECTED_ANY} */ ({});
// Setup byProperty structure
for (const entry of info.values()) {
if (entry.byProperty !== undefined) {
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
const byProperty = entry.byProperty;
const byObj = (obj[byProperty] = obj[byProperty] || {});
for (const byValue of /** @type {ByValues} */ (entry.byValues).keys()) {
byObj[byValue] = byObj[byValue] || {};
}
@@ -240,13 +239,12 @@ const serializeObject = (info, dynamicInfo) => {
}
for (const [key, entry] of info) {
if (entry.base !== undefined) {
obj[/** @type {keyof T} */ (key)] = entry.base;
obj[key] = entry.base;
}
// Fill byProperty structure
if (entry.byProperty !== undefined) {
const byProperty = /** @type {keyof T} */ (entry.byProperty);
const byObj = (obj[byProperty] =
obj[byProperty] || /** @type {TODO} */ ({}));
const byProperty = entry.byProperty;
const byObj = (obj[byProperty] = obj[byProperty] || {});
for (const byValue of Object.keys(byObj)) {
const value = getFromByValues(
/** @type {ByValues} */
@@ -258,8 +256,7 @@ const serializeObject = (info, dynamicInfo) => {
}
}
if (dynamicInfo !== undefined) {
/** @type {TODO} */
(obj)[dynamicInfo.byProperty] = dynamicInfo.fn;
obj[dynamicInfo.byProperty] = dynamicInfo.fn;
}
return obj;
};
@@ -384,7 +381,7 @@ const _cleverMerge = (first, second, internalCaching = false) => {
* @param {ObjectParsedPropertyEntry<T>} firstEntry a
* @param {ObjectParsedPropertyEntry<O>} secondEntry b
* @param {boolean} internalCaching should parsing of objects and nested merges be cached
* @returns {ObjectParsedPropertyEntry<TODO>} new entry
* @returns {ObjectParsedPropertyEntry<T> | ObjectParsedPropertyEntry<O> | ObjectParsedPropertyEntry<T & O>} new entry
*/
const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
switch (getValueType(secondEntry.base)) {
@@ -479,7 +476,7 @@ const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
if (!secondEntry.byProperty) {
// = first.base + (first.byProperty + second.base)
return {
base: newBase,
base: /** @type {T[keyof T] & O[keyof O]} */ (newBase),
byProperty: firstEntry.byProperty,
byValues: intermediateByValues
};
@@ -499,7 +496,7 @@ const mergeEntries = (firstEntry, secondEntry, internalCaching) => {
);
}
return {
base: newBase,
base: /** @type {T[keyof T] & O[keyof O]} */ (newBase),
byProperty: firstEntry.byProperty,
byValues: newByValues
};

View File

@@ -8,6 +8,7 @@
const { compareRuntime } = require("./runtime");
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Chunk").ChunkName} ChunkName */
/** @typedef {import("../Chunk").ChunkId} ChunkId */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../ChunkGraph").ModuleId} ModuleId */
@@ -490,10 +491,7 @@ const compareChunksNatural = (chunkGraph) => {
const cmpFn = module.exports.compareModulesById(chunkGraph);
const cmpIterableFn = compareIterables(cmpFn);
return concatComparators(
compareSelect(
(chunk) => /** @type {string|number} */ (chunk.name),
compareIds
),
compareSelect((chunk) => /** @type {ChunkName} */ (chunk.name), compareIds),
compareSelect((chunk) => chunk.runtime, compareRuntime),
compareSelect(
/**

View File

@@ -56,28 +56,30 @@ const compileBooleanMatcherFromLists = (positiveItems, negativeItems) => {
return (value) => `!/^${negativeRegexp}$/.test(${value})`;
};
/** @typedef {string[][]} ListOfCommonItems */
/**
* @param {Set<string>} itemsSet items set
* @param {(str: string) => string | false} getKey get key function
* @param {(str: Array<string>) => boolean} condition condition
* @returns {Array<Array<string>>} list of common items
* @param {(str: string[]) => boolean} condition condition
* @returns {ListOfCommonItems} list of common items
*/
const popCommonItems = (itemsSet, getKey, condition) => {
/** @type {Map<string, Array<string>>} */
/** @type {Map<string, string[]>} */
const map = new Map();
for (const item of itemsSet) {
const key = getKey(item);
if (key) {
let list = map.get(key);
if (list === undefined) {
/** @type {Array<string>} */
/** @type {string[]} */
list = [];
map.set(key, list);
}
list.push(item);
}
}
/** @type {Array<Array<string>>} */
/** @type {ListOfCommonItems} */
const result = [];
for (const list of map.values()) {
if (condition(list)) {
@@ -91,7 +93,7 @@ const popCommonItems = (itemsSet, getKey, condition) => {
};
/**
* @param {Array<string>} items items
* @param {string[]} items items
* @returns {string} common prefix
*/
const getCommonPrefix = (items) => {
@@ -109,7 +111,7 @@ const getCommonPrefix = (items) => {
};
/**
* @param {Array<string>} items items
* @param {string[]} items items
* @returns {string} common suffix
*/
const getCommonSuffix = (items) => {
@@ -127,14 +129,14 @@ const getCommonSuffix = (items) => {
};
/**
* @param {Array<string>} itemsArr array of items
* @param {string[]} itemsArr array of items
* @returns {string} regexp
*/
const itemsToRegexp = (itemsArr) => {
if (itemsArr.length === 1) {
return quoteMeta(itemsArr[0]);
}
/** @type {Array<string>} */
/** @type {string[]} */
const finishedItems = [];
// merge single char items: (a|b|c|d|ef) => ([abcd]|ef)

View File

@@ -12,7 +12,6 @@ const Template = require("../Template");
/** @typedef {import("eslint-scope").Variable} Variable */
/** @typedef {import("estree").Node} Node */
/** @typedef {import("../javascript/JavascriptParser").Range} Range */
/** @typedef {import("../javascript/JavascriptParser").Program} Program */
/** @typedef {Set<string>} UsedNames */
const DEFAULT_EXPORT = "__WEBPACK_DEFAULT_EXPORT__";
@@ -72,7 +71,7 @@ const getPathInAst = (ast, node) => {
}
} else if (ast && typeof ast === "object") {
const keys =
/** @type {Array<keyof Node>} */
/** @type {(keyof Node)[]} */
(Object.keys(ast));
for (let i = 0; i < keys.length; i++) {
// We are making the faster check in `enterNode` using `n.range`

View File

@@ -8,8 +8,7 @@
const memoize = require("./memoize");
/** @typedef {import("schema-utils").Schema} Schema */
/** @typedef {import("schema-utils/declarations/validate").ValidationErrorConfiguration} ValidationErrorConfiguration */
/** @typedef {import("./fs").JsonObject} JsonObject */
/** @typedef {import("schema-utils").ValidationErrorConfiguration} ValidationErrorConfiguration */
const getValidate = memoize(() => require("schema-utils").validate);

View File

@@ -7,9 +7,10 @@
const Hash = require("./Hash");
/** @typedef {import("../../declarations/WebpackOptions").HashDigest} Encoding */
/** @typedef {import("../../declarations/WebpackOptions").HashFunction} HashFunction */
const BULK_SIZE = 2000;
const BULK_SIZE = 3;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
@@ -38,9 +39,22 @@ class BulkUpdateDecorator extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (
@@ -55,7 +69,11 @@ class BulkUpdateDecorator extends Hash {
this.hash.update(this.buffer);
this.buffer = "";
}
this.hash.update(data, inputEncoding);
if (typeof data === "string" && inputEncoding) {
this.hash.update(data, inputEncoding);
} else {
this.hash.update(data);
}
} else {
this.buffer += data;
if (this.buffer.length > BULK_SIZE) {
@@ -71,8 +89,19 @@ class BulkUpdateDecorator extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
let digestCache;
@@ -91,9 +120,19 @@ class BulkUpdateDecorator extends Hash {
if (buffer.length > 0) {
this.hash.update(buffer);
}
if (!encoding) {
const result = this.hash.digest();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
const digestResult = this.hash.digest(encoding);
// Compatibility with the old hash library
const result =
typeof digestResult === "string" ? digestResult : digestResult.toString();
typeof digestResult === "string"
? digestResult
: /** @type {NodeJS.TypedArray} */ (digestResult).toString();
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
@@ -110,9 +149,22 @@ class DebugHash extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (typeof data !== "string") data = data.toString("utf8");
@@ -132,8 +184,19 @@ class DebugHash extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
@@ -186,14 +249,21 @@ module.exports = (algorithm) => {
case "native-md4":
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() => /** @type {typeof import("crypto")} */ (crypto).createHash("md4"),
() =>
/** @type {Hash} */ (
/** @type {typeof import("crypto")} */
(crypto).createHash("md4")
),
"md4"
);
default:
if (crypto === undefined) crypto = require("crypto");
return new BulkUpdateDecorator(
() =>
/** @type {typeof import("crypto")} */ (crypto).createHash(algorithm),
/** @type {Hash} */ (
/** @type {typeof import("crypto")} */
(crypto).createHash(algorithm)
),
algorithm
);
}

39
node_modules/webpack/lib/util/dataURL.js generated vendored Executable file
View File

@@ -0,0 +1,39 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Natsu @xiaoxiaojx
*/
"use strict";
// data URL scheme: "data:text/javascript;charset=utf-8;base64,some-string"
// http://www.ietf.org/rfc/rfc2397.txt
const URIRegEx = /^data:([^;,]+)?((?:;[^;,]+)*?)(?:;(base64)?)?,(.*)$/i;
/**
* @param {string} uri data URI
* @returns {Buffer | null} decoded data
*/
const decodeDataURI = (uri) => {
const match = URIRegEx.exec(uri);
if (!match) return null;
const isBase64 = match[3];
const body = match[4];
if (isBase64) {
return Buffer.from(body, "base64");
}
// CSS allows to use `data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg"><rect width="100%" height="100%" style="stroke: rgb(223,224,225); stroke-width: 2px; fill: none; stroke-dasharray: 6px 3px" /></svg>`
// so we return original body if we can't `decodeURIComponent`
try {
return Buffer.from(decodeURIComponent(body), "ascii");
} catch (_) {
return Buffer.from(body, "ascii");
}
};
module.exports = {
URIRegEx,
decodeDataURI
};

View File

@@ -75,38 +75,37 @@ const DISABLED_METHODS = [
/**
* @template T
* @typedef {Set<T> & {[Symbol.isConcatSpreadable]?: boolean} & { push?: (...items: T[]) => void } & { [P in DISABLED_METHODS_NAMES]?: () => void } & { [P in COPY_METHODS_NAMES]?: () => TODO }} SetWithDeprecatedArrayMethods
* @typedef {Set<T> & { [Symbol.isConcatSpreadable]: boolean } & { push: (...items: T[]) => void, length?: number } & { [P in DISABLED_METHODS_NAMES]: () => void } & { [P in COPY_METHODS_NAMES]: P extends keyof Array<T> ? () => Pick<Array<T>, P> : never }} SetWithDeprecatedArrayMethods
*/
/**
* @template T
* @param {SetWithDeprecatedArrayMethods<T>} set new set
* @param {Set<T>} set new set
* @param {string} name property name
* @returns {void}
*/
module.exports.arrayToSetDeprecation = (set, name) => {
for (const method of COPY_METHODS) {
if (set[method]) continue;
if (/** @type {SetWithDeprecatedArrayMethods<T>} */ (set)[method]) continue;
const d = createDeprecation(
`${name} was changed from Array to Set (using Array method '${method}' is deprecated)`,
"ARRAY_TO_SET"
);
/**
* @deprecated
* @this {Set<T>}
* @returns {number} count
*/
// eslint-disable-next-line func-names
set[method] = function () {
d();
// eslint-disable-next-line unicorn/prefer-spread
const array = Array.from(this);
return Array.prototype[/** @type {keyof COPY_METHODS} */ (method)].apply(
array,
// eslint-disable-next-line prefer-rest-params
arguments
);
};
/** @type {EXPECTED_ANY} */
(set)[method] =
// eslint-disable-next-line func-names
function () {
d();
// eslint-disable-next-line unicorn/prefer-spread
const array = Array.from(this);
return Array.prototype[
/** @type {keyof COPY_METHODS} */ (method)
].apply(
array,
// eslint-disable-next-line prefer-rest-params
arguments
);
};
}
const dPush = createDeprecation(
`${name} was changed from Array to Set (using Array method 'push' is deprecated)`,
@@ -120,12 +119,8 @@ module.exports.arrayToSetDeprecation = (set, name) => {
`${name} was changed from Array to Set (indexing Array is deprecated)`,
"ARRAY_TO_SET_INDEXER"
);
/**
* @deprecated
* @this {Set<T>}
* @returns {number} count
*/
set.push = function push() {
/** @type {SetWithDeprecatedArrayMethods<T>} */
(set).push = function push() {
dPush();
// eslint-disable-next-line prefer-rest-params, unicorn/prefer-spread
for (const item of Array.from(arguments)) {
@@ -134,9 +129,10 @@ module.exports.arrayToSetDeprecation = (set, name) => {
return this.size;
};
for (const method of DISABLED_METHODS) {
if (set[method]) continue;
if (/** @type {SetWithDeprecatedArrayMethods<T>} */ (set)[method]) continue;
set[method] = () => {
/** @type {SetWithDeprecatedArrayMethods<T>} */
(set)[method] = () => {
throw new Error(
`${name} was changed from Array to Set (using Array method '${method}' is not possible)`
);
@@ -191,13 +187,14 @@ module.exports.arrayToSetDeprecation = (set, name) => {
);
}
});
set[Symbol.isConcatSpreadable] = true;
/** @type {SetWithDeprecatedArrayMethods<T>} */
(set)[Symbol.isConcatSpreadable] = true;
};
/**
* @template T
* @param {string} name name
* @returns {{ new <T = any>(values?: readonly T[] | null): SetDeprecatedArray<T> }} SetDeprecatedArray
* @returns {{ new <T = any>(values?: ReadonlyArray<T> | null): SetDeprecatedArray<T> }} SetDeprecatedArray
*/
module.exports.createArrayToSetDeprecationSet = (name) => {
let initialized = false;
@@ -207,14 +204,15 @@ module.exports.createArrayToSetDeprecationSet = (name) => {
*/
class SetDeprecatedArray extends Set {
/**
* @param {readonly T[] | null=} items items
* @param {ReadonlyArray<T> | null=} items items
*/
constructor(items) {
super(items);
if (!initialized) {
initialized = true;
module.exports.arrayToSetDeprecation(
SetDeprecatedArray.prototype,
/** @type {SetWithDeprecatedArrayMethods<T>} */
(SetDeprecatedArray.prototype),
name
);
}

View File

@@ -70,9 +70,11 @@ const getName = (a, b, usedNames) => {
return a;
};
/** @typedef {Record<string, number>} Sizes */
/**
* @param {Record<string, number>} total total size
* @param {Record<string, number>} size single size
* @param {Sizes} total total size
* @param {Sizes} size single size
* @returns {void}
*/
const addSizeTo = (total, size) => {
@@ -82,8 +84,8 @@ const addSizeTo = (total, size) => {
};
/**
* @param {Record<string, number>} total total size
* @param {Record<string, number>} size single size
* @param {Sizes} total total size
* @param {Sizes} size single size
* @returns {void}
*/
const subtractSizeFrom = (total, size) => {
@@ -95,7 +97,7 @@ const subtractSizeFrom = (total, size) => {
/**
* @template T
* @param {Iterable<Node<T>>} nodes some nodes
* @returns {Record<string, number>} total size
* @returns {Sizes} total size
*/
const sumSize = (nodes) => {
const sum = Object.create(null);
@@ -106,8 +108,8 @@ const sumSize = (nodes) => {
};
/**
* @param {Record<string, number>} size size
* @param {Record<string, number>} maxSize minimum size
* @param {Sizes} size size
* @param {Sizes} maxSize minimum size
* @returns {boolean} true, when size is too big
*/
const isTooBig = (size, maxSize) => {
@@ -121,8 +123,8 @@ const isTooBig = (size, maxSize) => {
};
/**
* @param {Record<string, number>} size size
* @param {Record<string, number>} minSize minimum size
* @param {Sizes} size size
* @param {Sizes} minSize minimum size
* @returns {boolean} true, when size is too small
*/
const isTooSmall = (size, minSize) => {
@@ -136,8 +138,8 @@ const isTooSmall = (size, minSize) => {
};
/**
* @param {Record<string, number>} size size
* @param {Record<string, number>} minSize minimum size
* @param {Sizes} size size
* @param {Sizes} minSize minimum size
* @returns {Set<string>} set of types that are too small
*/
const getTooSmallTypes = (size, minSize) => {
@@ -166,7 +168,7 @@ const getNumberOfMatchingSizeTypes = (size, types) => {
};
/**
* @param {Record<string, number>} size size
* @param {Sizes} size size
* @param {Set<string>} types types
* @returns {number} selective size sum
*/
@@ -185,7 +187,7 @@ class Node {
/**
* @param {T} item item
* @param {string} key key
* @param {Record<string, number>} size size
* @param {Sizes} size size
*/
constructor(item, key, size) {
this.item = item;
@@ -194,14 +196,16 @@ class Node {
}
}
/** @typedef {number[]} Similarities */
/**
* @template T
*/
class Group {
/**
* @param {Node<T>[]} nodes nodes
* @param {number[] | null} similarities similarities between the nodes (length = nodes.length - 1)
* @param {Record<string, number>=} size size of the group
* @param {Similarities | null} similarities similarities between the nodes (length = nodes.length - 1)
* @param {Sizes=} size size of the group
*/
constructor(nodes, similarities, size) {
this.nodes = nodes;
@@ -228,7 +232,7 @@ class Group {
if (newNodes.length > 0) {
newSimilarities.push(
lastNode === this.nodes[i - 1]
? /** @type {number[]} */ (this.similarities)[i - 1]
? /** @type {Similarities} */ (this.similarities)[i - 1]
: similarity(/** @type {Node<T>} */ (lastNode).key, node.key)
);
}
@@ -247,11 +251,11 @@ class Group {
/**
* @template T
* @param {Iterable<Node<T>>} nodes nodes
* @returns {number[]} similarities
* @returns {Similarities} similarities
*/
const getSimilarities = (nodes) => {
// calculate similarities between lexically adjacent nodes
/** @type {number[]} */
/** @type {Similarities} */
const similarities = [];
let last;
for (const node of nodes) {
@@ -268,16 +272,16 @@ const getSimilarities = (nodes) => {
* @typedef {object} GroupedItems<T>
* @property {string} key
* @property {T[]} items
* @property {Record<string, number>} size
* @property {Sizes} size
*/
/**
* @template T
* @typedef {object} Options
* @property {Record<string, number>} maxSize maximum size of a group
* @property {Record<string, number>} minSize minimum size of a group (preferred over maximum size)
* @property {Sizes} maxSize maximum size of a group
* @property {Sizes} minSize minimum size of a group (preferred over maximum size)
* @property {Iterable<T>} items a list of items
* @property {(item: T) => Record<string, number>} getSize function to get size of an item
* @property {(item: T) => Sizes} getSize function to get size of an item
* @property {(item: T) => string} getKey function to get the key of an item
*/
@@ -320,7 +324,7 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
/**
* @param {Group<T>} group group
* @param {Record<string, number>} consideredSize size of the group to consider
* @param {Sizes} consideredSize size of the group to consider
* @returns {boolean} true, if the group was modified
*/
const removeProblematicNodes = (group, consideredSize = group.size) => {
@@ -455,9 +459,9 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
// rightSize ^^^^^^^^^^^^^^^
while (pos <= right + 1) {
const similarity = /** @type {number[]} */ (group.similarities)[
pos - 1
];
const similarity =
/** @type {Similarities} */
(group.similarities)[pos - 1];
if (
similarity < bestSimilarity &&
!isTooSmall(leftSize, minSize) &&
@@ -484,22 +488,22 @@ module.exports = ({ maxSize, minSize, items, getSize, getKey }) => {
// create two new groups for left and right area
// and queue them up
const rightNodes = [group.nodes[right + 1]];
/** @type {number[]} */
/** @type {Similarities} */
const rightSimilarities = [];
for (let i = right + 2; i < group.nodes.length; i++) {
rightSimilarities.push(
/** @type {number[]} */ (group.similarities)[i - 1]
/** @type {Similarities} */ (group.similarities)[i - 1]
);
rightNodes.push(group.nodes[i]);
}
queue.push(new Group(rightNodes, rightSimilarities));
const leftNodes = [group.nodes[0]];
/** @type {number[]} */
/** @type {Similarities} */
const leftSimilarities = [];
for (let i = 1; i < left; i++) {
leftSimilarities.push(
/** @type {number[]} */ (group.similarities)[i - 1]
/** @type {Similarities} */ (group.similarities)[i - 1]
);
leftNodes.push(group.nodes[i]);
}

319
node_modules/webpack/lib/util/extractSourceMap.js generated vendored Executable file
View File

@@ -0,0 +1,319 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Natsu @xiaoxiaojx
*/
"use strict";
const path = require("path");
const urlUtils = require("url");
const { isAbsolute, join } = require("./fs");
/** @typedef {import("./fs").InputFileSystem} InputFileSystem */
/**
* @typedef {(input: string | Buffer<ArrayBufferLike>, resourcePath: string, fs: InputFileSystem) => Promise<{source: string | Buffer<ArrayBufferLike>, sourceMap: string | RawSourceMap | undefined, fileDependencies: string[]}>} SourceMapExtractorFunction
*/
/** @typedef {import("webpack-sources").RawSourceMap} RawSourceMap */
/**
* @typedef {(resourcePath: string) => Promise<string | Buffer<ArrayBufferLike>>} ReadResource
*/
/**
* @typedef {object} SourceMappingURL
* @property {string} sourceMappingURL
* @property {string} replacementString
*/
// Matches only the last occurrence of sourceMappingURL
const innerRegex = /\s*[#@]\s*sourceMappingURL\s*=\s*([^\s'"]*)\s*/;
const validProtocolPattern = /^[a-z][a-z0-9+.-]*:/i;
// `/* ... */` form: optional newline (possibly `//`-prefixed) before the URL.
const blockCommentForm = `/\\*(?:\\s*\r?\n(?://)?)?(?:${innerRegex.source})\\s*\\*/`;
// `// ...` single-line form.
const lineCommentForm = `//(?:${innerRegex.source})`;
const sourceMappingURLRegex = new RegExp(
	`(?:${blockCommentForm}|${lineCommentForm})\\s*`
);
/**
 * Extract the source mapping URL from code comments, scanning lines from the
 * bottom up so only the last occurrence wins.
 * @param {string} code source code content
 * @returns {SourceMappingURL} decoded URL ("" when absent) and the exact
 * matched comment text to strip from the source
 */
function getSourceMappingURL(code) {
	// split(/^/m) keeps line terminators, so replacementString stays exact.
	const lines = code.split(/^/m);
	let match = null;
	for (let index = lines.length - 1; index >= 0; index--) {
		match = lines[index].match(sourceMappingURLRegex);
		if (match) break;
	}
	// Group 1 is the block-comment capture, group 2 the line-comment capture.
	const rawURL = match ? match[1] || match[2] || "" : "";
	return {
		sourceMappingURL: rawURL ? decodeURI(rawURL) : rawURL,
		replacementString: match ? match[0] : ""
	};
}
/**
 * Resolve a source file request to an absolute path.
 * @param {string} context context directory
 * @param {string} request file request
 * @param {string} sourceRoot source root directory ("" to resolve against context only)
 * @returns {string} absolute path
 */
function getAbsolutePath(context, request, sourceRoot) {
	// Without a sourceRoot the request resolves against the context alone.
	if (!sourceRoot) {
		return join(undefined, context, request);
	}
	// An absolute sourceRoot replaces the context entirely.
	if (isAbsolute(sourceRoot)) {
		return join(undefined, sourceRoot, request);
	}
	// A relative sourceRoot is first anchored at the context.
	return join(undefined, join(undefined, context, sourceRoot), request);
}
/**
 * Check whether a value is a URL, i.e. it starts with a scheme and is not a
 * Windows drive-letter path such as `C:\dir\file` (which would also match the
 * scheme pattern).
 * @param {string} value string to check
 * @returns {boolean} true if value is a URL
 */
function isURL(value) {
	if (!validProtocolPattern.test(value)) return false;
	return !path.win32.isAbsolute(value);
}
/**
 * Try reading each candidate path in order and return the first success.
 * When every candidate fails, the last error is rethrown with all collected
 * error messages joined into its message.
 * @param {ReadResource} readResource read resource function
 * @param {string[]} possibleRequests array of possible file paths
 * @param {string} errorsAccumulator accumulated error messages
 * @returns {Promise<{path: string, data?: string}>} source content promise
 */
async function fetchPathsFromURL(
	readResource,
	possibleRequests,
	errorsAccumulator = ""
) {
	const [request, ...remainingRequests] = possibleRequests;
	try {
		const content = await readResource(request);
		return {
			path: request,
			data: content.toString("utf8")
		};
	} catch (error) {
		const collected = `${errorsAccumulator}${
			/** @type {Error} */ (error).message
		}\n\n`;
		if (remainingRequests.length === 0) {
			// Out of candidates: surface everything that went wrong.
			/** @type {Error} */ (error).message = collected;
			throw error;
		}
		return fetchPathsFromURL(readResource, remainingRequests, collected);
	}
}
/**
 * Fetch source content from URL
 * @param {ReadResource} readResource The read resource function
 * @param {string} context context directory
 * @param {string} url source URL
 * @param {string=} sourceRoot source root directory
 * @param {boolean=} skipReading whether to skip reading file content
 * @returns {Promise<{sourceURL: string, sourceContent?: string | Buffer<ArrayBufferLike>}>} source content promise
 */
async function fetchFromURL(
	readResource,
	context,
	url,
	sourceRoot,
	skipReading = false
) {
	/**
	 * Read helper honoring skipReading ("" instead of touching the resource).
	 * @param {string} target path or url to read
	 * @returns {Promise<string | Buffer>} content
	 */
	const read = (target) =>
		skipReading ? Promise.resolve("") : readResource(target);

	// 1. It's an absolute url and it is not `windows` path like `C:\dir\file`
	if (isURL(url)) {
		// eslint-disable-next-line n/no-deprecated-api
		const { protocol } = urlUtils.parse(url);
		if (protocol === "data:") {
			// Inline data URLs carry the content themselves; no file URL exists.
			return { sourceURL: "", sourceContent: await read(url) };
		}
		if (protocol === "file:") {
			const sourceURL = path.normalize(urlUtils.fileURLToPath(url));
			return { sourceURL, sourceContent: await read(sourceURL) };
		}
		// Any other scheme is read through readResource unchanged.
		return { sourceURL: url, sourceContent: await read(url) };
	}

	// 3. Absolute path
	if (isAbsolute(url)) {
		let sourceURL = path.normalize(url);
		let sourceContent;
		if (!skipReading) {
			const possibleRequests = [sourceURL];
			// A server-relative path may actually live under context/sourceRoot.
			if (url.startsWith("/")) {
				possibleRequests.push(
					getAbsolutePath(context, sourceURL.slice(1), sourceRoot || "")
				);
			}
			const result = await fetchPathsFromURL(readResource, possibleRequests);
			sourceURL = result.path;
			sourceContent = result.data;
		}
		return { sourceURL, sourceContent };
	}

	// 4. Relative path
	const sourceURL = getAbsolutePath(context, url, sourceRoot || "");
	return {
		sourceURL,
		// NOTE: when skipReading, sourceContent stays undefined here (not "").
		sourceContent: skipReading ? undefined : await readResource(sourceURL)
	};
}
/**
 * Extract a source map referenced from code content.
 *
 * Finds the last `sourceMappingURL` comment in the input, fetches the map
 * (data: URL, file: URL, absolute or relative path), then rewrites it so that
 * `sources` are absolute paths and `sourcesContent` is inlined when available.
 * @param {string | Buffer<ArrayBufferLike>} stringOrBuffer The input code content as string or buffer
 * @param {string} resourcePath The path to the resource file (its directory is the base for relative URLs)
 * @param {ReadResource} readResource The read resource function
 * @returns {Promise<{source: string | Buffer<ArrayBufferLike>, sourceMap: string | RawSourceMap | undefined}>} the source (with the sourceMappingURL comment removed on success) plus the resolved map, or the input unchanged when no map could be extracted
 */
async function extractSourceMap(stringOrBuffer, resourcePath, readResource) {
	const input =
		typeof stringOrBuffer === "string"
			? stringOrBuffer
			: stringOrBuffer.toString("utf8");
	// NOTE(review): always undefined; kept as the explicit "no map" value for
	// the early-return result below.
	const inputSourceMap = undefined;
	const output = {
		source: stringOrBuffer,
		sourceMap: inputSourceMap
	};
	const { sourceMappingURL, replacementString } = getSourceMappingURL(input);
	// No sourceMappingURL comment found - return the input untouched.
	if (!sourceMappingURL) {
		return output;
	}
	const baseContext = path.dirname(resourcePath);
	const { sourceURL, sourceContent } = await fetchFromURL(
		readResource,
		baseContext,
		sourceMappingURL
	);
	// Map could not be read - return the input untouched.
	if (!sourceContent) {
		return output;
	}
	// Strip the XSSI protection prefix ")]}'" some servers prepend to JSON.
	/** @type {RawSourceMap} */
	const map = JSON.parse(
		sourceContent.toString("utf8").replace(/^\)\]\}'/, "")
	);
	// Entries in `sources` resolve relative to the map's own location when
	// known, otherwise relative to the resource itself.
	const context = sourceURL ? path.dirname(sourceURL) : baseContext;
	const resolvedSources = await Promise.all(
		map.sources.map(
			async (/** @type {string} */ source, /** @type {number} */ i) => {
				// Content already embedded in the map's sourcesContent wins.
				const originalSourceContent =
					map.sourcesContent &&
					typeof map.sourcesContent[i] !== "undefined" &&
					map.sourcesContent[i] !== null
						? map.sourcesContent[i]
						: undefined;
				const skipReading = typeof originalSourceContent !== "undefined";
				// We do not skipReading here, because we need absolute paths in sources.
				// This is necessary so that for sourceMaps with the same file structure in sources, name collisions do not occur.
				// https://github.com/webpack-contrib/source-map-loader/issues/51
				let { sourceURL, sourceContent } = await fetchFromURL(
					readResource,
					context,
					source,
					map.sourceRoot,
					skipReading
				);
				if (skipReading) {
					sourceContent = originalSourceContent;
				}
				// Return original value of `source` when error happens
				return { sourceURL, sourceContent };
			}
		)
	);
	// Rebuild the map with absolute sources and inlined sourcesContent;
	// sourceRoot is dropped because it is already folded into the URLs.
	/** @type {RawSourceMap} */
	const newMap = { ...map };
	newMap.sources = [];
	newMap.sourcesContent = [];
	delete newMap.sourceRoot;
	for (const source of resolvedSources) {
		const { sourceURL, sourceContent } = source;
		newMap.sources.push(sourceURL || "");
		newMap.sourcesContent.push(
			sourceContent ? sourceContent.toString("utf8") : ""
		);
	}
	// Omit sourcesContent entirely when no content could be resolved at all.
	const sourcesContentIsEmpty =
		newMap.sourcesContent.filter(Boolean).length === 0;
	if (sourcesContentIsEmpty) {
		delete newMap.sourcesContent;
	}
	return {
		source: input.replace(replacementString, ""),
		sourceMap: /** @type {RawSourceMap} */ (newMap)
	};
}
module.exports = extractSourceMap;
module.exports.getSourceMappingURL = getSourceMappingURL;

View File

@@ -11,6 +11,16 @@ const DONE_MARKER = 2;
const DONE_MAYBE_ROOT_CYCLE_MARKER = 3;
const DONE_AND_ROOT_MARKER = 4;
/**
* @template T
* @typedef {Set<Node<T>>} Nodes
*/
/**
* @template T
* @typedef {Set<Cycle<T>>} Cycles
*/
/**
* @template T
*/
@@ -20,7 +30,7 @@ class Node {
*/
constructor(item) {
this.item = item;
/** @type {Set<Node<T>>} */
/** @type {Nodes<T>} */
this.dependencies = new Set();
this.marker = NO_MARKER;
/** @type {Cycle<T> | undefined} */
@@ -34,7 +44,7 @@ class Node {
*/
class Cycle {
constructor() {
/** @type {Set<Node<T>>} */
/** @type {Nodes<T>} */
this.nodes = new Set();
}
}
@@ -75,13 +85,13 @@ module.exports = (items, getDependencies) => {
// Set of current root modules
// items will be removed if a new reference to it has been found
/** @type {Set<Node<T>>} */
/** @type {Nodes<T>} */
const roots = new Set();
// Set of current cycles without references to it
// cycles will be removed if a new reference to it has been found
// that is not part of the cycle
/** @type {Set<Cycle<T>>} */
/** @type {Cycles<T>} */
const rootCycles = new Set();
// For all non-marked nodes
@@ -201,7 +211,7 @@ module.exports = (items, getDependencies) => {
// inside of the cycle
for (const cycle of rootCycles) {
let max = 0;
/** @type {Set<Node<T>>} */
/** @type {Nodes<T>} */
const cycleRoots = new Set();
const nodes = cycle.nodes;
for (const node of nodes) {

37
node_modules/webpack/lib/util/fs.js generated vendored
View File

@@ -85,17 +85,17 @@ const path = require("path");
/** @typedef {Map<string, FileSystemInfoEntry | "ignore">} TimeInfoEntries */
/** @typedef {Set<string>} Changes */
/** @typedef {Set<string>} Removals */
/**
* @typedef {object} WatcherInfo
* @property {Set<string> | null} changes get current aggregated changes that have not yet send to callback
* @property {Set<string> | null} removals get current aggregated removals that have not yet send to callback
* @property {Changes | null} changes get current aggregated changes that have not yet send to callback
* @property {Removals | null} removals get current aggregated removals that have not yet send to callback
* @property {TimeInfoEntries} fileTimeInfoEntries get info about files
* @property {TimeInfoEntries} contextTimeInfoEntries get info about directories
*/
/** @typedef {Set<string>} Changes */
/** @typedef {Set<string>} Removals */
// TODO webpack 6 deprecate missing getInfo
/**
* @typedef {object} Watcher
@@ -345,16 +345,29 @@ const path = require("path");
* @typedef {(pathLike: PathLike, callback: NoParamCallback) => void} Unlink
*/
/**
* @typedef {FSImplementation & { read: (...args: EXPECTED_ANY[]) => EXPECTED_ANY }} CreateReadStreamFSImplementation
*/
/**
* @typedef {StreamOptions & { fs?: CreateReadStreamFSImplementation | null | undefined, end?: number | undefined }} ReadStreamOptions
*/
/**
* @typedef {(path: PathLike, options?: BufferEncoding | ReadStreamOptions) => NodeJS.ReadableStream} CreateReadStream
*/
/**
* @typedef {object} OutputFileSystem
* @property {WriteFile} writeFile
* @property {Mkdir} mkdir
* @property {Readdir=} readdir
* @property {Rmdir=} rmdir
* @property {WriteFile} writeFile
* @property {Unlink=} unlink
* @property {Stat} stat
* @property {LStat=} lstat
* @property {ReadFile} readFile
* @property {CreateReadStream=} createReadStream
* @property {((path1: string, path2: string) => string)=} join
* @property {((from: string, to: string) => string)=} relative
* @property {((dirname: string) => string)=} dirname
@@ -396,7 +409,7 @@ const path = require("path");
*/
/**
* @typedef {StreamOptions & { fs?: CreateWriteStreamFSImplementation | null | undefined }} WriteStreamOptions
* @typedef {StreamOptions & { fs?: CreateWriteStreamFSImplementation | null | undefined, flush?: boolean | undefined }} WriteStreamOptions
*/
/**
@@ -461,7 +474,7 @@ const path = require("path");
/** @typedef {InputFileSystem & OutputFileSystem & IntermediateFileSystemExtras} IntermediateFileSystem */
/**
* @param {InputFileSystem|OutputFileSystem|undefined} fs a file system
* @param {InputFileSystem | OutputFileSystem|undefined} fs a file system
* @param {string} rootPath the root path
* @param {string} targetPath the target path
* @returns {string} location of targetPath relative to rootPath
@@ -649,7 +662,15 @@ const lstatReadlinkAbsolute = (fs, p, callback) => {
doReadLink();
};
/**
* @param {string} pathname a path
* @returns {boolean} is absolute
*/
const isAbsolute = (pathname) =>
path.posix.isAbsolute(pathname) || path.win32.isAbsolute(pathname);
module.exports.dirname = dirname;
module.exports.isAbsolute = isAbsolute;
module.exports.join = join;
module.exports.lstatReadlinkAbsolute = lstatReadlinkAbsolute;
module.exports.mkdirp = mkdirp;

View File

@@ -8,6 +8,8 @@
const Hash = require("../Hash");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
class BatchedHash extends Hash {
/**
* @param {Hash} hash hash
@@ -21,9 +23,22 @@ class BatchedHash extends Hash {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string|Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (this.string !== undefined) {
@@ -35,7 +50,11 @@ class BatchedHash extends Hash {
this.string += data;
return this;
}
this.hash.update(this.string, this.encoding);
if (this.encoding) {
this.hash.update(this.string, this.encoding);
} else {
this.hash.update(this.string);
}
this.string = undefined;
}
if (typeof data === "string") {
@@ -46,8 +65,10 @@ class BatchedHash extends Hash {
) {
this.string = data;
this.encoding = inputEncoding;
} else {
} else if (inputEncoding) {
this.hash.update(data, inputEncoding);
} else {
this.hash.update(data);
}
} else {
this.hash.update(data);
@@ -57,12 +78,30 @@ class BatchedHash extends Hash {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string|Buffer} digest
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
if (this.string !== undefined) {
this.hash.update(this.string, this.encoding);
if (this.encoding) {
this.hash.update(this.string, this.encoding);
} else {
this.hash.update(this.string);
}
}
if (!encoding) {
return this.hash.digest();
}
return this.hash.digest(encoding);
}

View File

@@ -5,13 +5,15 @@
"use strict";
const Hash = require("../Hash");
// 65536 is the size of a wasm memory page
// 64 is the maximum chunk size for every possible wasm hash implementation
// 4 is the maximum number of bytes per char for string encoding (max is utf-8)
// ~3 makes sure that it's always a block of 4 chars, so avoid partially encoded bytes for base64
const MAX_SHORT_STRING = Math.floor((65536 - 64) / 4) & ~3;
class WasmHash {
class WasmHash extends Hash {
/**
* @param {WebAssembly.Instance} instance wasm instance
* @param {WebAssembly.Instance[]} instancesPool pool of instances
@@ -19,6 +21,8 @@ class WasmHash {
* @param {number} digestSize size of digest returned by wasm
*/
constructor(instance, instancesPool, chunkSize, digestSize) {
super();
const exports = /** @type {EXPECTED_ANY} */ (instance.exports);
exports.init();
this.exports = exports;
@@ -35,17 +39,39 @@ class WasmHash {
}
/**
* @param {Buffer | string} data data
* @param {BufferEncoding=} encoding encoding
* @returns {this} itself
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
update(data, encoding) {
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {string=} inputEncoding data encoding
* @returns {this} updated hash
*/
update(data, inputEncoding) {
if (typeof data === "string") {
while (data.length > MAX_SHORT_STRING) {
this._updateWithShortString(data.slice(0, MAX_SHORT_STRING), encoding);
this._updateWithShortString(
data.slice(0, MAX_SHORT_STRING),
/** @type {NodeJS.BufferEncoding} */
(inputEncoding)
);
data = data.slice(MAX_SHORT_STRING);
}
this._updateWithShortString(data, encoding);
this._updateWithShortString(
data,
/** @type {NodeJS.BufferEncoding} */
(inputEncoding)
);
return this;
}
this._updateWithBuffer(data);
@@ -136,17 +162,31 @@ class WasmHash {
}
/**
* @param {BufferEncoding} type type
* @returns {Buffer | string} digest
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @returns {Buffer} digest
*/
digest(type) {
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {string=} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {string=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
const { exports, buffered, mem, digestSize } = this;
exports.final(buffered);
this.instancesPool.push(this);
const hex = mem.toString("latin1", 0, digestSize);
if (type === "hex") return hex;
if (type === "binary" || !type) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(type);
if (encoding === "hex") return hex;
if (encoding === "binary" || !encoding) return Buffer.from(hex, "hex");
return Buffer.from(hex, "hex").toString(
/** @type {NodeJS.BufferEncoding} */ (encoding)
);
}
}

View File

@@ -6,7 +6,8 @@
"use strict";
/** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */
/** @typedef {string | number | undefined} SemVerRangeItem */
/** @typedef {string | number} VersionValue */
/** @typedef {VersionValue | undefined} SemVerRangeItem */
/** @typedef {(SemVerRangeItem | SemVerRangeItem[])[]} SemVerRange */
/**
@@ -16,12 +17,12 @@
const parseVersion = (str) => {
/**
* @param {str} str str
* @returns {(string | number)[]} result
* @returns {VersionValue[]} result
*/
var splitAndConvert = function (str) {
return str.split(".").map(function (item) {
// eslint-disable-next-line eqeqeq
return +item == /** @type {EXPECTED_ANY} */ (item) ? +item : item;
return +item == /** @type {string | number} */ (item) ? +item : item;
});
};
@@ -29,7 +30,7 @@ const parseVersion = (str) => {
/** @type {RegExpExecArray} */
(/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(str));
/** @type {(string | number | undefined | [])[]} */
/** @type {(VersionValue | undefined | [])[]} */
var ver = match[1] ? splitAndConvert(match[1]) : [];
if (match[2]) {
@@ -103,7 +104,7 @@ module.exports.versionLt = versionLt;
module.exports.parseRange = (str) => {
/**
* @param {string} str str
* @returns {(string | number)[]} result
* @returns {VersionValue[]} result
*/
const splitAndConvert = (str) => {
return str
@@ -429,7 +430,7 @@ const satisfy = (range, version) => {
/** @type {"s" | "n" | "u" | ""} */
(j < range.length ? (typeof range[j])[0] : "");
/** @type {number | string | undefined} */
/** @type {VersionValue | undefined} */
var versionValue;
/** @type {"n" | "s" | "u" | "o" | undefined} */
var versionType;
@@ -470,8 +471,8 @@ const satisfy = (range, version) => {
// Handles "cmp" cases
if (
negated
? versionValue > /** @type {(number | string)[]} */ (range)[j]
: versionValue < /** @type {(number | string)[]} */ (range)[j]
? versionValue > /** @type {VersionValue[]} */ (range)[j]
: versionValue < /** @type {VersionValue[]} */ (range)[j]
) {
return false;
}

View File

@@ -13,42 +13,53 @@
*/
/**
* @template T
* @template R
* @template I
* @template G
* @typedef {object} GroupConfig
* @property {(item: T) => string[] | undefined} getKeys
* @property {(key: string, children: (R | T)[], items: T[]) => R} createGroup
* @property {(name: string, items: T[]) => GroupOptions=} getOptions
* @property {(item: I) => string[] | undefined} getKeys
* @property {(name: string, items: I[]) => GroupOptions=} getOptions
* @property {(key: string, children: I[], items: I[]) => G} createGroup
*/
/**
* @template T
* @template R
* @template I
* @template G
* @typedef {{ config: GroupConfig<I, G>, name: string, alreadyGrouped: boolean, items: Items<I, G> | undefined }} Group
*/
/**
* @template I, G
* @typedef {Set<Group<I, G>>} Groups
*/
/**
* @template I
* @template G
* @typedef {object} ItemWithGroups
* @property {T} item
* @property {Set<Group<T, R>>} groups
* @property {I} item
* @property {Groups<I, G>} groups
*/
/**
* @template T
* @template R
* @typedef {{ config: GroupConfig<T, R>, name: string, alreadyGrouped: boolean, items: Set<ItemWithGroups<T, R>> | undefined }} Group
* @template T, G
* @typedef {Set<ItemWithGroups<T, G>>} Items
*/
/**
* @template T
* @template I
* @template G
* @template R
* @param {T[]} items the list of items
* @param {GroupConfig<T, R>[]} groupConfigs configuration
* @returns {(R | T)[]} grouped items
* @param {I[]} items the list of items
* @param {GroupConfig<I, G>[]} groupConfigs configuration
* @returns {(I | G)[]} grouped items
*/
const smartGrouping = (items, groupConfigs) => {
/** @type {Set<ItemWithGroups<T, R>>} */
/** @type {Items<I, G>} */
const itemsWithGroups = new Set();
/** @type {Map<string, Group<T, R>>} */
/** @type {Map<string, Group<I, G>>} */
const allGroups = new Map();
for (const item of items) {
/** @type {Set<Group<T, R>>} */
/** @type {Groups<I, G>} */
const groups = new Set();
for (let i = 0; i < groupConfigs.length; i++) {
const groupConfig = groupConfigs[i];
@@ -77,9 +88,10 @@ const smartGrouping = (items, groupConfigs) => {
groups
});
}
/**
* @param {Set<ItemWithGroups<T, R>>} itemsWithGroups input items with groups
* @returns {(T | R)[]} groups items
* @param {Items<I, G>} itemsWithGroups input items with groups
* @returns {(I | G)[]} groups items
*/
const runGrouping = (itemsWithGroups) => {
const totalSize = itemsWithGroups.size;
@@ -94,7 +106,7 @@ const smartGrouping = (items, groupConfigs) => {
}
}
}
/** @type {Map<Group<T, R>, { items: Set<ItemWithGroups<T, R>>, options: GroupOptions | false | undefined, used: boolean }>} */
/** @type {Map<Group<I, G>, { items: Items<I, G>, options: GroupOptions | false | undefined, used: boolean }>} */
const groupMap = new Map();
for (const group of allGroups.values()) {
if (group.items) {
@@ -107,13 +119,15 @@ const smartGrouping = (items, groupConfigs) => {
});
}
}
/** @type {(T | R)[]} */
/** @type {(I | G)[]} */
const results = [];
for (;;) {
/** @type {Group<T, R> | undefined} */
/** @type {Group<I, G> | undefined} */
let bestGroup;
let bestGroupSize = -1;
/** @type {Items<I, G> | undefined} */
let bestGroupItems;
/** @type {GroupOptions | false | undefined} */
let bestGroupOptions;
for (const [group, state] of groupMap) {
const { items, used } = state;
@@ -192,8 +206,9 @@ const smartGrouping = (items, groupConfigs) => {
bestGroup.alreadyGrouped = true;
const children = groupChildren ? runGrouping(items) : allItems;
bestGroup.alreadyGrouped = false;
results.push(groupConfig.createGroup(key, children, allItems));
results.push(
groupConfig.createGroup(key, /** @type {I[]} */ (children), allItems)
);
}
for (const { item } of itemsWithGroups) {
results.push(item);

View File

@@ -0,0 +1,45 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
/** @typedef {import("../javascript/JavascriptParser").DestructuringAssignmentProperties} DestructuringAssignmentProperties */
/** @typedef {import("../javascript/JavascriptParser").DestructuringAssignmentProperty} DestructuringAssignmentProperty */
/**
 * Deep first traverse the properties of a destructuring assignment.
 * @param {DestructuringAssignmentProperties} properties destructuring assignment properties
 * @param {((stack: DestructuringAssignmentProperty[]) => void) | undefined=} onLeftNode called for each leaf (a property without a nested pattern)
 * @param {((stack: DestructuringAssignmentProperty[]) => void) | undefined=} enterNode called before descending into a property
 * @param {((stack: DestructuringAssignmentProperty[]) => void) | undefined=} exitNode called after a property and its children were visited
 * @param {DestructuringAssignmentProperty[] | undefined=} stack stack of the walking nodes (shared across the recursion)
 */
function traverseDestructuringAssignmentProperties(
	properties,
	onLeftNode,
	enterNode,
	exitNode,
	stack = []
) {
	/**
	 * Visit one property: enter, descend or report leaf, then exit.
	 * @param {DestructuringAssignmentProperty} property property being visited
	 */
	const visit = (property) => {
		stack.push(property);
		if (enterNode) enterNode(stack);
		if (property.pattern) {
			// Descend into the nested destructuring pattern.
			traverseDestructuringAssignmentProperties(
				property.pattern,
				onLeftNode,
				enterNode,
				exitNode,
				stack
			);
		} else if (onLeftNode) {
			// Leaf: the stack holds the full path from root to this property.
			onLeftNode(stack);
		}
		if (exitNode) exitNode(stack);
		stack.pop();
	};
	for (const property of properties) visit(property);
}
// Single CommonJS export of the traversal helper.
module.exports = traverseDestructuringAssignmentProperties;