Update npm packages (73 packages including @jqhtml 2.3.36)

Update npm registry domain from privatenpm.hanson.xyz to npm.internal.hanson.xyz

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
root
2026-02-20 11:31:28 +00:00
parent d01a6179aa
commit b5eb27a827
1690 changed files with 47348 additions and 16848 deletions

View File

@@ -28,6 +28,7 @@ var import_workerHost = require("./workerHost");
var import_ipc = require("../common/ipc");
var import_internalReporter = require("../reporters/internalReporter");
var import_util = require("../util");
var import_storage = require("./storage");
class Dispatcher {
constructor(config, reporter, failureTracker) {
this._workerSlots = [];
@@ -197,6 +198,12 @@ class Dispatcher {
const producedEnv = this._producedEnvByProjectId.get(testGroup.projectId) || {};
this._producedEnvByProjectId.set(testGroup.projectId, { ...producedEnv, ...worker.producedEnv() });
});
worker.onRequest("cloneStorage", async (params) => {
return await import_storage.Storage.clone(params.storageFile, outputDir);
});
worker.onRequest("upstreamStorage", async (params) => {
await import_storage.Storage.upstream(params.storageFile, params.storageOutFile);
});
return worker;
}
producedEnvByProjectId() {
@@ -458,11 +465,15 @@ class JobDispatcher {
];
}
_onTestPaused(worker, params) {
const data = this._dataByTestId.get(params.testId);
if (!data)
return;
const { result, test } = data;
const sendMessage = async (message) => {
try {
if (this.jobResult.isDone())
throw new Error("Test has already stopped");
const response = await worker.sendCustomMessage({ testId: params.testId, request: message.request });
const response = await worker.sendCustomMessage({ testId: test.id, request: message.request });
if (response.error)
(0, import_internalReporter.addLocationAndSnippetToError)(this._config.config, response.error);
return response;
@@ -472,8 +483,12 @@ class JobDispatcher {
return { response: void 0, error };
}
};
for (const error of params.errors)
(0, import_internalReporter.addLocationAndSnippetToError)(this._config.config, error);
result.status = params.status;
result.errors = params.errors;
result.error = result.errors[0];
void this._reporter.onTestPaused?.(test, result).then(() => {
worker.sendResume({});
});
this._failureTracker.onTestPaused?.({ ...params, sendMessage });
}
skipWholeJob() {

View File

@@ -162,7 +162,7 @@ async function createRootSuite(testRun, errors, shouldFilterOnly) {
for (const group of (0, import_testGroups.createTestGroups)(projectSuite, config.config.shard.total))
testGroups.push(group);
}
const testGroupsInThisShard = (0, import_testGroups.filterForShard)(config.config.shard, testGroups);
const testGroupsInThisShard = (0, import_testGroups.filterForShard)(config.config.shard, config.configCLIOverrides.shardWeights, testGroups);
const testsInThisShard = /* @__PURE__ */ new Set();
for (const group of testGroupsInThisShard) {
for (const test of group.tests)
@@ -317,7 +317,7 @@ async function loadTestList(config, filePath) {
const relativeFile = (0, import_utils.toPosixPath)(import_path.default.relative(config.config.rootDir, test.location.file));
if (relativeFile !== d.file)
return false;
return d.titlePath.length === titles.length && d.titlePath.every((_, index) => titles[index] === d.titlePath[index]);
return d.titlePath.length <= titles.length && d.titlePath.every((_, index) => titles[index] === d.titlePath[index]);
});
} catch (e) {
throw (0, import_util.errorWithFile)(filePath, "Cannot read test list file: " + e.message);

View File

@@ -44,6 +44,7 @@ class ProcessHost extends import_events.EventEmitter {
this._lastMessageId = 0;
this._callbacks = /* @__PURE__ */ new Map();
this._producedEnv = {};
this._requestHandlers = /* @__PURE__ */ new Map();
this._runnerScript = runnerScript;
this._processName = processName;
this._extraEnv = env;
@@ -51,6 +52,9 @@ class ProcessHost extends import_events.EventEmitter {
async startRunner(runnerParams, options = {}) {
(0, import_utils.assert)(!this.process, "Internal error: starting the same process twice");
this.process = import_child_process.default.fork(require.resolve("../common/process"), {
// Note: we pass detached:false, so that workers are in the same process group.
// This way Ctrl+C or a kill command can shutdown all workers in case they misbehave.
// Otherwise user can end up with a bunch of workers stuck in a busy loop without self-destructing.
detached: false,
env: {
...process.env,
@@ -92,6 +96,18 @@ class ProcessHost extends import_events.EventEmitter {
} else {
this.emit(method, params);
}
} else if (message.method === "__request__") {
const { id, method, params } = message.params;
const handler = this._requestHandlers.get(method);
if (!handler) {
this.send({ method: "__response__", params: { id, error: { message: "Unknown method" } } });
} else {
handler(params).then((result) => {
this.send({ method: "__response__", params: { id, result } });
}).catch((error2) => {
this.send({ method: "__response__", params: { id, error: { message: error2.message } } });
});
}
} else {
this.emit(message.method, message.params);
}
@@ -135,6 +151,9 @@ class ProcessHost extends import_events.EventEmitter {
}
async onExit() {
}
// Register an async handler for `__request__` messages of the given method
// coming from the child process. The handler receives the request params;
// its resolved value is relayed back as a `__response__` result, and a
// rejection is relayed back as a `__response__` error message.
onRequest(method, handler) {
  this._requestHandlers.set(method, handler);
}
async stop() {
if (!this._processDidExit && !this._didSendStop) {
this.send({ method: "__stop__" });

View File

@@ -145,7 +145,7 @@ function buildDependentProjects(forProjects, projects) {
return result;
}
async function collectFilesForProject(project, fsCache = /* @__PURE__ */ new Map()) {
const extensions = /* @__PURE__ */ new Set([".js", ".ts", ".mjs", ".mts", ".cjs", ".cts", ".jsx", ".tsx", ".mjsx", ".mtsx", ".cjsx", ".ctsx"]);
const extensions = /* @__PURE__ */ new Set([".js", ".ts", ".mjs", ".mts", ".cjs", ".cts", ".jsx", ".tsx", ".mjsx", ".mtsx", ".cjsx", ".ctsx", ".md"]);
const testFileExtension = (file) => extensions.has(import_path.default.extname(file));
const allFiles = await cachedCollectFiles(project.project.testDir, project.respectGitIgnore, fsCache);
const testMatch = (0, import_util2.createFileMatcher)(project.project.testMatch);

91
node_modules/playwright/lib/runner/storage.js generated vendored Normal file
View File

@@ -0,0 +1,91 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
// If the importer is in node compatibility mode or this is not an ESM
// file that has been converted to a CommonJS file using a Babel-
// compatible transform (i.e. "__esModule" has not been set), then set
// "default" to the CommonJS "module.exports" for node compatibility.
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// esbuild module wiring: collect the named exports into storage_exports and
// expose them through module.exports, then pull in the runtime dependencies.
var storage_exports = {};
__export(storage_exports, {
  Storage: () => Storage
});
module.exports = __toCommonJS(storage_exports);
var import_fs = __toESM(require("fs"));
var import_path = __toESM(require("path"));
var import_utils = require("playwright-core/lib/utils");
// Serialized wrapper around a JSON key/value storage file. Workers ask the
// dispatcher to clone the storage into a snapshot file ("cloneStorage") and
// to merge worker results back ("upstreamStorage"). All operations, across
// all files, are funnelled through one FIFO promise queue so that loads,
// snapshots and merges never interleave.
class Storage {
  static {
    // Cache of Storage instances, keyed by the backing file name.
    this._storages = /* @__PURE__ */ new Map();
  }
  static {
    // Global serialization queue; every _withStorage call chains onto it.
    this._serializeQueue = Promise.resolve();
  }
  // Snapshot storageFile's entries into outputDir; resolves to the snapshot path.
  static clone(storageFile, outputDir) {
    return Storage._withStorage(storageFile, (storage) => storage._clone(outputDir));
  }
  // Merge entries a worker wrote to storageOutFile back into storageFile.
  static upstream(storageFile, storageOutFile) {
    return Storage._withStorage(storageFile, (storage) => storage._upstream(storageOutFile));
  }
  // Look up (or lazily create) the Storage for fileName and run `runnable`
  // against it, strictly after all previously queued operations finish.
  // NOTE(review): if `runnable` rejects, the shared queue stays rejected and
  // every later operation is skipped with that same error — confirm intended.
  static _withStorage(fileName, runnable) {
    this._serializeQueue = this._serializeQueue.then(() => {
      let storage = Storage._storages.get(fileName);
      if (!storage) {
        storage = new Storage(fileName);
        Storage._storages.set(fileName, storage);
      }
      return runnable(storage);
    });
    return this._serializeQueue;
  }
  constructor(fileName) {
    // Path of the canonical storage file on disk.
    this._fileName = fileName;
  }
  // Write the current entries to a fresh guid-named snapshot in outputDir,
  // reusing the previous snapshot path if nothing was upstreamed since.
  async _clone(outputDir) {
    const entries = await this._load();
    if (this._lastSnapshotFileName)
      return this._lastSnapshotFileName;
    const snapshotFile = import_path.default.join(outputDir, `pw-storage-${(0, import_utils.createGuid)()}.json`);
    // Best-effort write: failures are swallowed.
    // NOTE(review): on a failed write the path is still cached and returned,
    // so callers may receive a snapshot file that does not exist — confirm.
    await import_fs.default.promises.writeFile(snapshotFile, JSON.stringify(entries, null, 2)).catch(() => {
    });
    this._lastSnapshotFileName = snapshotFile;
    return snapshotFile;
  }
  // Merge entries from storageOutFile (worker output) into the in-memory map,
  // invalidate the snapshot cache, and persist the merged map to _fileName.
  async _upstream(storageOutFile) {
    const entries = await this._load();
    // A missing or unparsable worker file contributes nothing.
    const newEntries = await import_fs.default.promises.readFile(storageOutFile, "utf8").then(JSON.parse).catch(() => ({}));
    for (const [key, newValue] of Object.entries(newEntries))
      entries[key] = newValue;
    // Force the next _clone() to write a fresh snapshot.
    this._lastSnapshotFileName = void 0;
    await import_fs.default.promises.writeFile(this._fileName, JSON.stringify(entries, null, 2));
  }
  // Lazily read and parse the storage file once; a missing or malformed file
  // yields an empty object. The promise is cached, so mutations of the
  // resolved object (see _upstream) are shared by all later operations.
  async _load() {
    if (!this._entriesPromise)
      this._entriesPromise = import_fs.default.promises.readFile(this._fileName, "utf8").then(JSON.parse).catch(() => ({}));
    return this._entriesPromise;
  }
}
// Annotate the CommonJS export names for ESM import in node:
// (dead code — the `0 &&` guard means this assignment never runs; it only
// lets Node's CJS named-exports static analysis discover `Storage`.)
0 && (module.exports = {
  Storage
});

View File

@@ -92,15 +92,23 @@ function createTestGroups(projectSuite, expectedParallelism) {
}
return result;
}
function filterForShard(shard, testGroups) {
function filterForShard(shard, weights, testGroups) {
weights ??= Array.from({ length: shard.total }, () => 1);
if (weights.length !== shard.total)
throw new Error(`PWTEST_SHARD_WEIGHTS number of weights must match the shard total of ${shard.total}`);
const totalWeight = weights.reduce((a, b) => a + b, 0);
let shardableTotal = 0;
for (const group of testGroups)
shardableTotal += group.tests.length;
const shardSize = Math.floor(shardableTotal / shard.total);
const extraOne = shardableTotal - shardSize * shard.total;
const currentShard = shard.current - 1;
const from = shardSize * currentShard + Math.min(extraOne, currentShard);
const to = from + shardSize + (currentShard < extraOne ? 1 : 0);
const shardSizes = weights.map((w) => Math.floor(w * shardableTotal / totalWeight));
const remainder = shardableTotal - shardSizes.reduce((a, b) => a + b, 0);
for (let i = 0; i < remainder; i++) {
shardSizes[i % shardSizes.length]++;
}
let from = 0;
for (let i = 0; i < shard.current - 1; i++)
from += shardSizes[i];
const to = from + shardSizes[shard.current - 1];
let current = 0;
const result = /* @__PURE__ */ new Set();
for (const group of testGroups) {

View File

@@ -63,6 +63,7 @@ class TestRunner extends import_events.default {
this._queue = Promise.resolve();
this._watchTestDirs = false;
this._populateDependenciesOnList = false;
this._startingEnv = {};
this.configLocation = configLocation;
this._configCLIOverrides = configCLIOverrides;
this._watcher = new import_fsWatcher.Watcher((events) => {
@@ -75,6 +76,7 @@ class TestRunner extends import_events.default {
(0, import_utils.setPlaywrightTestProcessEnv)();
this._watchTestDirs = !!params.watchTestDirs;
this._populateDependenciesOnList = !!params.populateDependenciesOnList;
this._startingEnv = { ...process.env };
}
resizeTerminal(params) {
process.stdout.columns = params.cols;
@@ -107,15 +109,20 @@ class TestRunner extends import_events.default {
const reporter = new import_internalReporter.InternalReporter(userReporters);
const config = await this._loadConfigOrReportError(reporter, this._configCLIOverrides);
if (!config)
return { status: "failed" };
return { status: "failed", env: [] };
const { status, cleanup } = await (0, import_tasks.runTasksDeferCleanup)(new import_tasks.TestRun(config, reporter), [
...(0, import_tasks.createGlobalSetupTasks)(config)
]);
const env = [];
for (const key of /* @__PURE__ */ new Set([...Object.keys(process.env), ...Object.keys(this._startingEnv)])) {
if (this._startingEnv[key] !== process.env[key])
env.push([key, process.env[key] ?? null]);
}
if (status !== "passed")
await cleanup();
else
this._globalSetup = { cleanup };
return { status };
return { status, env };
}
async runGlobalTeardown() {
const globalSetup = this._globalSetup;
@@ -251,6 +258,7 @@ class TestRunner extends import_events.default {
},
...params.updateSnapshots ? { updateSnapshots: params.updateSnapshots } : {},
...params.updateSourceMethod ? { updateSourceMethod: params.updateSourceMethod } : {},
...params.runAgents ? { runAgents: params.runAgents } : {},
...params.workers ? { workers: params.workers } : {}
};
const config = await this._loadConfigOrReportError(new import_internalReporter.InternalReporter([userReporter]), overrides);
@@ -258,7 +266,7 @@ class TestRunner extends import_events.default {
return { status: "failed" };
config.cliListOnly = false;
config.cliPassWithNoTests = true;
config.cliArgs = params.locations || [];
config.cliArgs = params.locations;
config.cliGrep = params.grep;
config.cliGrepInvert = params.grepInvert;
config.cliProjectFilter = params.projects?.length ? params.projects : void 0;
@@ -376,7 +384,8 @@ async function runAllTestsWithConfig(config) {
(0, import_tasks.createLoadTask)("in-process", { filterOnly: true, failOnLoadErrors: true }),
...(0, import_tasks.createRunTestsTasks)(config)
];
const status = await (0, import_tasks.runTasks)(new import_tasks.TestRun(config, reporter), tasks, config.config.globalTimeout);
const testRun = new import_tasks.TestRun(config, reporter, { pauseAtEnd: config.configCLIOverrides.pause, pauseOnError: config.configCLIOverrides.pause });
const status = await (0, import_tasks.runTasks)(testRun, tasks, config.config.globalTimeout);
await new Promise((resolve) => process.stdout.write("", () => resolve()));
await new Promise((resolve) => process.stderr.write("", () => resolve()));
return status;

View File

@@ -114,8 +114,8 @@ class TestServerDispatcher {
async runGlobalSetup(params) {
const { reporter, report } = await this._collectingReporter();
this._globalSetupReport = report;
const { status } = await this._testRunner.runGlobalSetup([reporter, new import_list.default()]);
return { report, status };
const { status, env } = await this._testRunner.runGlobalSetup([reporter, new import_list.default()]);
return { report, status, env };
}
async runGlobalTeardown() {
const { status } = await this._testRunner.runGlobalTeardown();

View File

@@ -267,7 +267,8 @@ async function runTests(watchOptions, testServerConnection, options) {
await testServerConnection.runTests({
grep: watchOptions.grep,
testIds: options?.testIds,
locations: watchOptions?.files,
locations: watchOptions?.files ?? [],
// TODO: always collect locations based on knowledge about tree, so that we don't have to load all tests
projects: watchOptions.projects,
connectWsEndpoint,
reuseContext: connectWsEndpoint ? true : void 0,

View File

@@ -61,6 +61,9 @@ class WorkerHost extends import_processHost.ProcessHost {
pauseAtEnd: options.pauseAtEnd
};
}
// Path of this worker's artifacts directory (created on disk in start()).
artifactsDir() {
  return this._params.artifactsDir;
}
async start() {
await import_fs.default.promises.mkdir(this._params.artifactsDir, { recursive: true });
return await this.startRunner(this._params, {
@@ -82,6 +85,9 @@ class WorkerHost extends import_processHost.ProcessHost {
// Forward an arbitrary payload to the worker process and await its reply.
async sendCustomMessage(payload) {
  return await this.sendMessage({ method: "customMessage", params: payload });
}
// Fire-and-forget "resume" signal to the worker; no reply is awaited.
sendResume(payload) {
  this.sendMessageNoReply({ method: "resume", params: payload });
}
// Returns the precomputed hash stored on this worker host.
// NOTE(review): presumably identifies the worker's configuration so
// compatible workers can be reused — confirm where _hash is assigned.
hash() {
  return this._hash;
}