refactor: move brother_node development artifact to dev/test-nodes subdirectory
Development Artifact Cleanup: ✅ BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location - dev/test-nodes/brother_node/: Moved from root directory for better organization - Contains development configuration, test logs, and test chain data - No impact on production systems - purely development/testing artifact ✅ DEVELOPMENT ARTIFACTS IDENTIFIED: - Chain ID: aitbc-brother-chain (test/development chain) - Ports: 8010 (P2P) and 8011 (RPC) - different from production - Environment: .env file with test configuration - Logs: rpc.log and node.log from development testing session (March 15, 2026) ✅ ROOT DIRECTORY CLEANUP: Removed development clutter from production directory - brother_node/ moved to dev/test-nodes/brother_node/ - Root directory now contains only production-ready components - Development artifacts properly organized in dev/ subdirectory DIRECTORY STRUCTURE IMPROVEMENT: 📁 dev/test-nodes/: Development and testing node configurations 🏗️ Root Directory: Clean production structure with only essential components 🧪 Development Isolation: Test environments separated from production BENEFITS: ✅ Clean Production Directory: No development artifacts in root ✅ Better Organization: Development nodes grouped in dev/ subdirectory ✅ Clear Separation: Production vs development environments clearly distinguished ✅ Maintainability: Easier to identify and manage development components RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
62
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bigint.ts
generated
vendored
Executable file
62
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bigint.ts
generated
vendored
Executable file
@@ -0,0 +1,62 @@
|
||||
import { InvalidParameterError } from "./common-errors.js";
|
||||
import { unreachable } from "./error.js";
|
||||
|
||||
/**
|
||||
* Returns the minimum of two bigints.
|
||||
*
|
||||
* @param x The first number to compare.
|
||||
* @param y The second number to compare.
|
||||
* @returns The smaller of the two numbers.
|
||||
*/
|
||||
export function min(x: bigint, y: bigint): bigint {
|
||||
return x < y ? x : y;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the maximum of two bigints.
|
||||
*
|
||||
* @param x The first number to compare.
|
||||
* @param y The second number to compare.
|
||||
* @returns The larger of the two numbers.
|
||||
*/
|
||||
export function max(x: bigint, y: bigint): bigint {
|
||||
return x > y ? x : y;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a value to a bigint.
|
||||
*
|
||||
* This function supports several types of input:
|
||||
* - `number`: Must be an integer and a safe integer. If it's not, an error is thrown.
|
||||
* - `bigint`: Returned as is.
|
||||
* - `string`: Converted to a bigint using the BigInt constructor.
|
||||
*
|
||||
* If the input is of an unsupported type, an error is thrown.
|
||||
*
|
||||
* @param value The value to convert to a bigint.
|
||||
* @returns The input value converted to a bigint.
|
||||
* @throws InvalidParameterError If the input value cannot be converted to a bigint.
|
||||
*/
|
||||
export function toBigInt(value: number | string | bigint): bigint {
|
||||
// eslint-disable-next-line @typescript-eslint/switch-exhaustiveness-check -- The other types will throw an error
|
||||
switch (typeof value) {
|
||||
case "number":
|
||||
if (!Number.isInteger(value)) {
|
||||
throw new InvalidParameterError(`${value} is not an integer`);
|
||||
}
|
||||
if (!Number.isSafeInteger(value)) {
|
||||
throw new InvalidParameterError(
|
||||
`Integer ${value} is unsafe. Consider using ${value}n instead. For more details, see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger`,
|
||||
);
|
||||
}
|
||||
// `break;` intentionally omitted. fallthrough desired.
|
||||
case "string":
|
||||
case "bigint":
|
||||
return BigInt(value);
|
||||
default:
|
||||
unreachable(
|
||||
value,
|
||||
new InvalidParameterError(`Unsupported type: ${typeof value}`),
|
||||
);
|
||||
}
|
||||
}
|
||||
90
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bytecode.ts
generated
vendored
Executable file
90
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bytecode.ts
generated
vendored
Executable file
@@ -0,0 +1,90 @@
|
||||
import type { PrefixedHexString } from "./hex.js";
|
||||
import type {
|
||||
Artifact,
|
||||
LibraryAddresses,
|
||||
LibraryLink,
|
||||
} from "./internal/bytecode.js";
|
||||
|
||||
import { getPrefixedHexString, getUnprefixedHexString } from "./hex.js";
|
||||
import {
|
||||
checkAmbiguousOrUnnecessaryLinks,
|
||||
checkMissingLibraryAddresses,
|
||||
checkOverlappingLibraryNames,
|
||||
checkProvidedLibraryAddresses,
|
||||
} from "./internal/bytecode.js";
|
||||
|
||||
/**
|
||||
* Resolves the linked bytecode for a given contract artifact by substituting
|
||||
* the required library placeholders within the bytecode with the provided
|
||||
* library addresses.
|
||||
*
|
||||
* @param artifact The contract artifact containing the bytecode and link references.
|
||||
* @param providedLibraries An object containing library names as keys and their addresses as values.
|
||||
* @returns The linked bytecode with all required libraries correctly linked.
|
||||
* @throws InvalidLibraryAddressError If any provided library address is invalid.
|
||||
* @throws AmbiguousLibraryNameError If any provided library name matches multiple needed libraries.
|
||||
* @throws UnnecessaryLibraryError If any provided library name is not needed by the contract.
|
||||
* @throws OverlappingLibrariesError If any library is provided more than once.
|
||||
* @throws MissingLibrariesError If any needed library address is missing.
|
||||
*/
|
||||
export function resolveLinkedBytecode(
|
||||
artifact: Artifact,
|
||||
providedLibraries: LibraryAddresses,
|
||||
): PrefixedHexString {
|
||||
checkProvidedLibraryAddresses(providedLibraries);
|
||||
|
||||
const neededLibraries: LibraryLink[] = [];
|
||||
for (const [sourceName, sourceLibraries] of Object.entries(
|
||||
artifact.linkReferences,
|
||||
)) {
|
||||
for (const libraryName of Object.keys(sourceLibraries)) {
|
||||
const libraryFqn = `${sourceName}:${libraryName}`;
|
||||
const address =
|
||||
providedLibraries[libraryFqn] ?? providedLibraries[libraryName];
|
||||
|
||||
neededLibraries.push({
|
||||
sourceName,
|
||||
libraryName,
|
||||
libraryFqn,
|
||||
address,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
checkAmbiguousOrUnnecessaryLinks(providedLibraries, neededLibraries);
|
||||
checkOverlappingLibraryNames(providedLibraries, neededLibraries);
|
||||
checkMissingLibraryAddresses(neededLibraries);
|
||||
|
||||
return linkBytecode(artifact, neededLibraries);
|
||||
}
|
||||
|
||||
/**
|
||||
* Links the bytecode of a contract artifact with the provided library addresses.
|
||||
* This function does not perform any validation on the provided libraries.
|
||||
*
|
||||
* @param artifact The contract artifact containing the bytecode and link references.
|
||||
* @param libraries An array of LibraryLink objects representing the libraries to be linked.
|
||||
* @returns The linked bytecode with all provided libraries correctly linked.
|
||||
*/
|
||||
export function linkBytecode(
|
||||
artifact: Artifact,
|
||||
libraries: LibraryLink[],
|
||||
): PrefixedHexString {
|
||||
const { bytecode, linkReferences } = artifact;
|
||||
let linkedBytecode = bytecode;
|
||||
|
||||
for (const { sourceName, libraryName, address } of libraries) {
|
||||
const contractLinkReferences =
|
||||
linkReferences[sourceName]?.[libraryName] ?? [];
|
||||
const unprefixedAddress = getUnprefixedHexString(address);
|
||||
|
||||
for (const { start, length } of contractLinkReferences) {
|
||||
linkedBytecode =
|
||||
linkedBytecode.substring(0, 2 + start * 2) +
|
||||
unprefixedAddress +
|
||||
linkedBytecode.substring(2 + (start + length) * 2);
|
||||
}
|
||||
}
|
||||
|
||||
return getPrefixedHexString(linkedBytecode);
|
||||
}
|
||||
51
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bytes.ts
generated
vendored
Executable file
51
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/bytes.ts
generated
vendored
Executable file
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
* Checks if a value is an instance of Uint8Array.
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is a Uint8Array, false otherwise.
|
||||
*/
|
||||
export function isBytes(value: unknown): value is Uint8Array {
|
||||
return value instanceof Uint8Array;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pads a Uint8Array with zeros on the left to a specified length, or truncates
|
||||
* it from the left if it's too long.
|
||||
*
|
||||
* @param bytes The Uint8Array to pad or truncate.
|
||||
* @param length The desired length of the Uint8Array.
|
||||
* @returns The padded or truncated Uint8Array.
|
||||
*/
|
||||
export function setLengthLeft(bytes: Uint8Array, length: number): Uint8Array {
|
||||
if (bytes.length < length) {
|
||||
const padded = new Uint8Array(length);
|
||||
padded.set(bytes, length - bytes.length);
|
||||
return padded;
|
||||
}
|
||||
|
||||
return bytes.subarray(-length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if two Uint8Arrays are equal.
|
||||
*
|
||||
* @param x The first Uint8Array to compare.
|
||||
* @param y The second Uint8Array to compare.
|
||||
* @returns True if the Uint8Arrays are equal, false otherwise.
|
||||
*/
|
||||
export function equalsBytes(x: Uint8Array, y: Uint8Array): boolean {
|
||||
return x.length === y.length && x.every((xVal, i) => xVal === y[i]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a UTF-8 encoded string into a byte array.
|
||||
*
|
||||
* @param utf8String The UTF-8 encoded string to convert to bytes.
|
||||
* @returns A Uint8Array representing the byte sequence of the input UTF-8 string.
|
||||
*/
|
||||
export function utf8StringToBytes(utf8String: string): Uint8Array {
|
||||
return new TextEncoder().encode(utf8String);
|
||||
}
|
||||
|
||||
export { bytesToBigInt, bytesToNumber, numberToBytes } from "./number.js";
|
||||
export { bytesToHexString, hexStringToBytes } from "./hex.js";
|
||||
24
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/ci.ts
generated
vendored
Executable file
24
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/ci.ts
generated
vendored
Executable file
@@ -0,0 +1,24 @@
|
||||
/**
|
||||
* IMPORTANT: A copy of this function exists in `v-next/hardhat-node-test-reporter/src/ci.ts`.
|
||||
* If you change this function, you must also change the copy.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Checks whether the current process is running in a CI environment.
|
||||
*
|
||||
* @returns True if the current process is running in a CI environment.
|
||||
*/
|
||||
export function isCi(): boolean {
|
||||
const env = process.env;
|
||||
|
||||
return (
|
||||
env.GITHUB_ACTIONS !== undefined || // GitHub Actions
|
||||
env.NOW !== undefined || // Vercel Now
|
||||
env.DEPLOYMENT_ID !== undefined || // Vercel Now
|
||||
env.CODEBUILD_BUILD_NUMBER !== undefined || // AWS CodeBuild
|
||||
env.CI !== undefined || // Travis CI, CircleCI, Cirrus CI, GitLab CI, Appveyor, CodeShip, dsari
|
||||
env.CONTINUOUS_INTEGRATION !== undefined || // Travis CI, Cirrus CI
|
||||
env.BUILD_NUMBER !== undefined || // Jenkins, TeamCity
|
||||
env.RUN_ID !== undefined // TaskCluster, dsari
|
||||
);
|
||||
}
|
||||
7
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/common-errors.ts
generated
vendored
Executable file
7
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/common-errors.ts
generated
vendored
Executable file
@@ -0,0 +1,7 @@
|
||||
import { CustomError } from "./error.js";
|
||||
|
||||
/**
 * Thrown when a function receives an argument that is not valid for the
 * requested operation.
 */
export class InvalidParameterError extends CustomError {
  constructor(message: string, cause?: Error) {
    super(message, cause);
  }
}
|
||||
46
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/crypto.ts
generated
vendored
Executable file
46
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/crypto.ts
generated
vendored
Executable file
@@ -0,0 +1,46 @@
|
||||
import { keccak256 as keccak256Impl } from "ethereum-cryptography/keccak";
|
||||
import { sha256 as sha256Impl } from "ethereum-cryptography/sha256";
|
||||
|
||||
/**
|
||||
* Computes the Keccak-256 hash of the input bytes.
|
||||
*
|
||||
* @param bytes The input bytes to hash.
|
||||
* @returns The Keccak-256 hash of the input bytes.
|
||||
*/
|
||||
export async function keccak256(bytes: Uint8Array): Promise<Uint8Array> {
|
||||
return keccak256Impl(bytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the SHA-256 hash of the input bytes.
|
||||
*
|
||||
* @param bytes The input bytes to hash.
|
||||
* @returns The SHA-256 hash of the input bytes.
|
||||
*/
|
||||
export async function sha256(bytes: Uint8Array): Promise<Uint8Array> {
|
||||
return sha256Impl(bytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a non-cryptographic hash-based identifier for the given input.
|
||||
*
|
||||
* This function is primarily intended for generating unique identifiers from
|
||||
* a given input string.
|
||||
* It uses the SHA-1 hash algorithm, which is not cryptographically secure, but
|
||||
* is sufficient for this use case as long as the input is not generated by an
|
||||
* attacker.
|
||||
*
|
||||
* Note: The exact algorithm used (SHA-1) is not crucial for the function's
|
||||
* purpose of generating unique identifiers, and could be replaced if needed.
|
||||
*
|
||||
* @param data The input string to be hashed.
|
||||
* @returns The SHA-1 hash of the input string, represented as a
|
||||
* hexadecimal string.
|
||||
*/
|
||||
export async function createNonCryptographicHashId(
|
||||
data: string,
|
||||
): Promise<string> {
|
||||
const message = new TextEncoder().encode(data);
|
||||
const buffer = await crypto.subtle.digest("SHA-1", message);
|
||||
return Buffer.from(buffer).toString("hex");
|
||||
}
|
||||
28
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/date.ts
generated
vendored
Executable file
28
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/date.ts
generated
vendored
Executable file
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* Converts a string, number, or Date object to a Unix timestamp (seconds since the Unix Epoch).
|
||||
*
|
||||
* @param value The string to convert.
|
||||
* @returns The Unix timestamp.
|
||||
*/
|
||||
export function toSeconds(value: string | number | Date): number {
|
||||
return Math.floor(new Date(value).getTime() / 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Unix timestamp to a Date object.
|
||||
*
|
||||
* @param timestamp The Unix timestamp to convert.
|
||||
* @returns The Date object.
|
||||
*/
|
||||
export function secondsToDate(timestamp: number): Date {
|
||||
return new Date(timestamp * 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current Unix timestamp (seconds since the Unix Epoch).
|
||||
*
|
||||
* @returns The current Unix timestamp.
|
||||
*/
|
||||
export function now(): number {
|
||||
return Math.floor(Date.now() / 1000);
|
||||
}
|
||||
38
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/debug.ts
generated
vendored
Executable file
38
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/debug.ts
generated
vendored
Executable file
@@ -0,0 +1,38 @@
|
||||
import debugLib from "debug";
|
||||
|
||||
/**
|
||||
* A simple decorator that adds debug logging for when a method is entered and exited.
|
||||
*
|
||||
* This decorator is meant to be used for debugging purposes only. It should not be committed in runtime code.
|
||||
*
|
||||
* Example usage:
|
||||
*
|
||||
* ```
|
||||
* class MyClass {
|
||||
* @withDebugLogs("MyClass:exampleClassMethod")
|
||||
* public function exampleClassMethod(...)
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function withDebugLogs<This, Args extends any[], Return>(
|
||||
tag: string = "",
|
||||
) {
|
||||
return function actualDecorator(
|
||||
originalMethod: (this: This, ...args: Args) => Return,
|
||||
_context: ClassMethodDecoratorContext<
|
||||
This,
|
||||
(this: This, ...args: Args) => Return
|
||||
>,
|
||||
): (this: This, ...args: Args) => Return {
|
||||
const log = debugLib(`hardhat:dev:core${tag === "" ? "" : `:${tag}`}`);
|
||||
|
||||
function replacementMethod(this: This, ...args: Args): Return {
|
||||
log(`Entering method with args:`, args);
|
||||
const result = originalMethod.call(this, ...args);
|
||||
log(`Exiting method.`);
|
||||
return result;
|
||||
}
|
||||
|
||||
return replacementMethod;
|
||||
};
|
||||
}
|
||||
33
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/env.ts
generated
vendored
Executable file
33
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/env.ts
generated
vendored
Executable file
@@ -0,0 +1,33 @@
|
||||
import { camelToSnakeCase } from "./string.js";
|
||||
|
||||
/**
|
||||
* Sets the resolved global options as environment variables.
|
||||
*
|
||||
* @param globalOptions An object containing the resolved global options,
|
||||
* with each option adhering to its definition in the globalOptionDefinitions.
|
||||
*/
|
||||
export function setGlobalOptionsAsEnvVariables<
|
||||
T extends Record<keyof T, string | boolean>,
|
||||
>(globalOptions: T): void {
|
||||
for (const [name, value] of Object.entries(globalOptions)) {
|
||||
const envName = getEnvVariableNameFromGlobalOption(name);
|
||||
|
||||
if (value !== undefined) {
|
||||
process.env[envName] = String(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a global option name to its corresponding environment variable name.
|
||||
* The conversion involves transforming the option name from camelCase to
|
||||
* SNAKE_CASE and prefixing it with "HARDHAT_".
|
||||
*
|
||||
* @param globalOptionName The name of the global option in camelCase.
|
||||
*
|
||||
* @returns The corresponding environment variable name in the format
|
||||
* "HARDHAT_<OPTION_NAME_IN_SNAKE_CASE>".
|
||||
*/
|
||||
export function getEnvVariableNameFromGlobalOption(globalOptionName: string) {
|
||||
return `HARDHAT_${camelToSnakeCase(globalOptionName).toUpperCase()}`;
|
||||
}
|
||||
129
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/error.ts
generated
vendored
Executable file
129
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/error.ts
generated
vendored
Executable file
@@ -0,0 +1,129 @@
|
||||
/**
|
||||
* Abstract custom error class, which inherits from the built-in Error class,
|
||||
* making sure that the standard error properties are set, and the stack trace
|
||||
* is not polluted with the custom error class' code.
|
||||
*
|
||||
* This class supports the `cause` property, which can be used to pass the
|
||||
* original error that caused the custom error to be thrown. Note that it needs
|
||||
* to be an instance of the built-in Error class, or a subclass of it. See `ensureError`
|
||||
* for a convenient way of using it.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* class MyCustomError extends CustomError {
|
||||
* }
|
||||
*
|
||||
* try {
|
||||
* mayThrow();
|
||||
* } catch (error) {
|
||||
* ensureError(error);
|
||||
* throw new MyCustomError('Something went wrong', error);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export abstract class CustomError extends Error {
|
||||
public override stack!: string;
|
||||
|
||||
constructor(message: string, cause?: Error) {
|
||||
super(message, cause !== undefined ? { cause } : undefined);
|
||||
this.name = this.constructor.name;
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures that the provided value is an instance of an error.
|
||||
*
|
||||
* @example
|
||||
* This function is meant to be used in a catch block to ensure that you caught the right error.
|
||||
*
|
||||
* ```ts
|
||||
* // Ensuring that you got an Error
|
||||
* try {
|
||||
* mayThrow();
|
||||
* } catch (error) {
|
||||
* ensureError(error);
|
||||
* console.err(error.message);
|
||||
* }
|
||||
*
|
||||
* // Ensuring that you got a specific error
|
||||
* try {
|
||||
* mayThrow();
|
||||
* } catch (error) {
|
||||
* ensureError(error, MyError);
|
||||
* console.err(error.myMessage());
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @param thrown The value to check.
|
||||
* @param ErrorType The error type to check against.
|
||||
* @throws The value if its not an instance of the specified error type.
|
||||
*/
|
||||
export function ensureError<ErrorT extends Error>(
|
||||
thrown: unknown,
|
||||
ErrorType?: new (...args: any[]) => ErrorT,
|
||||
): asserts thrown is ErrorT {
|
||||
if (ErrorType === undefined) {
|
||||
if (thrown instanceof Error) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw thrown;
|
||||
}
|
||||
|
||||
if (thrown instanceof ErrorType) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw thrown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures that the provided value is a NodeJS.ErrnoException with a string 'code'.
|
||||
* @param thrown The value to check.
|
||||
* @throws The value if its not an error or if it doesn't have a code property.
|
||||
* @example
|
||||
* ```ts
|
||||
* try {
|
||||
* await fs.promises.readFile("non-existing-file.txt");
|
||||
* } catch (error) {
|
||||
* ensureNodeErrnoExceptionError(error);
|
||||
* console.error(error.code);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function ensureNodeErrnoExceptionError(
|
||||
thrown: unknown,
|
||||
): asserts thrown is NodeJS.ErrnoException & Error & { code: string } {
|
||||
ensureError(thrown);
|
||||
|
||||
if (!("code" in thrown) || typeof thrown.code !== "string") {
|
||||
throw thrown;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error for an unreachable code path. This function is typically
|
||||
* used in a default case of a switch statement where all possible values of
|
||||
* the switched variable should be handled in other cases. If the default case
|
||||
* is reached, it means that an unexpected value was encountered, so an error
|
||||
* is thrown.
|
||||
*
|
||||
* Note: The `@typescript-eslint/switch-exhaustiveness-check` rule checks for
|
||||
* exhaustiveness in switch statements by comparing the types of the switch
|
||||
* expression and the case clauses. However, it only works with union types and
|
||||
* enum types. If you're switching on the result of the `typeof` operator or
|
||||
* any other expression that doesn't return a union type or an enum type, this
|
||||
* rule cannot enforce exhaustiveness. In such cases, you can use this function
|
||||
* in the default case to ensure that an error is thrown if an unexpected value
|
||||
* is encountered.
|
||||
*
|
||||
* @param _value The unexpected value. This parameter is unused and is only for
|
||||
* the purpose of type checking.
|
||||
* @param error The error to throw.
|
||||
* @returns This function never returns normally. It always throws an error.
|
||||
* @throws Will throw an error when called.
|
||||
*/
|
||||
export function unreachable(_value: never, error: Error): never {
|
||||
throw error;
|
||||
}
|
||||
75
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/bytecode.ts
generated
vendored
Executable file
75
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/bytecode.ts
generated
vendored
Executable file
@@ -0,0 +1,75 @@
|
||||
import type { LibraryAddresses, LibraryLink } from "../internal/bytecode.js";
|
||||
|
||||
import { CustomError } from "../error.js";
|
||||
|
||||
export class InvalidLibraryAddressError extends CustomError {
|
||||
constructor(libraries: LibraryAddresses) {
|
||||
const formattedLibraries = Object.entries(libraries)
|
||||
.map(([libraryName, address]) => `\t* "${libraryName}": "${address}"`)
|
||||
.join("\n");
|
||||
|
||||
super(`The following libraries have invalid addresses:
|
||||
${formattedLibraries}
|
||||
|
||||
Please provide valid Ethereum addresses for these libraries.`);
|
||||
}
|
||||
}
|
||||
|
||||
export class AmbiguousLibraryNameError extends CustomError {
|
||||
constructor(libraries: Record<string, LibraryLink[]>) {
|
||||
const formattedLibraries = Object.entries(libraries)
|
||||
.map(
|
||||
([providedLibraryName, matchingLibraries]) =>
|
||||
`\t* "${providedLibraryName}":\n${matchingLibraries
|
||||
.map(({ libraryFqn }) => `\t\t* "${libraryFqn}"`)
|
||||
.join("\n")}`,
|
||||
)
|
||||
.join("\n");
|
||||
|
||||
super(`The following libraries may resolve to multiple libraries:
|
||||
${formattedLibraries}
|
||||
|
||||
Please provide the fully qualified name for these libraries.`);
|
||||
}
|
||||
}
|
||||
|
||||
export class UnnecessaryLibraryError extends CustomError {
|
||||
constructor(libraries: string[]) {
|
||||
const formattedLibraries = libraries
|
||||
.map((libraryName) => `\t* "${libraryName}"`)
|
||||
.join("\n");
|
||||
|
||||
super(`The following libraries are not referenced by the contract:
|
||||
${formattedLibraries}
|
||||
|
||||
Please provide only the libraries that are needed.`);
|
||||
}
|
||||
}
|
||||
|
||||
export class OverlappingLibrariesError extends CustomError {
|
||||
constructor(libraries: string[]) {
|
||||
const formattedLibraries = libraries
|
||||
.map((libraryFqn) => `\t* "${libraryFqn}"`)
|
||||
.join("\n");
|
||||
|
||||
super(`The following libraries are provided more than once:
|
||||
${formattedLibraries}
|
||||
|
||||
Please ensure that each library is provided only once, either by its name or its fully qualified name.`);
|
||||
}
|
||||
}
|
||||
|
||||
export class MissingLibrariesError extends CustomError {
|
||||
constructor(libraries: string[]) {
|
||||
const formattedLibraries = libraries
|
||||
.map((libraryFqn) => `\t* "${libraryFqn}"`)
|
||||
.join("\n");
|
||||
|
||||
super(
|
||||
`The following libraries are missing:
|
||||
${formattedLibraries}
|
||||
|
||||
Please provide all the required libraries.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
47
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/fs.ts
generated
vendored
Executable file
47
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/fs.ts
generated
vendored
Executable file
@@ -0,0 +1,47 @@
|
||||
import { CustomError } from "../error.js";
|
||||
|
||||
// We use this error to encapsulate any other error possibly thrown by node's
// fs apis, as sometimes their errors don't have stack traces.
// The original error, when available, travels as the `cause` (see CustomError).
export class FileSystemAccessError extends CustomError {}
|
||||
|
||||
/**
 * Thrown when a file expected at `filePath` does not exist.
 */
export class FileNotFoundError extends CustomError {
  constructor(filePath: string, cause?: Error) {
    super(`File ${filePath} not found`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when creating a file at `filePath` that already exists.
 */
export class FileAlreadyExistsError extends CustomError {
  constructor(filePath: string, cause?: Error) {
    super(`File ${filePath} already exists`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when a file's contents cannot be parsed in the expected format;
 * the parsing error is preserved as `cause`.
 */
export class InvalidFileFormatError extends CustomError {
  constructor(filePath: string, cause: Error) {
    super(`Invalid file format: ${filePath}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when serializing data to a JSON file fails; the serialization
 * error is preserved as `cause`.
 */
export class JsonSerializationError extends CustomError {
  constructor(filePath: string, cause: Error) {
    super(`Error serializing JSON file ${filePath}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when a path expected to be a directory is not one.
 */
export class NotADirectoryError extends CustomError {
  constructor(filePath: string, cause: Error) {
    super(`Path ${filePath} is not a directory`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when a path expected to be a file is a directory.
 */
export class IsDirectoryError extends CustomError {
  constructor(filePath: string, cause: Error | undefined) {
    super(`Path ${filePath} is a directory`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when an operation requires an empty directory but the directory
 * has contents.
 */
export class DirectoryNotEmptyError extends CustomError {
  constructor(filePath: string, cause: Error) {
    super(`Directory ${filePath} is not empty`, cause);
  }
}
|
||||
13
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/package.ts
generated
vendored
Executable file
13
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/package.ts
generated
vendored
Executable file
@@ -0,0 +1,13 @@
|
||||
import { CustomError } from "../error.js";
|
||||
|
||||
/**
 * Thrown when no package.json can be located for the given file path or URL.
 */
export class PackageJsonNotFoundError extends CustomError {
  constructor(filePathOrUrl: string, cause?: Error) {
    super(`No package.json found for ${filePathOrUrl}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when a package.json exists but could not be read; the underlying
 * error is preserved as `cause`.
 */
export class PackageJsonReadError extends CustomError {
  constructor(packageJsonPath: string, cause?: Error) {
    super(`Failed to read package.json at ${packageJsonPath}`, cause);
  }
}
|
||||
84
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/request.ts
generated
vendored
Executable file
84
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/request.ts
generated
vendored
Executable file
@@ -0,0 +1,84 @@
|
||||
import type UndiciT from "undici";
|
||||
|
||||
import { CustomError } from "../error.js";
|
||||
import { sanitizeUrl } from "../internal/request.js";
|
||||
import { isObject } from "../lang.js";
|
||||
|
||||
/**
 * Thrown when an HTTP request fails. The URL is passed through `sanitizeUrl`
 * before being embedded in the message (presumably to strip sensitive parts —
 * see `sanitizeUrl`).
 */
export class RequestError extends CustomError {
  constructor(url: string, type: UndiciT.Dispatcher.HttpMethod, cause?: Error) {
    super(`Failed to make ${type} request to ${sanitizeUrl(url)}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when downloading a file from `url` fails.
 */
export class DownloadError extends CustomError {
  constructor(url: string, cause?: Error) {
    super(`Failed to download file from ${sanitizeUrl(url)}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when creating an HTTP dispatcher fails.
 */
export class DispatcherError extends CustomError {
  constructor(message: string, cause?: Error) {
    super(`Failed to create dispatcher: ${message}`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when a request to `url` does not complete in time.
 */
export class RequestTimeoutError extends CustomError {
  constructor(url: string, cause?: Error) {
    super(`Request to ${sanitizeUrl(url)} timed out`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when the remote host refuses the connection.
 */
export class ConnectionRefusedError extends CustomError {
  constructor(url: string, cause?: Error) {
    super(`Connection to ${sanitizeUrl(url)} was refused`, cause);
  }
}
|
||||
|
||||
/**
 * Thrown when an HTTP response arrives with an unexpected status code.
 *
 * Extracts `statusCode`, `headers`, and `body` from the causing error when
 * they are present and well-formed; otherwise they fall back to `-1`/`null`.
 * The cause is typed as a plain `Error`, so every field is checked with
 * `in`/`typeof` before use (presumably the cause comes from undici — see the
 * `UndiciT` import — but that is not enforced here).
 */
export class ResponseStatusCodeError extends CustomError {
  public readonly statusCode: number;
  public readonly headers:
    | string[]
    | Record<string, string | string[] | undefined>
    | null;
  public readonly body: null | Record<string, any> | string;

  constructor(url: string, cause: Error) {
    super(`Received an unexpected status code from ${sanitizeUrl(url)}`, cause);
    // -1 signals "status code unavailable on the cause".
    this.statusCode =
      "statusCode" in cause && typeof cause.statusCode === "number"
        ? cause.statusCode
        : -1;
    this.headers = this.#extractHeaders(cause);
    this.body = "body" in cause && isObject(cause.body) ? cause.body : null;
  }

  // Returns the cause's `headers` when it is an array or a valid
  // string -> string/string[]/undefined record; `null` otherwise.
  #extractHeaders(
    cause: Error,
  ): string[] | Record<string, string | string[] | undefined> | null {
    if ("headers" in cause) {
      const headers = cause.headers;
      if (Array.isArray(headers)) {
        return headers;
      } else if (this.#isValidHeaders(headers)) {
        return headers;
      }
    }
    return null;
  }

  // Type guard: every header value must be a string, an array of strings,
  // or undefined.
  #isValidHeaders(
    headers: unknown,
  ): headers is Record<string, string | string[] | undefined> {
    if (!isObject(headers)) {
      return false;
    }

    return Object.values(headers).every(
      (header) =>
        typeof header === "string" ||
        (Array.isArray(header) &&
          header.every((item: unknown) => typeof item === "string")) ||
        header === undefined,
    );
  }
}
|
||||
17
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/subprocess.ts
generated
vendored
Executable file
17
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/subprocess.ts
generated
vendored
Executable file
@@ -0,0 +1,17 @@
|
||||
import { CustomError } from "../error.js";
|
||||
|
||||
export class SubprocessFileNotFoundError extends CustomError {
|
||||
constructor(filePath: string) {
|
||||
super(
|
||||
`Cannot find the subprocess file to execute, invalid file path: ${filePath}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class SubprocessPathIsDirectoryError extends CustomError {
|
||||
constructor(path: string) {
|
||||
super(
|
||||
`The provided path is a directory, only files are accepted. Path: ${path}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
75
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/synchronization.ts
generated
vendored
Executable file
75
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/errors/synchronization.ts
generated
vendored
Executable file
@@ -0,0 +1,75 @@
|
||||
import { CustomError } from "../error.js";
|
||||
|
||||
export class BaseMultiProcessMutexError extends CustomError {
|
||||
constructor(message: string, cause?: Error) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
||||
|
||||
export class InvalidMultiProcessMutexPathError extends BaseMultiProcessMutexError {
|
||||
constructor(mutexPath: string) {
|
||||
super(`The path ${mutexPath} is not a valid absolute path.`);
|
||||
}
|
||||
}
|
||||
|
||||
export class MultiProcessMutexError extends BaseMultiProcessMutexError {
|
||||
constructor(lockPath: string, cause: Error) {
|
||||
super(`Unexpected error with lock at ${lockPath}: ${cause.message}`, cause);
|
||||
}
|
||||
}
|
||||
|
||||
export class MultiProcessMutexTimeoutError extends BaseMultiProcessMutexError {
|
||||
constructor(lockPath: string, timeoutMs: number) {
|
||||
super(
|
||||
`Timed out waiting to acquire lock at ${lockPath} after ${timeoutMs}ms`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class StaleMultiProcessMutexError extends BaseMultiProcessMutexError {
|
||||
constructor(lockPath: string, ownerUid: number | undefined, cause: Error) {
|
||||
const uidInfo = ownerUid !== undefined ? ` (uid: ${ownerUid})` : "";
|
||||
super(
|
||||
`Lock at ${lockPath} appears stale but cannot be removed due to insufficient permissions${uidInfo}. Please remove it manually: ${lockPath}`,
|
||||
cause,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class IncompatibleMultiProcessMutexError extends BaseMultiProcessMutexError {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
|
||||
export class IncompatibleHostnameMultiProcessMutexError extends IncompatibleMultiProcessMutexError {
|
||||
constructor(
|
||||
lockPath: string,
|
||||
foreignHostname: string,
|
||||
currentHostname: string,
|
||||
) {
|
||||
super(
|
||||
`Lock at ${lockPath} was created by a different host (${foreignHostname}, current: ${currentHostname}). It cannot be verified or removed automatically. Please remove it manually: ${lockPath}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class IncompatiblePlatformMultiProcessMutexError extends IncompatibleMultiProcessMutexError {
|
||||
constructor(
|
||||
lockPath: string,
|
||||
foreignPlatform: string,
|
||||
currentPlatform: string,
|
||||
) {
|
||||
super(
|
||||
`Lock at ${lockPath} was created on a different platform (${foreignPlatform}, current: ${currentPlatform}). It cannot be verified or removed automatically. Please remove it manually: ${lockPath}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class IncompatibleUidMultiProcessMutexError extends IncompatibleMultiProcessMutexError {
|
||||
constructor(lockPath: string, foreignUid: number, currentUid: number) {
|
||||
super(
|
||||
`Lock at ${lockPath} is owned by a different user (uid: ${foreignUid}, current: ${currentUid}). It cannot be removed automatically. Please remove it manually: ${lockPath}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
91
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/eth.ts
generated
vendored
Executable file
91
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/eth.ts
generated
vendored
Executable file
@@ -0,0 +1,91 @@
|
||||
import type { PrefixedHexString } from "./hex.js";
|
||||
|
||||
import { bytesToHexString, numberToHexString, setLengthLeft } from "./hex.js";
|
||||
import {
|
||||
getAddressGenerator,
|
||||
getHashGenerator,
|
||||
isValidChecksum,
|
||||
} from "./internal/eth.js";
|
||||
|
||||
/**
|
||||
* Checks if a value is an Ethereum address.
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is an Ethereum address, false otherwise.
|
||||
*/
|
||||
export function isAddress(value: unknown): value is PrefixedHexString {
|
||||
return typeof value === "string" && /^0x[0-9a-f]{40}$/i.test(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a value is an Ethereum address and if the checksum is valid.
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is an Ethereum address with a valid checksum, false otherwise.
|
||||
*/
|
||||
export async function isValidChecksumAddress(value: unknown): Promise<boolean> {
|
||||
return isAddress(value) && isValidChecksum(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a value is an Ethereum hash.
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is an Ethereum hash, false otherwise.
|
||||
*/
|
||||
export function isHash(value: unknown): value is PrefixedHexString {
|
||||
return typeof value === "string" && /^0x[0-9a-f]{64}$/i.test(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a number to a hexadecimal string with a length of 32 bytes.
|
||||
*
|
||||
* @param value The number to convert.
|
||||
* @returns The hexadecimal representation of the number padded to 32 bytes.
|
||||
* @throws InvalidParameterError If the input is not a safe integer or is negative.
|
||||
*/
|
||||
export function toEvmWord(value: bigint | number): PrefixedHexString {
|
||||
return setLengthLeft(numberToHexString(value), 64);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a pseudo-random sequence of hash bytes.
|
||||
*
|
||||
* @returns A pseudo-random sequence of hash bytes.
|
||||
*/
|
||||
export async function generateHashBytes(): Promise<Uint8Array> {
|
||||
const hashGenerator = await getHashGenerator();
|
||||
return hashGenerator.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a pseudo-random hash.
|
||||
*
|
||||
* @returns A pseudo-random hash.
|
||||
*/
|
||||
export async function randomHash(): Promise<PrefixedHexString> {
|
||||
const hashBytes = await generateHashBytes();
|
||||
return bytesToHexString(hashBytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a pseudo-random sequence of hash bytes that can be used as an
|
||||
* address.
|
||||
*
|
||||
* @returns A pseudo-random sequence of hash bytes.
|
||||
*/
|
||||
export async function generateAddressBytes(): Promise<Uint8Array> {
|
||||
const addressGenerator = await getAddressGenerator();
|
||||
const hashBytes = await addressGenerator.next();
|
||||
return hashBytes.slice(0, 20);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a pseudo-random address.
|
||||
*
|
||||
* @returns A pseudo-random address.
|
||||
*/
|
||||
export async function randomAddress(): Promise<PrefixedHexString> {
|
||||
const addressBytes = await generateAddressBytes();
|
||||
return bytesToHexString(addressBytes);
|
||||
}
|
||||
184
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/format.ts
generated
vendored
Executable file
184
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/format.ts
generated
vendored
Executable file
@@ -0,0 +1,184 @@
|
||||
import {
|
||||
getColumnWidths,
|
||||
getContentWidth,
|
||||
getHeadingWidth,
|
||||
getStringWidth,
|
||||
renderContentLine,
|
||||
renderHeaderOpen,
|
||||
renderRowSeparator,
|
||||
renderSectionClose,
|
||||
} from "./internal/format.js";
|
||||
|
||||
export interface TableTitle {
|
||||
type: "title";
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface TableSectionHeader {
|
||||
type: "section-header";
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface TableHeader {
|
||||
type: "header";
|
||||
cells: string[];
|
||||
}
|
||||
|
||||
export interface TableRow {
|
||||
type: "row";
|
||||
cells: string[];
|
||||
}
|
||||
|
||||
export type TableItem =
|
||||
| TableTitle
|
||||
| TableSectionHeader
|
||||
| TableHeader
|
||||
| TableRow;
|
||||
|
||||
/**
|
||||
* Formats an array of titles, section headers, headers, and rows into a table
|
||||
* string with box-drawing characters.
|
||||
*
|
||||
* Features:
|
||||
* - Titles are centered in a standalone box with double borders (╔═╗)
|
||||
* - Section headers group related content with automatic closing
|
||||
* - Headers and rows can have different numbers of cells
|
||||
* - Rows with fewer cells than max columns are handled with special rendering
|
||||
*
|
||||
* @param items An array of table items (titles, section headers, headers, and rows).
|
||||
* Sections are automatically closed when a new section-header or title appears, or
|
||||
* at the end of the table.
|
||||
* @returns The formatted table as a string, ready to be rendered.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* formatTable([
|
||||
* { type: "title", text: "My Table" },
|
||||
* { type: "section-header", text: "User Data" },
|
||||
* { type: "header", cells: ["Name", "Age", "City"] },
|
||||
* { type: "row", cells: ["Alice", "30", "NYC"] },
|
||||
* { type: "row", cells: ["Bob", "25", "LA"] },
|
||||
* { type: "section-header", text: "Summary" },
|
||||
* { type: "header", cells: ["Total", "Count"] },
|
||||
* { type: "row", cells: ["55", "2"] }
|
||||
* ]);
|
||||
*
|
||||
* // =>
|
||||
* // ╔═══════════════════╗
|
||||
* // ║ My Table ║
|
||||
* // ╚═══════════════════╝
|
||||
* // ╔═══════════════════╗
|
||||
* // ║ User Data ║
|
||||
* // ╟───────┬─────┬─────╢
|
||||
* // ║ Name │ Age │ City║
|
||||
* // ╟───────┼─────┼─────╢
|
||||
* // ║ Alice │ 30 │ NYC ║
|
||||
* // ╟───────┼─────┼─────╢
|
||||
* // ║ Bob │ 25 │ LA ║
|
||||
* // ╚═══════╧═════╧═════╝
|
||||
* // ╔═══════════════════╗
|
||||
* // ║ Summary ║
|
||||
* // ╟───────┬───────────╢
|
||||
* // ║ Total │ Count ║
|
||||
* // ╟───────┼───────────╢
|
||||
* // ║ 55 │ 2 ║
|
||||
* // ╚═══════╧═══════════╝
|
||||
* ```
|
||||
*/
|
||||
export function formatTable(items: TableItem[]): string {
|
||||
if (items.length === 0) {
|
||||
return "";
|
||||
}
|
||||
|
||||
const columnWidths = getColumnWidths(items);
|
||||
const contentWidth = getContentWidth(columnWidths);
|
||||
const headingWidth = getHeadingWidth(items);
|
||||
|
||||
// If heading is wider than content, expand last column to fit
|
||||
if (headingWidth > contentWidth && columnWidths.length > 0) {
|
||||
const extraSpace = headingWidth - contentWidth;
|
||||
columnWidths[columnWidths.length - 1] += extraSpace;
|
||||
}
|
||||
|
||||
const tableWidth = Math.max(contentWidth, headingWidth);
|
||||
|
||||
const lines: string[] = [];
|
||||
let previousCellCount = 0; // Keep track of previous row/header cell count
|
||||
let inSection = false;
|
||||
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
const [previous, current] = [items[i - 1], items[i]];
|
||||
|
||||
if (current.type === "title") {
|
||||
if (inSection) {
|
||||
lines.push(renderSectionClose(columnWidths, previousCellCount));
|
||||
inSection = false;
|
||||
}
|
||||
|
||||
lines.push("╔" + "═".repeat(tableWidth) + "╗");
|
||||
const titleDisplayWidth = getStringWidth(current.text);
|
||||
const titleActualLength = current.text.length;
|
||||
const centeredTitle = current.text
|
||||
.padStart(
|
||||
(tableWidth + titleDisplayWidth) / 2 +
|
||||
(titleActualLength - titleDisplayWidth),
|
||||
)
|
||||
.padEnd(tableWidth + (titleActualLength - titleDisplayWidth));
|
||||
lines.push("║" + centeredTitle + "║");
|
||||
lines.push("╚" + "═".repeat(tableWidth) + "╝");
|
||||
} else if (current.type === "section-header") {
|
||||
if (inSection) {
|
||||
lines.push(renderSectionClose(columnWidths, previousCellCount));
|
||||
}
|
||||
|
||||
lines.push("╔" + "═".repeat(tableWidth) + "╗");
|
||||
const headerDisplayWidth = getStringWidth(current.text);
|
||||
const headerActualLength = current.text.length;
|
||||
const paddedHeader = current.text.padEnd(
|
||||
tableWidth - 2 + (headerActualLength - headerDisplayWidth),
|
||||
);
|
||||
lines.push("║ " + paddedHeader + " ║");
|
||||
inSection = true;
|
||||
} else if (current.type === "header") {
|
||||
const currentCellCount = current.cells.length;
|
||||
const innerJoiner =
|
||||
previous !== undefined && previous.type === "section-header"
|
||||
? "┬"
|
||||
: "┼";
|
||||
const needsTransition =
|
||||
previous !== undefined &&
|
||||
previous.type !== "section-header" &&
|
||||
currentCellCount < previousCellCount;
|
||||
|
||||
lines.push(
|
||||
renderHeaderOpen(
|
||||
columnWidths,
|
||||
currentCellCount,
|
||||
innerJoiner,
|
||||
needsTransition,
|
||||
),
|
||||
);
|
||||
lines.push(
|
||||
renderContentLine(current.cells, columnWidths, currentCellCount),
|
||||
);
|
||||
previousCellCount = currentCellCount;
|
||||
} else if (current.type === "row") {
|
||||
const currentCellCount = current.cells.length;
|
||||
|
||||
// Only add separator if previous wasn't a row
|
||||
if (previous === undefined || previous.type !== "row") {
|
||||
lines.push(renderRowSeparator(columnWidths, currentCellCount));
|
||||
}
|
||||
lines.push(
|
||||
renderContentLine(current.cells, columnWidths, currentCellCount),
|
||||
);
|
||||
previousCellCount = currentCellCount;
|
||||
}
|
||||
}
|
||||
|
||||
if (inSection) {
|
||||
lines.push(renderSectionClose(columnWidths, previousCellCount));
|
||||
}
|
||||
|
||||
return lines.join("\n");
|
||||
}
|
||||
888
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/fs.ts
generated
vendored
Executable file
888
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/fs.ts
generated
vendored
Executable file
@@ -0,0 +1,888 @@
|
||||
import type { JsonTypes, ParsedElementInfo } from "@streamparser/json-node";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
|
||||
import fsPromises from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import path from "node:path";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
|
||||
import { JSONParser } from "@streamparser/json-node";
|
||||
import { JsonStreamStringify } from "json-stream-stringify";
|
||||
|
||||
import { ensureError, ensureNodeErrnoExceptionError } from "./error.js";
|
||||
import {
|
||||
FileNotFoundError,
|
||||
FileSystemAccessError,
|
||||
InvalidFileFormatError,
|
||||
JsonSerializationError,
|
||||
FileAlreadyExistsError,
|
||||
NotADirectoryError,
|
||||
IsDirectoryError,
|
||||
DirectoryNotEmptyError,
|
||||
} from "./errors/fs.js";
|
||||
|
||||
/**
|
||||
* Determines the canonical pathname for a given path, resolving any symbolic
|
||||
* links, and returns it.
|
||||
*
|
||||
* @throws FileNotFoundError if absolutePath doesn't exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getRealPath(absolutePath: string): Promise<string> {
|
||||
try {
|
||||
return await fsPromises.realpath(path.normalize(absolutePath));
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively searches a directory and its subdirectories for files that
|
||||
* satisfy the specified condition, returning their absolute paths.
|
||||
*
|
||||
* @param dirFrom The absolute path of the directory to start the search from.
|
||||
* @param matches A function to filter files (not directories).
|
||||
* @param directoryFilter A function to filter which directories to recurse into
|
||||
* @returns An array of absolute paths. Each file has its true case, except
|
||||
* for the initial dirFrom part, which preserves the given casing.
|
||||
* No order is guaranteed. If dirFrom doesn't exist `[]` is returned.
|
||||
* @throws NotADirectoryError if dirFrom is not a directory.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getAllFilesMatching(
|
||||
dirFrom: string,
|
||||
matches?: (absolutePathToFile: string) => Promise<boolean> | boolean,
|
||||
directoryFilter?: (absolutePathToDir: string) => Promise<boolean> | boolean,
|
||||
): Promise<string[]> {
|
||||
const dirContent = await readdirOrEmpty(dirFrom);
|
||||
|
||||
const results = await Promise.all(
|
||||
dirContent.map(async (file) => {
|
||||
const absolutePathToFile = path.join(dirFrom, file);
|
||||
if (await isDirectory(absolutePathToFile)) {
|
||||
if (
|
||||
directoryFilter === undefined ||
|
||||
(await directoryFilter(absolutePathToFile))
|
||||
) {
|
||||
return getAllFilesMatching(
|
||||
absolutePathToFile,
|
||||
matches,
|
||||
directoryFilter,
|
||||
);
|
||||
}
|
||||
|
||||
return [];
|
||||
} else if (matches === undefined || (await matches(absolutePathToFile))) {
|
||||
return absolutePathToFile;
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
return results.flat();
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively searches a directory and its subdirectories for directories that
|
||||
* satisfy the specified condition, returning their absolute paths. Once a
|
||||
* directory is found, its subdirectories are not searched.
|
||||
*
|
||||
* Note: dirFrom is never returned, nor is `matches` called on it.
|
||||
*
|
||||
* @param dirFrom The absolute path of the directory to start the search from.
|
||||
* @param matches A function to filter directories (not files).
|
||||
* @returns An array of absolute paths. Each path has its true case, except
|
||||
* for the initial dirFrom part, which preserves the given casing.
|
||||
* No order is guaranteed. If dirFrom doesn't exist `[]` is returned.
|
||||
* @throws NotADirectoryError if dirFrom is not a directory.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getAllDirectoriesMatching(
|
||||
dirFrom: string,
|
||||
matches?: (absolutePathToDir: string) => Promise<boolean> | boolean,
|
||||
): Promise<string[]> {
|
||||
const dirContent = await readdirOrEmpty(dirFrom);
|
||||
|
||||
const results = await Promise.all(
|
||||
dirContent.map(async (file) => {
|
||||
const absolutePathToFile = path.join(dirFrom, file);
|
||||
if (!(await isDirectory(absolutePathToFile))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
if (matches === undefined || (await matches(absolutePathToFile))) {
|
||||
return absolutePathToFile;
|
||||
}
|
||||
|
||||
return getAllDirectoriesMatching(absolutePathToFile, matches);
|
||||
}),
|
||||
);
|
||||
|
||||
return results.flat();
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the true case path of a given relative path from a specified
|
||||
* directory, without resolving symbolic links, and returns it.
|
||||
*
|
||||
* @param from The absolute path of the directory to start the search from.
|
||||
* @param relativePath The relative path to get the true case of.
|
||||
* @returns The true case of the relative path.
|
||||
* @throws FileNotFoundError if the starting directory or the relative path doesn't exist.
|
||||
* @throws NotADirectoryError if the starting directory is not a directory.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getFileTrueCase(
|
||||
from: string,
|
||||
relativePath: string,
|
||||
): Promise<string> {
|
||||
const dirEntries = await readdirOrEmpty(from);
|
||||
|
||||
const segments = relativePath.split(path.sep);
|
||||
const nextDir = segments[0];
|
||||
const nextDirLowerCase = nextDir.toLowerCase();
|
||||
|
||||
for (const dirEntry of dirEntries) {
|
||||
if (dirEntry.toLowerCase() === nextDirLowerCase) {
|
||||
if (segments.length === 1) {
|
||||
return dirEntry;
|
||||
}
|
||||
|
||||
return path.join(
|
||||
dirEntry,
|
||||
await getFileTrueCase(
|
||||
path.join(from, dirEntry),
|
||||
path.relative(nextDir, relativePath),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
throw new FileNotFoundError(path.join(from, relativePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a given path is a directory.
|
||||
*
|
||||
* @param absolutePath The path to check.
|
||||
* @returns `true` if the path is a directory, `false` otherwise.
|
||||
* @throws FileNotFoundError if the path doesn't exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function isDirectory(absolutePath: string): Promise<boolean> {
|
||||
try {
|
||||
return (await fsPromises.lstat(absolutePath)).isDirectory();
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a JSON file and parses it. The encoding used is "utf8".
|
||||
*
|
||||
* @param absolutePathToFile The path to the file.
|
||||
* @returns The parsed JSON object.
|
||||
* @throws FileNotFoundError if the file doesn't exist.
|
||||
* @throws InvalidFileFormatError if the file is not a valid JSON file.
|
||||
* @throws IsDirectoryError if the path is a directory instead of a file.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function readJsonFile<T>(absolutePathToFile: string): Promise<T> {
|
||||
const content = await readUtf8File(absolutePathToFile);
|
||||
try {
|
||||
return JSON.parse(content.toString());
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
throw new InvalidFileFormatError(absolutePathToFile, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a JSON file as a stream and parses it. The encoding used is "utf8".
|
||||
* This function should be used when parsing very large JSON files.
|
||||
*
|
||||
* @param absolutePathToFile The path to the file.
|
||||
* @returns The parsed JSON object.
|
||||
* @throws FileNotFoundError if the file doesn't exist.
|
||||
* @throws InvalidFileFormatError if the file is not a valid JSON file.
|
||||
* @throws IsDirectoryError if the path is a directory instead of a file.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function readJsonFileAsStream<T>(
|
||||
absolutePathToFile: string,
|
||||
): Promise<T> {
|
||||
let fileHandle: FileHandle | undefined;
|
||||
|
||||
try {
|
||||
fileHandle = await fsPromises.open(absolutePathToFile, "r");
|
||||
|
||||
const fileReadStream = fileHandle.createReadStream();
|
||||
|
||||
// NOTE: We set a separator to disable self-closing to be able to use the parser
|
||||
// in the stream.pipeline context; see https://github.com/juanjoDiaz/streamparser-json/issues/47
|
||||
const jsonParser = new JSONParser({
|
||||
separator: "",
|
||||
});
|
||||
|
||||
const result: T | undefined = await pipeline(
|
||||
fileReadStream,
|
||||
jsonParser,
|
||||
async (
|
||||
elements: AsyncIterable<ParsedElementInfo.ParsedElementInfo>,
|
||||
): Promise<any | undefined> => {
|
||||
let value: JsonTypes.JsonPrimitive | JsonTypes.JsonStruct | undefined;
|
||||
for await (const element of elements) {
|
||||
value = element.value;
|
||||
}
|
||||
return value;
|
||||
},
|
||||
);
|
||||
|
||||
if (result === undefined) {
|
||||
throw new Error("No data");
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
|
||||
// If the code is defined, we assume the error to be related to the file system
|
||||
if ("code" in e) {
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
if (e.code === "EISDIR") {
|
||||
throw new IsDirectoryError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
// If the code is defined, we assume the error to be related to the file system
|
||||
if (e.code !== undefined) {
|
||||
throw new FileSystemAccessError(absolutePathToFile, e);
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we assume the error to be related to the file formatting
|
||||
throw new InvalidFileFormatError(absolutePathToFile, e);
|
||||
} finally {
|
||||
// Explicitly closing the file handle to fully release the underlying resources
|
||||
await fileHandle?.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes an object to a JSON file. The encoding used is "utf8" and the file is overwritten.
|
||||
* If part of the path doesn't exist, it will be created.
|
||||
*
|
||||
* @param absolutePathToFile The path to the file. If the file exists, it will be overwritten.
|
||||
* @param object The object to write.
|
||||
* @throws JsonSerializationError if the object can't be serialized to JSON.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function writeJsonFile<T>(
|
||||
absolutePathToFile: string,
|
||||
object: T,
|
||||
): Promise<void> {
|
||||
let content;
|
||||
try {
|
||||
content = JSON.stringify(object, null, 2);
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
throw new JsonSerializationError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
await writeUtf8File(absolutePathToFile, content);
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes an object to a JSON file as stream. The encoding used is "utf8" and the file is overwritten.
|
||||
* If part of the path doesn't exist, it will be created.
|
||||
* This function should be used when stringifying very large JSON objects.
|
||||
*
|
||||
* @param absolutePathToFile The path to the file. If the file exists, it will be overwritten.
|
||||
* @param object The object to write.
|
||||
* @throws JsonSerializationError if the object can't be serialized to JSON.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function writeJsonFileAsStream<T>(
|
||||
absolutePathToFile: string,
|
||||
object: T,
|
||||
): Promise<void> {
|
||||
const dirPath = path.dirname(absolutePathToFile);
|
||||
const dirExists = await exists(dirPath);
|
||||
if (!dirExists) {
|
||||
await mkdir(dirPath);
|
||||
}
|
||||
|
||||
let fileHandle: FileHandle | undefined;
|
||||
|
||||
try {
|
||||
fileHandle = await fsPromises.open(absolutePathToFile, "w");
|
||||
|
||||
const jsonStream = new JsonStreamStringify(object);
|
||||
const fileWriteStream = fileHandle.createWriteStream();
|
||||
|
||||
await pipeline(jsonStream, fileWriteStream);
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
// if the directory was created, we should remove it
|
||||
if (dirExists === false) {
|
||||
try {
|
||||
await remove(dirPath);
|
||||
// we don't want to override the original error
|
||||
} catch (_error) {}
|
||||
}
|
||||
|
||||
// If the code is defined, we assume the error to be related to the file system
|
||||
if ("code" in e && e.code !== undefined) {
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
|
||||
// Otherwise, we assume the error to be related to the file formatting
|
||||
throw new JsonSerializationError(absolutePathToFile, e);
|
||||
} finally {
|
||||
// NOTE: Historically, not closing the file handle caused issues on Windows,
|
||||
// for example, when trying to move the file previously written to by this function
|
||||
await fileHandle?.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a file and returns its content as a string. The encoding used is "utf8".
|
||||
*
|
||||
* @param absolutePathToFile The path to the file.
|
||||
* @returns The content of the file as a string.
|
||||
* @throws FileNotFoundError if the file doesn't exist.
|
||||
* @throws IsDirectoryError if the path is a directory instead of a file.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function readUtf8File(
|
||||
absolutePathToFile: string,
|
||||
): Promise<string> {
|
||||
try {
|
||||
return await fsPromises.readFile(absolutePathToFile, { encoding: "utf8" });
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
if (e.code === "EISDIR") {
|
||||
throw new IsDirectoryError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a string to a file. The encoding used is "utf8" and the file is overwritten by default.
|
||||
* If part of the path doesn't exist, it will be created.
|
||||
*
|
||||
* @param absolutePathToFile The path to the file.
|
||||
* @param data The data to write.
|
||||
* @param flag The flag to use when writing the file. If not provided, the file will be overwritten.
|
||||
* See https://nodejs.org/docs/latest-v20.x/api/fs.html#file-system-flags for more information.
|
||||
* @throws FileAlreadyExistsError if the file already exists and the flag "x" is used.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function writeUtf8File(
|
||||
absolutePathToFile: string,
|
||||
data: string,
|
||||
flag?: string,
|
||||
): Promise<void> {
|
||||
const dirPath = path.dirname(absolutePathToFile);
|
||||
const dirExists = await exists(dirPath);
|
||||
if (!dirExists) {
|
||||
await mkdir(dirPath);
|
||||
}
|
||||
|
||||
try {
|
||||
await fsPromises.writeFile(absolutePathToFile, data, {
|
||||
encoding: "utf8",
|
||||
flag,
|
||||
});
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
// if the directory was created, we should remove it
|
||||
if (dirExists === false) {
|
||||
try {
|
||||
await remove(dirPath);
|
||||
// we don't want to override the original error
|
||||
} catch (_error) {}
|
||||
}
|
||||
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
// flag "x" has been used and the file already exists
|
||||
if (e.code === "EEXIST") {
|
||||
throw new FileAlreadyExistsError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a file and returns its content as a Uint8Array.
|
||||
*
|
||||
* @param absolutePathToFile The path to the file.
|
||||
* @returns The content of the file as a Uint8Array.
|
||||
* @throws FileNotFoundError if the file doesn't exist.
|
||||
* @throws IsDirectoryError if the path is a directory instead of a file.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function readBinaryFile(
|
||||
absolutePathToFile: string,
|
||||
): Promise<Uint8Array> {
|
||||
try {
|
||||
const buffer = await fsPromises.readFile(absolutePathToFile);
|
||||
return new Uint8Array(buffer);
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
if (e.code === "EISDIR") {
|
||||
throw new IsDirectoryError(absolutePathToFile, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a directory and returns its content as an array of strings.
|
||||
*
|
||||
* @param absolutePathToDir The path to the directory.
|
||||
* @returns An array of strings with the names of the files and directories in the directory.
|
||||
* @throws FileNotFoundError if the directory doesn't exist.
|
||||
* @throws NotADirectoryError if the path is not a directory.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function readdir(absolutePathToDir: string): Promise<string[]> {
|
||||
try {
|
||||
return await fsPromises.readdir(absolutePathToDir);
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePathToDir, e);
|
||||
}
|
||||
|
||||
if (e.code === "ENOTDIR") {
|
||||
throw new NotADirectoryError(absolutePathToDir, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrapper around `readdir` that returns an empty array if the directory doesn't exist.
|
||||
*
|
||||
* @see readdir
|
||||
*/
|
||||
async function readdirOrEmpty(dirFrom: string): Promise<string[]> {
|
||||
try {
|
||||
return await readdir(dirFrom);
|
||||
} catch (error) {
|
||||
if (error instanceof FileNotFoundError) {
|
||||
return [];
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a directory and any necessary directories along the way. If the directory already exists,
|
||||
* nothing is done.
|
||||
*
|
||||
* @param absolutePath The path to the directory to create.
|
||||
* @throws FileSystemAccessError for any error.
|
||||
*/
|
||||
export async function mkdir(absolutePath: string): Promise<void> {
|
||||
try {
|
||||
await fsPromises.mkdir(absolutePath, { recursive: true });
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Alias for `mkdir`: creates the directory (and any missing parents),
 * doing nothing if it already exists.
 * @see mkdir
 */
export const ensureDir: typeof mkdir = mkdir;
|
||||
|
||||
/**
|
||||
* Creates a temporary directory with the specified prefix.
|
||||
*
|
||||
* @param prefix The prefix to use for the temporary directory.
|
||||
* @returns The absolute path to the created temporary directory.
|
||||
* @throws FileSystemAccessError for any error.
|
||||
*/
|
||||
export async function mkdtemp(prefix: string): Promise<string> {
|
||||
try {
|
||||
return await getRealPath(
|
||||
await fsPromises.mkdtemp(path.join(tmpdir(), prefix)),
|
||||
);
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the last change time of a file or directory's properties.
|
||||
* This includes changes to the file's metadata or contents.
|
||||
*
|
||||
* @param absolutePath The absolute path to the file or directory.
|
||||
* @returns The time of the last change as a Date object.
|
||||
* @throws FileNotFoundError if the path does not exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getChangeTime(absolutePath: string): Promise<Date> {
|
||||
try {
|
||||
const stats = await fsPromises.stat(absolutePath);
|
||||
return stats.ctime;
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the last access time of a file or directory's properties.
|
||||
*
|
||||
* @param absolutePath The absolute path to the file or directory.
|
||||
* @returns The time of the last access as a Date object.
|
||||
* @throws FileNotFoundError if the path does not exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getAccessTime(absolutePath: string): Promise<Date> {
|
||||
try {
|
||||
const stats = await fsPromises.stat(absolutePath);
|
||||
return stats.atime;
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the size of a file.
|
||||
*
|
||||
* @param absolutePath The absolute path to the file.
|
||||
* @returns The size of the file in bytes.
|
||||
* @throws FileNotFoundError if the path does not exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function getFileSize(absolutePath: string): Promise<number> {
|
||||
try {
|
||||
const stats = await fsPromises.stat(absolutePath);
|
||||
return stats.size;
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a file or directory exists.
|
||||
*
|
||||
* @param absolutePath The absolute path to the file or directory.
|
||||
* @returns A boolean indicating whether the file or directory exists.
|
||||
*/
|
||||
export async function exists(absolutePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fsPromises.access(absolutePath);
|
||||
return true;
|
||||
} catch (_error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Copies a file from a source to a destination.
 * If the destination file already exists, it will be overwritten.
 *
 * @param source The path to the source file. It can't be a directory.
 * @param destination The path to the destination file. It can't be a directory.
 * @throws FileNotFoundError if the source path or the destination path doesn't exist.
 * @throws IsDirectoryError if the source path or the destination path is a directory.
 * @throws FileSystemAccessError for any other error.
 */
export async function copy(source: string, destination: string): Promise<void> {
  // We must proactively check if the source is a directory.
  // On modern Linux kernels (6.x+), the `copy_file_range` system call used by
  // Node.js may return success (0 bytes copied) when the source is a directory
  // instead of throwing EISDIR. Node.js interprets this 0-byte success as a
  // completed operation, resulting in no error being thrown.
  if (await isDirectory(source)) {
    throw new IsDirectoryError(source, undefined);
  }

  try {
    await fsPromises.copyFile(source, destination);
  } catch (e) {
    ensureNodeErrnoExceptionError(e);
    // copyFile's ENOENT doesn't identify which path was missing;
    // probe both so the error names the right one.
    if (e.code === "ENOENT") {
      if (!(await exists(source))) {
        throw new FileNotFoundError(source, e);
      }
      if (!(await exists(destination))) {
        throw new FileNotFoundError(destination, e);
      }
    }

    // On linux, trying to copy a directory will throw EISDIR,
    // on Windows it will throw EPERM, and on macOS it will throw ENOTSUP.
    if (e.code === "EISDIR" || e.code === "EPERM" || e.code === "ENOTSUP") {
      if (await isDirectory(source)) {
        throw new IsDirectoryError(source, e);
      }
      if (await isDirectory(destination)) {
        throw new IsDirectoryError(destination, e);
      }
    }

    // No specific case matched (or the probes above found nothing wrong):
    // surface the original error as a generic access failure.
    throw new FileSystemAccessError(e.message, e);
  }
}
|
||||
|
||||
/**
 * Moves a file or directory from a source to a destination. If the source is a
 * file and the destination is a file that already exists, it will be overwritten.
 * If the source is a directory and the destination is a directory, it needs to be empty.
 *
 * Note: This method may not work when moving files between different mount points
 * or file systems, as the underlying `fsPromises.rename` method may not support it.
 *
 * @param source The path to the source file or directory.
 * @param destination The path to the destination file or directory.
 * @throws FileNotFoundError if the source path or the destination path doesn't exist.
 * @throws DirectoryNotEmptyError if the source path is a directory and the destination
 * path is a directory that is not empty.
 * @throws FileSystemAccessError for any other error.
 */
export async function move(source: string, destination: string): Promise<void> {
  try {
    await fsPromises.rename(source, destination);
  } catch (e) {
    ensureNodeErrnoExceptionError(e);
    // rename's ENOENT doesn't say which side was missing; probe each.
    if (e.code === "ENOENT") {
      if (!(await exists(source))) {
        throw new FileNotFoundError(source, e);
      }
      // For the destination only its parent directory needs to exist.
      if (!(await exists(path.dirname(destination)))) {
        throw new FileNotFoundError(destination, e);
      }
    }

    // On linux, trying to move a non-empty directory will throw ENOTEMPTY,
    // while on Windows it will throw EPERM.
    if (e.code === "ENOTEMPTY" || e.code === "EPERM") {
      if (await isDirectory(source)) {
        throw new DirectoryNotEmptyError(destination, e);
      }
    }

    // Anything unrecognized (e.g. cross-device moves) surfaces here.
    throw new FileSystemAccessError(e.message, e);
  }
}
|
||||
|
||||
/**
|
||||
* Removes a file or directory recursively.
|
||||
* Exceptions are ignored for non-existent paths.
|
||||
*
|
||||
* @param absolutePath The path to the file or directory to remove.
|
||||
* @throws FileSystemAccessError for any error, except for non-existent path errors.
|
||||
*/
|
||||
export async function remove(absolutePath: string): Promise<void> {
|
||||
try {
|
||||
await fsPromises.rm(absolutePath, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
maxRetries: 3,
|
||||
retryDelay: 300,
|
||||
});
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes the permissions of a file or directory.
|
||||
*
|
||||
* @param absolutePath The path to the file or directory.
|
||||
* @param mode The permissions to set. It can be a string or a number representing the octal mode.
|
||||
* @throws FileNotFoundError if the path doesn't exist.
|
||||
* @throws FileSystemAccessError for any other error.
|
||||
*/
|
||||
export async function chmod(
|
||||
absolutePath: string,
|
||||
mode: string | number,
|
||||
): Promise<void> {
|
||||
try {
|
||||
await fsPromises.chmod(absolutePath, mode);
|
||||
} catch (e) {
|
||||
ensureNodeErrnoExceptionError(e);
|
||||
if (e.code === "ENOENT") {
|
||||
throw new FileNotFoundError(absolutePath, e);
|
||||
}
|
||||
|
||||
throw new FileSystemAccessError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Creates a file with an empty content. If the file already exists, it will be overwritten.
 * If part of the path doesn't exist, it will be created.
 *
 * @param absolutePath The path to the file to create.
 * @throws FileSystemAccessError for any other error.
 */
export async function createFile(absolutePath: string): Promise<void> {
  // Writing "" both truncates an existing file and creates a new one;
  // missing parent directories are presumably handled by writeUtf8File
  // (defined elsewhere in this file) — per the contract stated above.
  await writeUtf8File(absolutePath, "");
}
|
||||
|
||||
/**
 * Empties a directory by recursively removing all its content. If the
 * directory doesn't exist, it will be created. The directory itself is
 * not removed.
 *
 * @param absolutePath The path to the directory to empty.
 * @throws NotADirectoryError if the path is not a directory.
 * @throws FileSystemAccessError for any other error.
 */
export async function emptyDir(absolutePath: string): Promise<void> {
  let isDir;
  let mode;
  try {
    // Capture the entry kind and its permission bits up front: the
    // permissions are reapplied after the directory is recreated below.
    const stats = await fsPromises.stat(absolutePath);
    isDir = stats.isDirectory();
    mode = stats.mode;
  } catch (e) {
    ensureNodeErrnoExceptionError(e);
    if (e.code === "ENOENT") {
      // Nothing there yet: just create the directory and we're done.
      await mkdir(absolutePath);
      return;
    }

    throw new FileSystemAccessError(e.message, e);
  }

  if (!isDir) {
    throw new NotADirectoryError(absolutePath, new Error());
  }

  // Remove-then-recreate empties the directory in one sweep rather than
  // deleting entries one by one.
  await remove(absolutePath);
  await mkdir(absolutePath);
  // Restore the original permission bits, which mkdir does not preserve.
  // eslint-disable-next-line no-bitwise -- Bitwise is common in fs permissions
  await chmod(absolutePath, mode & 0o777);
}
|
||||
|
||||
/**
|
||||
* Looks for a file in the current directory and its parents.
|
||||
*
|
||||
* @param fileName The name of the file to look for.
|
||||
* @param from The directory to start the search from. Defaults to the current working directory.
|
||||
* @returns The absolute path to the file, or `undefined` if it wasn't found.
|
||||
*/
|
||||
export async function findUp(
|
||||
fileName: string,
|
||||
from?: string,
|
||||
): Promise<string | undefined> {
|
||||
if (from === undefined) {
|
||||
from = process.cwd();
|
||||
}
|
||||
|
||||
let currentDir = from;
|
||||
while (true) {
|
||||
const absolutePath = path.join(currentDir, fileName);
|
||||
if (await exists(absolutePath)) {
|
||||
return absolutePath;
|
||||
}
|
||||
|
||||
const parentDir = path.dirname(currentDir);
|
||||
if (parentDir === currentDir) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
currentDir = parentDir;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function uses some heuristics to check if a file is binary by reading the first bytesToCheck bytes from the file.
|
||||
*/
|
||||
export async function isBinaryFile(
|
||||
filePath: string,
|
||||
bytesToCheck = 8000,
|
||||
): Promise<boolean> {
|
||||
const fd = await fsPromises.open(filePath, "r");
|
||||
|
||||
const buffer = Buffer.alloc(bytesToCheck);
|
||||
const { bytesRead } = await fd.read(buffer, 0, bytesToCheck, 0);
|
||||
await fd.close();
|
||||
|
||||
let nonPrintable = 0;
|
||||
for (let i = 0; i < bytesRead; i++) {
|
||||
const byte = buffer[i];
|
||||
|
||||
// Allow common text ranges: tab, newline, carriage return, and printable ASCII
|
||||
if (
|
||||
byte === 9 || // tab
|
||||
byte === 10 || // newline
|
||||
byte === 13 || // carriage return
|
||||
(byte >= 32 && byte <= 126)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
nonPrintable++;
|
||||
}
|
||||
|
||||
// Heuristic: if more than ~30% of bytes are non-printable, assume binary
|
||||
return nonPrintable / bytesRead > 0.3;
|
||||
}
|
||||
|
||||
export {
|
||||
FileNotFoundError,
|
||||
FileSystemAccessError,
|
||||
InvalidFileFormatError,
|
||||
JsonSerializationError,
|
||||
FileAlreadyExistsError,
|
||||
NotADirectoryError,
|
||||
IsDirectoryError,
|
||||
DirectoryNotEmptyError,
|
||||
} from "./errors/fs.js";
|
||||
81
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/global-dir.ts
generated
vendored
Executable file
81
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/global-dir.ts
generated
vendored
Executable file
@@ -0,0 +1,81 @@
|
||||
import { ensureDir } from "./fs.js";
|
||||
import { generatePaths, HARDHAT_PACKAGE_NAME } from "./internal/global-dir.js";
|
||||
|
||||
// Internal override for testing purposes: when set, getCacheDir returns
// this path instead of the real global cache directory.
let _cacheDirOverride: string | undefined;

/**
 * Sets a mock cache directory for getCacheDir. This is intended for testing
 * purposes only, to isolate tests from the real global cache.
 *
 * @param dir The directory path to use as the mock cache directory.
 */
export function setMockCacheDir(dir: string): void {
  _cacheDirOverride = dir;
}
|
||||
|
||||
/**
 * Resets the mock cache directory set by setMockCacheDir, so getCacheDir
 * falls back to the real global cache location.
 * Call this in test cleanup to restore normal behavior.
 */
export function resetMockCacheDir(): void {
  _cacheDirOverride = undefined;
}
|
||||
|
||||
/**
|
||||
* Returns the configuration directory path for a given package (defaults to "hardhat").
|
||||
* Ensures that the directory exists before returning the path.
|
||||
*
|
||||
* @param packageName The name of the package for which to generate paths. Defaults to "hardhat" if no package name is provided.
|
||||
* @returns The path to the hardhat configuration directory.
|
||||
* @throws FileSystemAccessError for any error.
|
||||
*/
|
||||
export async function getConfigDir(
|
||||
packageName: string = HARDHAT_PACKAGE_NAME,
|
||||
): Promise<string> {
|
||||
const { config } = await generatePaths(packageName);
|
||||
await ensureDir(config);
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the cache directory path for a given package (defaults to "hardhat").
|
||||
* Ensures that the directory exists before returning the path.
|
||||
*
|
||||
* For testing purposes, the cache directory can be overridden using
|
||||
* setMockCacheDir(). This is intended to isolate tests from the real
|
||||
* global cache.
|
||||
*
|
||||
* @param packageName The name of the package for which to generate paths. Defaults to "hardhat" if no package name is provided.
|
||||
* @returns The path to the hardhat cache directory.
|
||||
* @throws FileSystemAccessError for any error.
|
||||
*/
|
||||
export async function getCacheDir(
|
||||
packageName: string = HARDHAT_PACKAGE_NAME,
|
||||
): Promise<string> {
|
||||
// Allow override for testing purposes
|
||||
if (_cacheDirOverride !== undefined) {
|
||||
await ensureDir(_cacheDirOverride);
|
||||
return _cacheDirOverride;
|
||||
}
|
||||
|
||||
const { cache } = await generatePaths(packageName);
|
||||
await ensureDir(cache);
|
||||
return cache;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the telemetry directory path for a given package (defaults to "hardhat").
|
||||
* Ensures that the directory exists before returning the path.
|
||||
*
|
||||
* @param packageName The name of the package for which to generate paths. Defaults to "hardhat" if no package name is provided.
|
||||
* @returns A promise that resolves to the path of the telemetry directory.
|
||||
* @throws FileSystemAccessError for any error.
|
||||
*/
|
||||
export async function getTelemetryDir(
|
||||
packageName: string = HARDHAT_PACKAGE_NAME,
|
||||
): Promise<string> {
|
||||
const { data } = await generatePaths(packageName);
|
||||
await ensureDir(data);
|
||||
return data;
|
||||
}
|
||||
232
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/hex.ts
generated
vendored
Executable file
232
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/hex.ts
generated
vendored
Executable file
@@ -0,0 +1,232 @@
|
||||
import { InvalidParameterError } from "./common-errors.js";
|
||||
import { padToEven } from "./internal/hex.js";
|
||||
|
||||
/** A hexadecimal string guaranteed to carry the `0x` prefix. */
export type PrefixedHexString = `0x${string}`;
|
||||
|
||||
/**
|
||||
* Converts a non-negative safe integer or bigint to a hexadecimal string.
|
||||
*
|
||||
* @param value The number to convert.
|
||||
* @returns The hexadecimal representation of the number.
|
||||
* @throws InvalidParameterError If the input is not a safe integer or is negative.
|
||||
*/
|
||||
export function numberToHexString(value: number | bigint): PrefixedHexString {
|
||||
if (
|
||||
value < 0 ||
|
||||
(typeof value === "number" && !Number.isSafeInteger(value))
|
||||
) {
|
||||
throw new InvalidParameterError(
|
||||
`Expected a non-negative safe integer or bigint. Received: ${value}`,
|
||||
);
|
||||
}
|
||||
|
||||
return `0x${value.toString(16)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a hexadecimal string to a bigint. The string must be a valid
|
||||
* hexadecimal string. The string may be prefixed with "0x" or not. The
|
||||
* empty string is considered a valid hexadecimal string, so is the string
|
||||
* "0x" and will be converted to 0.
|
||||
*
|
||||
* @param hexString The hexadecimal string to convert. It must be a valid
|
||||
* hexadecimal string.
|
||||
* @returns The bigint representation of the hexadecimal string.
|
||||
* @throws InvalidParameterError If the input is not a hexadecimal string.
|
||||
*/
|
||||
export function hexStringToBigInt(hexString: string): bigint {
|
||||
if (!isHexString(hexString)) {
|
||||
throw new InvalidParameterError(
|
||||
`Expected a valid hexadecimal string. Received: ${hexString}`,
|
||||
);
|
||||
}
|
||||
// Prefix the string as it is required to make BigInt interpret it as a
|
||||
// hexadecimal number.
|
||||
let prefixedHexString = getPrefixedHexString(hexString);
|
||||
// BigInt does not support "0x" as a valid hexadecimal number, so we need to
|
||||
// add a zero after the prefix if the string is "0x".
|
||||
prefixedHexString = prefixedHexString === "0x" ? "0x0" : prefixedHexString;
|
||||
|
||||
const bigInt = BigInt(prefixedHexString);
|
||||
|
||||
return bigInt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a hexadecimal string to a number. The string must be a valid
|
||||
* hexadecimal string. The string may be prefixed with "0x" or not. The
|
||||
* empty string is considered a valid hexadecimal string, so is the string
|
||||
* "0x" and will be converted to 0.
|
||||
*
|
||||
* @param hexString The hexadecimal string to convert. It must be a valid
|
||||
* hexadecimal string.
|
||||
* @returns The number representation of the hexadecimal string.
|
||||
* @throws InvalidParameterError If the input is not a hexadecimal string or the value exceeds the Number.MAX_SAFE_INTEGER limit.
|
||||
*/
|
||||
export function hexStringToNumber(hexString: string): number {
|
||||
if (!isHexString(hexString)) {
|
||||
throw new InvalidParameterError(
|
||||
`Expected a valid hexadecimal string. Received: ${hexString}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Prefix the string as it is required to make parseInt interpret it as a
|
||||
// hexadecimal number.
|
||||
let prefixedHexString = getPrefixedHexString(hexString);
|
||||
|
||||
// Handle the special case where the string is "0x".
|
||||
prefixedHexString = prefixedHexString === "0x" ? "0x0" : prefixedHexString;
|
||||
|
||||
const numberValue = parseInt(prefixedHexString, 16);
|
||||
|
||||
if (numberValue > Number.MAX_SAFE_INTEGER) {
|
||||
throw new InvalidParameterError(
|
||||
`Value exceeds the safe integer limit. Received: ${hexString}`,
|
||||
);
|
||||
}
|
||||
|
||||
return numberValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Uint8Array to a hexadecimal string.
|
||||
*
|
||||
* @param bytes The bytes to convert.
|
||||
* @returns PrefixedHexString The hexadecimal representation of the bytes.
|
||||
*/
|
||||
export function bytesToHexString(bytes: Uint8Array): PrefixedHexString {
|
||||
return getPrefixedHexString(Buffer.from(bytes).toString("hex"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a hexadecimal string to a Uint8Array. The string must be a valid
|
||||
* hexadecimal string. The string may be prefixed with "0x" or not. The empty
|
||||
* string is considered a valid hexadecimal string, so is the string "0x" and
|
||||
* will be converted to Uint8Array([0]).
|
||||
*
|
||||
* @param hexString The hexadecimal string to convert.
|
||||
* @returns The byte representation of the hexadecimal string.
|
||||
* @throws InvalidParameterError If the input is not a hexadecimal string.
|
||||
*/
|
||||
export function hexStringToBytes(hexString: string): Uint8Array {
|
||||
if (!isHexString(hexString)) {
|
||||
throw new InvalidParameterError(
|
||||
`Expected a valid hexadecimal string. Received: ${hexString}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Pad the hex string if it's odd, as Buffer.from will truncate it
|
||||
// the last character if it's not a full byte.
|
||||
// See: https://nodejs.org/api/buffer.html#buffers-and-character-encodings
|
||||
const unprefixedHexString = getUnprefixedHexString(padToEven(hexString));
|
||||
return Uint8Array.from(Buffer.from(unprefixedHexString, "hex"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes and validates a string that represents a hexadecimal number.
|
||||
* The normalization process includes trimming any leading or trailing
|
||||
* whitespace, converting all characters to lowercase, and ensuring the string
|
||||
* has a "0x" prefix. The validation process checks if the string is a valid
|
||||
* hexadecimal string.
|
||||
*
|
||||
* @param hexString The hex string to normalize.
|
||||
* @returns The normalized hexadecimal string.
|
||||
*/
|
||||
export function normalizeHexString(hexString: string): PrefixedHexString {
|
||||
const normalizedHexString = hexString.trim().toLowerCase();
|
||||
|
||||
if (!isHexString(normalizedHexString)) {
|
||||
throw new InvalidParameterError(
|
||||
`Expected a valid hexadecimal string. Received: ${hexString}`,
|
||||
);
|
||||
}
|
||||
|
||||
return getPrefixedHexString(normalizedHexString);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a string starts with "0x" (case-insensitive).
|
||||
* This function does not validate the input.
|
||||
*
|
||||
* @param hexString The string to check.
|
||||
* @returns True if the string starts with "0x", false otherwise.
|
||||
*/
|
||||
export function isPrefixedHexString(
|
||||
hexString: string,
|
||||
): hexString is PrefixedHexString {
|
||||
return hexString.toLowerCase().startsWith("0x");
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the "0x" prefix from a hexadecimal string.
|
||||
* If the string is not prefixed, it is returned as is.
|
||||
* This function does not validate the input.
|
||||
*
|
||||
* @param hexString The hexadecimal string.
|
||||
* @returns The hexadecimal string without the "0x" prefix.
|
||||
*/
|
||||
export function getUnprefixedHexString(hexString: string): string {
|
||||
return isPrefixedHexString(hexString) ? hexString.substring(2) : hexString;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the "0x" prefix to a hexadecimal string.
|
||||
* If the string is already prefixed, it is returned as is.
|
||||
* This function does not validate the input.
|
||||
*
|
||||
* @param hexString The hexadecimal string.
|
||||
* @returns The hexadecimal string with the "0x" prefix.
|
||||
*/
|
||||
export function getPrefixedHexString(hexString: string): PrefixedHexString {
|
||||
return isPrefixedHexString(hexString) ? hexString : `0x${hexString}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a value is a hexadecimal string. The string may be prefixed with
|
||||
* "0x" or not. The empty string is considered a valid hexadecimal string, so
|
||||
* is the string "0x".
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is a hexadecimal string, false otherwise.
|
||||
*/
|
||||
export function isHexString(value: unknown): boolean {
|
||||
return typeof value === "string" && /^(?:0x)?[0-9a-f]*$/i.test(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes leading zeros from a hexadecimal string, unless the string
|
||||
* represents the number zero ("0x0").
|
||||
* This function does not validate the input.
|
||||
*
|
||||
* @param hexString The hexadecimal string.
|
||||
* @returns The hexadecimal string without leading zeros.
|
||||
*/
|
||||
export function unpadHexString(hexString: string): string {
|
||||
const unprefixedHexString = getUnprefixedHexString(hexString);
|
||||
const unpaddedHexString = unprefixedHexString.replace(/^0+/, "");
|
||||
return unpaddedHexString === "" ? "0x0" : `0x${unpaddedHexString}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pads a hexadecimal string with zeros on the left to a specified length, or
|
||||
* truncates it from the left if it's too long.
|
||||
* This function does not validate the input.
|
||||
*
|
||||
* @param hexString The hexadecimal string to pad.
|
||||
* @param length The desired length of the hexadecimal string.
|
||||
* @returns The padded hexadecimal string.
|
||||
*/
|
||||
export function setLengthLeft(
|
||||
hexString: string,
|
||||
length: number,
|
||||
): PrefixedHexString {
|
||||
const unprefixedHexString = getUnprefixedHexString(hexString);
|
||||
|
||||
// if the string is longer than the desired length, truncate it
|
||||
if (unprefixedHexString.length > length) {
|
||||
return `0x${unprefixedHexString.slice(-length)}`;
|
||||
}
|
||||
|
||||
const paddedHexString = unprefixedHexString.padStart(length, "0");
|
||||
return `0x${paddedHexString}`;
|
||||
}
|
||||
129
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/bytecode.ts
generated
vendored
Executable file
129
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/bytecode.ts
generated
vendored
Executable file
@@ -0,0 +1,129 @@
|
||||
import type { PrefixedHexString } from "../hex.js";
|
||||
|
||||
import {
|
||||
AmbiguousLibraryNameError,
|
||||
InvalidLibraryAddressError,
|
||||
MissingLibrariesError,
|
||||
OverlappingLibrariesError,
|
||||
UnnecessaryLibraryError,
|
||||
} from "../errors/bytecode.js";
|
||||
import { isAddress } from "../eth.js";
|
||||
|
||||
/** The slice of a compiler artifact needed for library linking. */
export interface Artifact {
  bytecode: string;
  linkReferences: {
    [inputSourceName: string]: {
      // start/length pairs locating each library placeholder in the
      // bytecode (presumably byte offsets, as in solc artifacts — confirm).
      [libraryName: string]: Array<{ start: number; length: number }>;
    };
  };
}

/** A library needed by a contract, paired with its resolved address. */
export interface LibraryLink {
  sourceName: string;
  libraryName: string;
  // Fully qualified name; used interchangeably with libraryName when
  // matching user-provided library keys.
  libraryFqn: string;
  address: string;
}

/** Map from a library name (bare or fully qualified) to its address. */
export interface LibraryAddresses {
  [contractName: string]: PrefixedHexString;
}
|
||||
|
||||
/**
|
||||
* Check that the provided library addresses are valid Ethereum addresses.
|
||||
* If any of them are not, an InvalidLibraryAddressError is thrown.
|
||||
*/
|
||||
export function checkProvidedLibraryAddresses(
|
||||
providedLibraries: LibraryAddresses,
|
||||
): void {
|
||||
const librariesWithInvalidAddresses: LibraryAddresses = {};
|
||||
for (const [name, address] of Object.entries(providedLibraries)) {
|
||||
if (!isAddress(address)) {
|
||||
librariesWithInvalidAddresses[name] = address;
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(librariesWithInvalidAddresses).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new InvalidLibraryAddressError(librariesWithInvalidAddresses);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that the provided libraries can't be resolved to multiple libraries, or
|
||||
* that they are not needed by the contract. If any of these conditions are met,
|
||||
* an AmbiguousLibraryNameError or an UnnecessaryLibraryError is thrown.
|
||||
*/
|
||||
export function checkAmbiguousOrUnnecessaryLinks(
|
||||
providedLibraries: LibraryAddresses,
|
||||
neededLibraries: LibraryLink[],
|
||||
): void {
|
||||
const ambiguousLibraries: Record<string, LibraryLink[]> = {};
|
||||
const unnecessaryLibraries: string[] = [];
|
||||
|
||||
for (const providedLibraryName of Object.keys(providedLibraries)) {
|
||||
const matchingLibraries = neededLibraries.filter(
|
||||
({ libraryName, libraryFqn }) =>
|
||||
libraryName === providedLibraryName ||
|
||||
libraryFqn === providedLibraryName,
|
||||
);
|
||||
|
||||
if (matchingLibraries.length > 1) {
|
||||
ambiguousLibraries[providedLibraryName] = matchingLibraries;
|
||||
} else if (matchingLibraries.length === 0) {
|
||||
unnecessaryLibraries.push(providedLibraryName);
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(ambiguousLibraries).length > 0) {
|
||||
throw new AmbiguousLibraryNameError(ambiguousLibraries);
|
||||
}
|
||||
|
||||
if (unnecessaryLibraries.length > 0) {
|
||||
throw new UnnecessaryLibraryError(unnecessaryLibraries);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that each library is only provided once, either by its name or its
|
||||
* fully qualified name. If a library is provided more than once, an
|
||||
* OverlappingLibrariesError is thrown.
|
||||
*/
|
||||
export function checkOverlappingLibraryNames(
|
||||
providedLibraries: LibraryAddresses,
|
||||
neededLibraries: LibraryLink[],
|
||||
): void {
|
||||
const overlappingLibraries = neededLibraries
|
||||
.filter(
|
||||
({ libraryName, libraryFqn }) =>
|
||||
providedLibraries[libraryFqn] !== undefined &&
|
||||
providedLibraries[libraryName] !== undefined,
|
||||
)
|
||||
.map(({ libraryFqn }) => libraryFqn);
|
||||
|
||||
if (overlappingLibraries.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new OverlappingLibrariesError(overlappingLibraries);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the needed libraries have all their addresses resolved. If an
|
||||
* address is missing, it means that the user didn't provide it in the
|
||||
* providedLibraries map. In that case, an MissingLibrariesError is thrown.
|
||||
*/
|
||||
export function checkMissingLibraryAddresses(
|
||||
neededLibraries: LibraryLink[],
|
||||
): void {
|
||||
const missingLibraries = neededLibraries
|
||||
.filter(({ address }) => address === undefined)
|
||||
.map(({ libraryFqn }) => libraryFqn);
|
||||
|
||||
if (missingLibraries.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw new MissingLibrariesError(missingLibraries);
|
||||
}
|
||||
71
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/eth.ts
generated
vendored
Executable file
71
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/eth.ts
generated
vendored
Executable file
@@ -0,0 +1,71 @@
|
||||
import { utf8StringToBytes } from "../bytes.js";
|
||||
import { keccak256 } from "../crypto.js";
|
||||
import { bytesToHexString, getUnprefixedHexString } from "../hex.js";
|
||||
|
||||
class RandomBytesGenerator {
|
||||
#nextValue: Uint8Array;
|
||||
|
||||
private constructor(nextValue: Uint8Array) {
|
||||
this.#nextValue = nextValue;
|
||||
}
|
||||
|
||||
public static async create(seed: string): Promise<RandomBytesGenerator> {
|
||||
const nextValue = await keccak256(Buffer.from(seed));
|
||||
|
||||
return new RandomBytesGenerator(nextValue);
|
||||
}
|
||||
|
||||
public async next(): Promise<Uint8Array> {
|
||||
const valueToReturn = this.#nextValue;
|
||||
|
||||
this.#nextValue = await keccak256(this.#nextValue);
|
||||
|
||||
return valueToReturn;
|
||||
}
|
||||
}
|
||||
|
||||
let hashGenerator: RandomBytesGenerator | null = null;
|
||||
let addressGenerator: RandomBytesGenerator | null = null;
|
||||
|
||||
export async function getHashGenerator(): Promise<RandomBytesGenerator> {
|
||||
if (hashGenerator === null) {
|
||||
hashGenerator = await RandomBytesGenerator.create("hashSeed");
|
||||
}
|
||||
return hashGenerator;
|
||||
}
|
||||
|
||||
export async function getAddressGenerator(): Promise<RandomBytesGenerator> {
|
||||
if (addressGenerator === null) {
|
||||
addressGenerator = await RandomBytesGenerator.create("addressSeed");
|
||||
}
|
||||
return addressGenerator;
|
||||
}
|
||||
|
||||
/**
 * Checks if a value is an Ethereum address and if the checksum is valid.
 * This method is an adaptation of the ethereumjs methods at this link:
 * https://github.com/ethereumjs/ethereumjs-monorepo/blob/47f388bfeec553519d11259fee7e7161a77b29b2/packages/util/src/account.ts#L440-L478
 * The main differences are:
 * - the two methods have been merged into one
 * - the `eip1191ChainId` parameter has been removed.
 * - the code has been modified to use the `hardhat-utils` methods
 *
 * NOTE(review): the final comparison re-adds a "0x" prefix, so this returns
 * false for unprefixed input even if the casing is correct — presumably
 * callers always pass 0x-prefixed addresses; confirm.
 */
export async function isValidChecksum(hexAddress: string): Promise<boolean> {
  // Work on the lowercase, unprefixed hex digits of the address.
  const address = getUnprefixedHexString(hexAddress).toLowerCase();

  // EIP-55 hashes the lowercase hex characters as ASCII text, not the
  // decoded address bytes.
  const bytes = utf8StringToBytes(address);

  // Drop the "0x" prefix from the hash's hex representation.
  const hash = bytesToHexString(await keccak256(bytes)).slice(2);

  // Rebuild the checksummed form: each hash nibble >= 8 forces the
  // corresponding address character to uppercase.
  let ret = "";
  for (let i = 0; i < address.length; i++) {
    if (parseInt(hash[i], 16) >= 8) {
      ret += address[i].toUpperCase();
    } else {
      ret += address[i];
    }
  }

  return `0x${ret}` === hexAddress;
}
|
||||
260
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/format.ts
generated
vendored
Executable file
260
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/format.ts
generated
vendored
Executable file
@@ -0,0 +1,260 @@
|
||||
import type { TableItem } from "../format.js";
|
||||
|
||||
/**
|
||||
* Calculate the display width of a string by removing ANSI escape codes.
|
||||
*
|
||||
* NOTE: This implementation only removes basic ANSI color/style codes and may
|
||||
* not handle all escape sequences (e.g., cursor movement, complex control
|
||||
* sequences).
|
||||
*/
|
||||
export function getStringWidth(str: string): number {
|
||||
// Remove ANSI escape codes if present
|
||||
const stripped = str.replace(/\u001b\[[0-9;]*m/g, "");
|
||||
return stripped.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the minimum width needed by each column in the table
|
||||
* to fit its content (accounting for ANSI color codes).
|
||||
*/
|
||||
export function getColumnWidths(items: TableItem[]): number[] {
|
||||
const columnWidths: number[] = [];
|
||||
|
||||
for (const item of items) {
|
||||
if (item.type === "row" || item.type === "header") {
|
||||
item.cells.forEach((cell, i) => {
|
||||
columnWidths[i] = Math.max(columnWidths[i] ?? 0, getStringWidth(cell));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return columnWidths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the inner width needed to fit the rows and headers
|
||||
* (excludes borders, which are added during rendering).
|
||||
*
|
||||
* Each column is padded by 1 space on each side, and columns are
|
||||
* separated by " │ " (3 spaces).
|
||||
*/
|
||||
export function getContentWidth(columnWidths: number[]): number {
|
||||
return (
|
||||
columnWidths.reduce((sum, w) => sum + w, 0) +
|
||||
(columnWidths.length - 1) * 3 +
|
||||
2
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the inner width needed to fit titles and section headers
|
||||
* (excludes borders, which are added during rendering).
|
||||
*
|
||||
* Each title/header is padded by 1 space on each side.
|
||||
* Accounts for ANSI color codes.
|
||||
*/
|
||||
export function getHeadingWidth(items: TableItem[]): number {
|
||||
let headingWidth = 0;
|
||||
for (const item of items) {
|
||||
if (item.type === "section-header" || item.type === "title") {
|
||||
headingWidth = Math.max(headingWidth, getStringWidth(item.text) + 2);
|
||||
}
|
||||
}
|
||||
return headingWidth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the width needed for unused columns when a row/header has fewer
|
||||
* cells than the total column count (e.g., if table has 6 columns but row
|
||||
* only has 2 cells, calculates space for the remaining 4 columns).
|
||||
*/
|
||||
export function getUnusedColumnsWidth(
|
||||
columnWidths: number[],
|
||||
previousCellCount: number,
|
||||
): number {
|
||||
const remainingWidths = columnWidths.slice(previousCellCount);
|
||||
return remainingWidths.reduce((sum, w) => sum + w + 3, 0) - 3;
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders a horizontal rule segment by repeating a character for each column
|
||||
* with padding, joined by a separator (e.g., "─────┼─────┼─────").
|
||||
*/
|
||||
export function renderRuleSegment(
|
||||
columnWidths: number[],
|
||||
char: string,
|
||||
joiner: string,
|
||||
): string {
|
||||
return columnWidths.map((w) => char.repeat(w + 2)).join(joiner);
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders a complete horizontal rule with left and right borders
|
||||
* (e.g., "╟─────┼─────┼─────╢").
|
||||
*/
|
||||
export function renderHorizontalRule(
|
||||
leftBorder: string,
|
||||
columnWidths: number[],
|
||||
char: string,
|
||||
joiner: string,
|
||||
rightBorder: string,
|
||||
): string {
|
||||
return (
|
||||
leftBorder + renderRuleSegment(columnWidths, char, joiner) + rightBorder
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders a content line containing cells from either a header or row.
|
||||
*
|
||||
* Handles two cases:
|
||||
* - Full width: When all columns are used, cells are separated by " │ " and
|
||||
* line ends with " ║" (e.g., "║ cell1 │ cell2 │ cell3 ║")
|
||||
* - Short line: When fewer columns are used, active cells are followed by
|
||||
* " │ " and empty space, ending with "║" (e.g., "║ cell1 │ cell2 │ ║")
|
||||
*
|
||||
* Accounts for ANSI color codes when padding cells.
|
||||
*/
|
||||
export function renderContentLine(
|
||||
cells: string[],
|
||||
columnWidths: number[],
|
||||
currentCellCount: number,
|
||||
): string {
|
||||
if (currentCellCount === columnWidths.length) {
|
||||
return (
|
||||
"║ " +
|
||||
cells
|
||||
.map((cell, j) => {
|
||||
const displayWidth = getStringWidth(cell);
|
||||
const actualLength = cell.length;
|
||||
// Adjust padding to account for ANSI escape codes
|
||||
return cell.padEnd(columnWidths[j] + actualLength - displayWidth);
|
||||
})
|
||||
.join(" │ ") +
|
||||
" ║"
|
||||
);
|
||||
} else {
|
||||
const usedWidths = columnWidths.slice(0, currentCellCount);
|
||||
const remainingWidth = getUnusedColumnsWidth(
|
||||
columnWidths,
|
||||
currentCellCount,
|
||||
);
|
||||
return (
|
||||
"║ " +
|
||||
cells
|
||||
.map((cell, j) => {
|
||||
const displayWidth = getStringWidth(cell);
|
||||
const actualLength = cell.length;
|
||||
// Adjust padding to account for ANSI escape codes
|
||||
return cell.padEnd(usedWidths[j] + actualLength - displayWidth);
|
||||
})
|
||||
.join(" │ ") +
|
||||
" │ " +
|
||||
" ".repeat(remainingWidth + 1) +
|
||||
"║"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders the horizontal rule that appears above a header row.
|
||||
*
|
||||
* Handles three cases:
|
||||
* - Transition rule: When going from more columns to fewer, shows ┴ marks
|
||||
* where columns collapse (e.g., "╟───┼───┼───┴───┴───╢")
|
||||
* - Full width: When header uses all columns (e.g., "╟───┬───┬───╢" or "╟───┼───┼───╢")
|
||||
* - Short header: When header uses fewer columns than max (e.g., "╟───┬─────────╢")
|
||||
*
|
||||
* The innerJoiner determines the separator character: ┬ after section-header, ┼ otherwise.
|
||||
*/
|
||||
export function renderHeaderOpen(
|
||||
columnWidths: number[],
|
||||
currentCellCount: number,
|
||||
innerJoiner: string,
|
||||
needsTransition: boolean,
|
||||
): string {
|
||||
if (needsTransition) {
|
||||
const usedWidths = columnWidths.slice(0, currentCellCount);
|
||||
const collapsingWidths = columnWidths.slice(currentCellCount);
|
||||
return (
|
||||
"╟" +
|
||||
renderRuleSegment(usedWidths, "─", "┼") +
|
||||
"┼" +
|
||||
renderRuleSegment(collapsingWidths, "─", "┴") +
|
||||
"╢"
|
||||
);
|
||||
} else if (currentCellCount === columnWidths.length) {
|
||||
return renderHorizontalRule("╟", columnWidths, "─", innerJoiner, "╢");
|
||||
} else {
|
||||
const usedWidths = columnWidths.slice(0, currentCellCount);
|
||||
const remainingWidth = getUnusedColumnsWidth(
|
||||
columnWidths,
|
||||
currentCellCount,
|
||||
);
|
||||
return (
|
||||
"╟" +
|
||||
renderRuleSegment(usedWidths, "─", innerJoiner) +
|
||||
innerJoiner +
|
||||
"─".repeat(remainingWidth + 2) +
|
||||
"╢"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders the horizontal rule that appears above a row.
|
||||
*
|
||||
* Handles two cases:
|
||||
* - Full width: When row uses all columns, renders with ┼ joiners and
|
||||
* ends with ╢ (e.g., "╟───┼───┼───╢")
|
||||
* - Short row: When row uses fewer columns, renders active columns with
|
||||
* ┼ joiners, ends with ┤, then fills remaining space and ends with ║
|
||||
* (e.g., "╟───┼───┤ ║")
|
||||
*/
|
||||
export function renderRowSeparator(
|
||||
columnWidths: number[],
|
||||
currentCellCount: number,
|
||||
): string {
|
||||
if (currentCellCount === columnWidths.length) {
|
||||
return renderHorizontalRule("╟", columnWidths, "─", "┼", "╢");
|
||||
} else {
|
||||
// Short row - ends with ┤ instead of ╢
|
||||
const usedWidths = columnWidths.slice(0, currentCellCount);
|
||||
const remainingWidth = getUnusedColumnsWidth(
|
||||
columnWidths,
|
||||
currentCellCount,
|
||||
);
|
||||
return (
|
||||
"╟" +
|
||||
renderRuleSegment(usedWidths, "─", "┼") +
|
||||
"┤" +
|
||||
" ".repeat(remainingWidth + 2) +
|
||||
"║"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders the section's bottom border, placing ╧ marks under column
|
||||
* separators where the last row/header had cells (e.g., if the last row
|
||||
* looked like "║ a │ b │ ║", the bottom border would be
|
||||
* "╚═══╧═══╧═══════╝").
|
||||
*/
|
||||
export function renderSectionClose(
|
||||
columnWidths: number[],
|
||||
previousCellCount: number,
|
||||
): string {
|
||||
if (previousCellCount === columnWidths.length) {
|
||||
return renderHorizontalRule("╚", columnWidths, "═", "╧", "╝");
|
||||
} else {
|
||||
const usedWidths = columnWidths.slice(0, previousCellCount);
|
||||
const unusedWidth = getUnusedColumnsWidth(columnWidths, previousCellCount);
|
||||
return (
|
||||
"╚" +
|
||||
renderRuleSegment(usedWidths, "═", "╧") +
|
||||
"╧" +
|
||||
renderRuleSegment([unusedWidth], "═", "") +
|
||||
"╝"
|
||||
);
|
||||
}
|
||||
}
|
||||
10
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/global-dir.ts
generated
vendored
Executable file
10
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/global-dir.ts
generated
vendored
Executable file
@@ -0,0 +1,10 @@
|
||||
import type envPaths from "env-paths";
|
||||
|
||||
export const HARDHAT_PACKAGE_NAME = "hardhat";
|
||||
|
||||
export async function generatePaths(
|
||||
packageName: string,
|
||||
): Promise<envPaths.Paths> {
|
||||
const { default: envPaths } = await import("env-paths");
|
||||
return envPaths(packageName);
|
||||
}
|
||||
21
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/hex.ts
generated
vendored
Executable file
21
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/hex.ts
generated
vendored
Executable file
@@ -0,0 +1,21 @@
|
||||
import {
|
||||
getPrefixedHexString,
|
||||
getUnprefixedHexString,
|
||||
isPrefixedHexString,
|
||||
} from "../hex.js";
|
||||
|
||||
export function padToEven(value: string): string {
|
||||
const isPrefixed = isPrefixedHexString(value);
|
||||
const unprefixed = getUnprefixedHexString(value);
|
||||
|
||||
let padded;
|
||||
if (unprefixed.length === 0) {
|
||||
// Pad the empty string with a single zero, as Buffer.from([]) will not
|
||||
// interpret it correctly otherwise.
|
||||
padded = "00";
|
||||
} else {
|
||||
padded = unprefixed.length % 2 === 0 ? unprefixed : `0${unprefixed}`;
|
||||
}
|
||||
|
||||
return isPrefixed ? getPrefixedHexString(padded) : padded;
|
||||
}
|
||||
91
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/lang.ts
generated
vendored
Executable file
91
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/lang.ts
generated
vendored
Executable file
@@ -0,0 +1,91 @@
|
||||
import type rfdcT from "rfdc";
|
||||
|
||||
import { isObject } from "../lang.js";
|
||||
|
||||
let clone: ReturnType<typeof rfdcT> | null = null;
|
||||
export async function getDeepCloneFunction(): Promise<<T>(input: T) => T> {
|
||||
const { default: rfdc } = await import("rfdc");
|
||||
|
||||
if (clone === null) {
|
||||
clone = rfdc();
|
||||
}
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
export function deepMergeImpl<T extends object, S extends object>(
|
||||
target: T,
|
||||
source: S,
|
||||
shouldOverwriteUndefined: boolean,
|
||||
): T & S {
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
-- Result will include properties from both T and S, but starts with only T */
|
||||
const result = { ...target } as T & S;
|
||||
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
-- All keys come from S, TypeScript can't infer the union of string and symbol keys */
|
||||
const keys = [
|
||||
...Object.keys(source),
|
||||
...Object.getOwnPropertySymbols(source),
|
||||
] as Array<keyof S>;
|
||||
|
||||
for (const key of keys) {
|
||||
if (
|
||||
isObject(source[key]) &&
|
||||
// Only merge plain objects, not class instances
|
||||
Object.getPrototypeOf(source[key]) === Object.prototype
|
||||
) {
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
-- result[key] will have the correct type after assignment but TS can't infer it */
|
||||
result[key] = deepMergeImpl(
|
||||
result[key] ?? {},
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
-- source[key] is known to be from S but TS can't infer it */
|
||||
source[key] as S,
|
||||
shouldOverwriteUndefined,
|
||||
) as (T & S)[Extract<keyof S, string>];
|
||||
} else if (shouldOverwriteUndefined || source[key] !== undefined) {
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
|
||||
-- result[key] will have the correct type after assignment but TS can't infer it */
|
||||
result[key] = source[key] as (T & S)[Extract<keyof S, string>];
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
let cachedCustomEqual: ((a: unknown, b: unknown) => boolean) | undefined;
|
||||
|
||||
/**
|
||||
* Performs a custom deep equality check using `fast-equals` with specific overrides.
|
||||
*
|
||||
* @param x The first value to compare.
|
||||
* @param y The second value to compare.
|
||||
* @returns A promise that resolves to true if the values are deeply equal, false otherwise.
|
||||
*/
|
||||
export async function customFastEqual<T>(x: T, y: T): Promise<boolean> {
|
||||
if (cachedCustomEqual !== undefined) {
|
||||
return cachedCustomEqual(x, y);
|
||||
}
|
||||
|
||||
const { createCustomEqual } = await import("fast-equals");
|
||||
|
||||
cachedCustomEqual = createCustomEqual({
|
||||
createCustomConfig: (defaultConfig) => ({
|
||||
areTypedArraysEqual: (a, b, state) => {
|
||||
// Node.js uses an internal pool for small Buffers, so multiple Buffers can
|
||||
// share the same underlying ArrayBuffer while having different byteOffsets.
|
||||
// Structural equality checks (e.g. deep equality) consider offset and length
|
||||
// and may fail even if the contents are identical.
|
||||
// We use Buffer.equals() to compare content only.
|
||||
if (Buffer.isBuffer(a) && Buffer.isBuffer(b)) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
return defaultConfig.areTypedArraysEqual(a, b, state);
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
return cachedCustomEqual(x, y);
|
||||
}
|
||||
15
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/package.ts
generated
vendored
Executable file
15
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/package.ts
generated
vendored
Executable file
@@ -0,0 +1,15 @@
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
export function getFilePath(filePathOrUrl: string): string | undefined {
|
||||
if (filePathOrUrl.startsWith("file://")) {
|
||||
try {
|
||||
// This can throw on Windows if the url is malformed,
|
||||
// so we catch it and return undefined
|
||||
return fileURLToPath(filePathOrUrl);
|
||||
} catch (_) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
return filePathOrUrl;
|
||||
}
|
||||
23
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/panic-errors.ts
generated
vendored
Executable file
23
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/panic-errors.ts
generated
vendored
Executable file
@@ -0,0 +1,23 @@
|
||||
export function panicErrorCodeToReason(errorCode: bigint): string | undefined {
|
||||
// eslint-disable-next-line @typescript-eslint/switch-exhaustiveness-check -- we are only covering some of the integer range
|
||||
switch (errorCode) {
|
||||
case 0x1n:
|
||||
return "Assertion error";
|
||||
case 0x11n:
|
||||
return "Arithmetic operation overflowed outside of an unchecked block";
|
||||
case 0x12n:
|
||||
return "Division or modulo division by zero";
|
||||
case 0x21n:
|
||||
return "Tried to convert a value into an enum, but the value was too big or negative";
|
||||
case 0x22n:
|
||||
return "Incorrectly encoded storage byte array";
|
||||
case 0x31n:
|
||||
return ".pop() was called on an empty array";
|
||||
case 0x32n:
|
||||
return "Array accessed at an out-of-bounds or negative index";
|
||||
case 0x41n:
|
||||
return "Too much memory was allocated, or an array was created that is too large";
|
||||
case 0x51n:
|
||||
return "Called a zero-initialized variable of internal function type";
|
||||
}
|
||||
}
|
||||
166
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/request.ts
generated
vendored
Executable file
166
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/internal/request.ts
generated
vendored
Executable file
@@ -0,0 +1,166 @@
|
||||
import type { DispatcherOptions, RequestOptions } from "../request.js";
|
||||
import type EventEmitter from "node:events";
|
||||
import type UndiciT from "undici";
|
||||
|
||||
import crypto from "node:crypto";
|
||||
import path from "node:path";
|
||||
|
||||
import { mkdir } from "../fs.js";
|
||||
import { isObject } from "../lang.js";
|
||||
import {
|
||||
ConnectionRefusedError,
|
||||
DEFAULT_MAX_REDIRECTS,
|
||||
DEFAULT_TIMEOUT_IN_MILLISECONDS,
|
||||
DEFAULT_USER_AGENT,
|
||||
getDispatcher,
|
||||
RequestTimeoutError,
|
||||
ResponseStatusCodeError,
|
||||
} from "../request.js";
|
||||
|
||||
export async function generateTempFilePath(filePath: string): Promise<string> {
|
||||
const { dir, ext, name } = path.parse(filePath);
|
||||
|
||||
await mkdir(dir);
|
||||
|
||||
return path.format({
|
||||
dir,
|
||||
ext,
|
||||
name: `tmp-${name}-${crypto.randomBytes(8).toString("hex")}`,
|
||||
});
|
||||
}
|
||||
|
||||
export async function getBaseRequestOptions(
|
||||
requestUrl: string,
|
||||
{ extraHeaders, abortSignal, queryParams }: RequestOptions = {},
|
||||
dispatcherOrDispatcherOptions?: UndiciT.Dispatcher | DispatcherOptions,
|
||||
): Promise<{
|
||||
query?: Record<string, any> | undefined;
|
||||
signal?: EventEmitter | AbortSignal | undefined;
|
||||
dispatcher: UndiciT.Dispatcher;
|
||||
headers: Record<string, string>;
|
||||
throwOnError: true;
|
||||
}> {
|
||||
const { Dispatcher } = await import("undici");
|
||||
const dispatcher =
|
||||
dispatcherOrDispatcherOptions instanceof Dispatcher
|
||||
? dispatcherOrDispatcherOptions
|
||||
: await getDispatcher(requestUrl, dispatcherOrDispatcherOptions);
|
||||
|
||||
// We could use the global dispatcher if neither dispatcher nor dispatcherOptions were passed,
|
||||
// but there's no way to configure it, so we don't do it.
|
||||
// https://github.com/nodejs/undici/blob/961b76ad7cac17d23580d172702e11a080974f5d/lib/global.js#L9
|
||||
return {
|
||||
dispatcher,
|
||||
headers: getHeaders(requestUrl, extraHeaders),
|
||||
throwOnError: true,
|
||||
...(abortSignal !== undefined ? { signal: abortSignal } : {}),
|
||||
...(queryParams !== undefined ? { query: queryParams } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function getHeaders(
|
||||
requestUrl: string,
|
||||
extraHeaders: Record<string, string> = {},
|
||||
): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
...extraHeaders,
|
||||
"User-Agent": extraHeaders["User-Agent"] ?? DEFAULT_USER_AGENT,
|
||||
};
|
||||
|
||||
const authHeader = getAuthHeader(requestUrl);
|
||||
if (authHeader !== undefined) {
|
||||
headers.Authorization = authHeader;
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export function getAuthHeader(requestUrl: string): string | undefined {
|
||||
const parsedUrl = new URL(requestUrl);
|
||||
if (parsedUrl.username === "") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return `Basic ${Buffer.from(
|
||||
`${parsedUrl.username}:${parsedUrl.password}`,
|
||||
).toString("base64")}`;
|
||||
}
|
||||
|
||||
export async function getProxyDispatcher(
|
||||
proxy: string,
|
||||
options: Omit<UndiciT.ProxyAgent.Options, "uri">,
|
||||
): Promise<UndiciT.ProxyAgent> {
|
||||
const { ProxyAgent } = await import("undici");
|
||||
|
||||
return new ProxyAgent({
|
||||
uri: proxy,
|
||||
...options,
|
||||
});
|
||||
}
|
||||
|
||||
export async function getPoolDispatcher(
|
||||
requestUrl: string,
|
||||
options: UndiciT.Pool.Options,
|
||||
): Promise<UndiciT.Pool> {
|
||||
const { Pool } = await import("undici");
|
||||
|
||||
const parsedUrl = new URL(requestUrl);
|
||||
return new Pool(parsedUrl.origin, options);
|
||||
}
|
||||
|
||||
export async function getBasicDispatcher(
|
||||
options: UndiciT.Agent.Options,
|
||||
): Promise<UndiciT.Agent> {
|
||||
const { Agent } = await import("undici");
|
||||
|
||||
return new Agent(options);
|
||||
}
|
||||
|
||||
export function getBaseDispatcherOptions(
|
||||
timeout: number = DEFAULT_TIMEOUT_IN_MILLISECONDS,
|
||||
isTestDispatcher: boolean = false,
|
||||
): UndiciT.Client.Options {
|
||||
// These have good defaults for production, but need to be tweaked to avoid hanging tests.
|
||||
// https://github.com/nodejs/undici/blob/961b76ad7cac17d23580d172702e11a080974f5d/docs/docs/best-practices/writing-tests.md
|
||||
const keepAliveTimeouts = isTestDispatcher
|
||||
? { keepAliveTimeout: 10, keepAliveMaxTimeout: 10 }
|
||||
: {};
|
||||
|
||||
return {
|
||||
headersTimeout: timeout,
|
||||
bodyTimeout: timeout,
|
||||
connectTimeout: timeout,
|
||||
maxRedirections: DEFAULT_MAX_REDIRECTS,
|
||||
...keepAliveTimeouts,
|
||||
};
|
||||
}
|
||||
|
||||
export function sanitizeUrl(requestUrl: string): string {
|
||||
const parsedUrl = new URL(requestUrl);
|
||||
// Return only the origin to avoid leaking sensitive information
|
||||
return parsedUrl.origin;
|
||||
}
|
||||
|
||||
export function handleError(e: Error, requestUrl: string): void {
|
||||
let causeCode: unknown;
|
||||
if (isObject(e.cause)) {
|
||||
causeCode = e.cause.code;
|
||||
}
|
||||
const errorCode = "code" in e ? e.code : causeCode;
|
||||
|
||||
if (errorCode === "ECONNREFUSED") {
|
||||
throw new ConnectionRefusedError(requestUrl, e);
|
||||
}
|
||||
|
||||
if (
|
||||
errorCode === "UND_ERR_CONNECT_TIMEOUT" ||
|
||||
errorCode === "UND_ERR_HEADERS_TIMEOUT" ||
|
||||
errorCode === "UND_ERR_BODY_TIMEOUT"
|
||||
) {
|
||||
throw new RequestTimeoutError(requestUrl, e);
|
||||
}
|
||||
|
||||
if (errorCode === "UND_ERR_RESPONSE_STATUS_CODE") {
|
||||
throw new ResponseStatusCodeError(requestUrl, e);
|
||||
}
|
||||
}
|
||||
120
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/lang.ts
generated
vendored
Executable file
120
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/lang.ts
generated
vendored
Executable file
@@ -0,0 +1,120 @@
|
||||
import {
|
||||
customFastEqual,
|
||||
deepMergeImpl,
|
||||
getDeepCloneFunction,
|
||||
} from "./internal/lang.js";
|
||||
|
||||
/**
|
||||
* Creates a deep clone of the provided value.
|
||||
*
|
||||
* @param value The value to clone.
|
||||
* @returns The deep clone of the provided value.
|
||||
*/
|
||||
export async function deepClone<T>(value: T): Promise<T> {
|
||||
const _deepClone = await getDeepCloneFunction();
|
||||
|
||||
return _deepClone<T>(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if two values are deeply equal.
|
||||
*
|
||||
* @param x The first value to compare.
|
||||
* @param y The second value to compare.
|
||||
* @returns True if the values are deeply equal, false otherwise.
|
||||
*/
|
||||
export async function deepEqual<T>(x: T, y: T): Promise<boolean> {
|
||||
return customFastEqual(x, y);
|
||||
}
|
||||
|
||||
/**
|
||||
* Deeply merges two objects.
|
||||
*
|
||||
* @remarks
|
||||
* - Arrays or `undefined` values are not valid inputs.
|
||||
* - Functions: If a function exists in both the target and source, the source
|
||||
* function overwrites the target.
|
||||
* - Symbol properties: Symbol-keyed properties are merged just like string
|
||||
* keys.
|
||||
* - Class instances: Class instances are not merged recursively. If a class
|
||||
* instance exists in the source, it will replace the one in the target.
|
||||
*
|
||||
* @param target The target object to merge into.
|
||||
* @param source The source object to merge from.
|
||||
* @param shouldOverwriteUndefined If true, properties with `undefined` values
|
||||
* in the source will overwrite those in the target. Default is true.
|
||||
* @returns A new object containing the deeply merged properties.
|
||||
*
|
||||
* @example
|
||||
* deepMerge({ a: { b: 1 } }, { a: { c: 2 } }) // => { a: { b: 1, c: 2 } }
|
||||
*
|
||||
* deepMerge(
|
||||
* { a: { fn: () => "from target" } },
|
||||
* { a: { fn: () => "from source" } }
|
||||
* ) // => { a: { fn: () => "from source" } }
|
||||
*/
|
||||
export function deepMerge<T extends object, U extends object>(
|
||||
target: T,
|
||||
source: U,
|
||||
shouldOverwriteUndefined: boolean = true,
|
||||
): T & U {
|
||||
return deepMergeImpl(target, source, shouldOverwriteUndefined);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a value is an object. This function returns false for arrays.
|
||||
*
|
||||
* @param value The value to check.
|
||||
* @returns True if the value is an object, false otherwise.
|
||||
*/
|
||||
export function isObject(
|
||||
value: unknown,
|
||||
): value is Record<string | symbol, unknown> {
|
||||
return typeof value === "object" && value !== null && !Array.isArray(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Pauses the execution for the specified number of seconds.
|
||||
*
|
||||
* @param seconds The number of seconds to pause the execution.
|
||||
* @returns A promise that resolves after the specified number of seconds.
|
||||
*/
|
||||
export async function sleep(seconds: number): Promise<void> {
|
||||
await new Promise((resolve) => setTimeout(resolve, seconds * 1000));
|
||||
}
|
||||
|
||||
/**
|
||||
* Binds all methods of an object to the object itself, so that they can be
|
||||
* assigned to an independent variable and still work.
|
||||
*
|
||||
* @param obj The object, which can be an instance of a class.
|
||||
*/
|
||||
export function bindAllMethods<ObjectT extends object>(obj: ObjectT): void {
|
||||
const prototype = Object.getPrototypeOf(obj);
|
||||
const prototypeKeys =
|
||||
prototype !== null ? Object.getOwnPropertyNames(prototype) : [];
|
||||
|
||||
const keys = [...prototypeKeys, ...Object.getOwnPropertyNames(obj)];
|
||||
|
||||
/* eslint-disable-next-line @typescript-eslint/consistent-type-assertions --
|
||||
typescript can't express this in a safe way, so we use any here */
|
||||
const objAsAny = obj as any;
|
||||
|
||||
// Exclude methods that should not be rebound (constructor, Object.prototype methods, etc.)
|
||||
const EXCLUDED_METHODS = [
|
||||
"constructor",
|
||||
"hasOwnProperty",
|
||||
"isPrototypeOf",
|
||||
"propertyIsEnumerable",
|
||||
"toLocaleString",
|
||||
"toString",
|
||||
"valueOf",
|
||||
];
|
||||
|
||||
for (const key of keys) {
|
||||
const val = objAsAny[key];
|
||||
if (typeof val === "function" && !EXCLUDED_METHODS.includes(key)) {
|
||||
objAsAny[key] = val.bind(obj);
|
||||
}
|
||||
}
|
||||
}
|
||||
38
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/number.ts
generated
vendored
Executable file
38
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/number.ts
generated
vendored
Executable file
@@ -0,0 +1,38 @@
|
||||
import {
|
||||
bytesToHexString,
|
||||
hexStringToBytes,
|
||||
hexStringToBigInt,
|
||||
numberToHexString,
|
||||
hexStringToNumber,
|
||||
} from "./hex.js";
|
||||
|
||||
/**
|
||||
* Converts a Uint8Array to a bigint.
|
||||
*
|
||||
* @param bytes The Uint8Array to convert.
|
||||
* @returns The converted bigint.
|
||||
*/
|
||||
export function bytesToBigInt(bytes: Uint8Array): bigint {
|
||||
return hexStringToBigInt(bytesToHexString(bytes));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Uint8Array to a number.
|
||||
*
|
||||
* @param bytes The Uint8Array to convert.
|
||||
* @returns The converted number.
|
||||
*/
|
||||
export function bytesToNumber(bytes: Uint8Array): number {
|
||||
return hexStringToNumber(bytesToHexString(bytes));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a non-negative safe integer or bigint to a Uint8Array.
|
||||
*
|
||||
* @param value The number or bigint to convert.
|
||||
* @returns The converted Uint8Array.
|
||||
* @throws InvalidParameterError If the input is not a safe integer or is negative.
|
||||
*/
|
||||
export function numberToBytes(value: number | bigint): Uint8Array {
|
||||
return hexStringToBytes(numberToHexString(value));
|
||||
}
|
||||
169
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/package.ts
generated
vendored
Executable file
169
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/package.ts
generated
vendored
Executable file
@@ -0,0 +1,169 @@
|
||||
import { createRequire } from "node:module";
|
||||
import path from "node:path";
|
||||
|
||||
import { ensureError } from "./error.js";
|
||||
import {
|
||||
PackageJsonNotFoundError,
|
||||
PackageJsonReadError,
|
||||
} from "./errors/package.js";
|
||||
import { exists, findUp, getRealPath, readJsonFile } from "./fs.js";
|
||||
import { getFilePath } from "./internal/package.js";
|
||||
import { ensureTrailingSlash } from "./string.js";
|
||||
|
||||
/* Adapted from `resolve.exports`. License: https://github.com/lukeed/resolve.exports/blob/master/license */

// The value of a package.json `exports` field: either a single internal path,
// or a map keyed by subpath entries ("." / "./{name}") and/or condition names.
export type PackageExports =
  | PackageExportPath
  | {
      [path: PackageExportsEntry]: PackageExportsValue;
      [condition: string]: PackageExportsValue;
    };

/** Allows "." and "./{name}" */
export type PackageExportsEntry = `.${string}`;

/** Internal path */
export type PackageExportPath = `./${string}`;

// A single exports target: an internal path, `null` (presumably marking a
// blocked subpath, per Node's exports semantics — TODO confirm), a nested
// condition map, or an ordered array of fallbacks.
export type PackageExportsValue =
  | PackageExportPath
  | null
  | {
      [condition: string]: PackageExportsValue;
    }
  | PackageExportsValue[];

/* End of `resolve.exports` adaptation */
|
||||
|
||||
/**
 * The structure of a `package.json` file. This is a subset of the actual
 * `package.json` file, if you need to access other fields you add them here.
 */
export interface PackageJson {
  name: string;
  version: string;
  description?: string;
  // Module system applied to the package's `.js` files.
  type?: "commonjs" | "module";
  engines?: {
    // Supported Node.js version range (semver string).
    node?: string;
  };
  // Entry-point map; see the PackageExports types adapted from `resolve.exports`.
  exports?: PackageExports;
  dependencies?: Record<string, string>;
  devDependencies?: Record<string, string>;
  peerDependencies?: Record<string, string>;
  optionalDependencies?: Record<string, string>;
}
|
||||
|
||||
/**
|
||||
* Searches for the nearest `package.json` file, starting from the directory of
|
||||
* the provided file path or url string and moving up the directory tree.
|
||||
*
|
||||
* @param pathOrUrl A path or url string from which to start the search. The url
|
||||
* must be a file url. This is useful when you want to find the nearest
|
||||
* `package.json` file relative to the current module, as you can use
|
||||
* `import.meta.url`.
|
||||
* @returns The absolute path to the nearest `package.json` file.
|
||||
* @throws PackageJsonNotFoundError If no `package.json` file is found.
|
||||
*/
|
||||
export async function findClosestPackageJson(
|
||||
pathOrUrl: string,
|
||||
): Promise<string> {
|
||||
const filePath = getFilePath(pathOrUrl);
|
||||
|
||||
if (filePath === undefined) {
|
||||
throw new PackageJsonNotFoundError(pathOrUrl);
|
||||
}
|
||||
|
||||
const packageJsonPath = await findUp("package.json", filePath);
|
||||
|
||||
if (packageJsonPath === undefined) {
|
||||
throw new PackageJsonNotFoundError(pathOrUrl);
|
||||
}
|
||||
|
||||
return packageJsonPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the nearest `package.json` file, starting from provided path or url
|
||||
* string and moving up the directory tree.
|
||||
*
|
||||
* @param pathOrUrl A path or url string from which to start the search. The url
|
||||
* must be a file url. This is useful when you want to find the nearest
|
||||
* `package.json` file relative to the current module, as you can use
|
||||
* `import.meta.url`.
|
||||
* @returns The contents of the nearest `package.json` file, parsed as a
|
||||
* {@link PackageJson} object.
|
||||
* @throws PackageJsonNotFoundError If no `package.json` file is found.
|
||||
* @throws PackageJsonReadError If the `package.json` file is found but cannot
|
||||
* be read.
|
||||
*/
|
||||
export async function readClosestPackageJson(
|
||||
pathOrUrl: string,
|
||||
): Promise<PackageJson> {
|
||||
const packageJsonPath = await findClosestPackageJson(pathOrUrl);
|
||||
try {
|
||||
return await readJsonFile<PackageJson>(packageJsonPath);
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
throw new PackageJsonReadError(packageJsonPath, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the root directory of the nearest package, starting from the provided
|
||||
* path or url string and moving up the directory tree.
|
||||
*
|
||||
* This function uses `findClosestPackageJson` to find the nearest `package.json`
|
||||
* file and then returns the directory that contains that file.
|
||||
*
|
||||
* @param pathOrUrl A path or url string from which to start the search. The url
|
||||
* must be a file url. This is useful when you want to find the nearest
|
||||
* `package.json` file relative to the current module, as you can use
|
||||
* `import.meta.url`.
|
||||
* @returns The absolute path of the root directory of the nearest package.
|
||||
*/
|
||||
export async function findClosestPackageRoot(
|
||||
filePathOrUrl: string,
|
||||
): Promise<string> {
|
||||
const packageJsonPath = await findClosestPackageJson(filePathOrUrl);
|
||||
|
||||
return path.dirname(packageJsonPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a dependency starting by following the Node.js resolution algorithm
|
||||
* starting from `from`, and returns the dependency's package.json file, or
|
||||
* `undefined` if the dependency is not found.
|
||||
*
|
||||
* Note: This function uses Node.js's CommonJS resolution algorithm to find the
|
||||
* package.json file, and works with packages using package#exports, even if
|
||||
* they don't export the package.json file.
|
||||
*
|
||||
* @param from The absolute path from where to start the search (i.e. the file
|
||||
* importing the dependency, or its package root).
|
||||
* @param dependencyPackageName The name of the package to find.
|
||||
* @returns The absolute real path (resolved symlinks) of the package.json.
|
||||
*/
|
||||
export async function findDependencyPackageJson(
|
||||
from: string,
|
||||
dependencyPackageName: string,
|
||||
): Promise<string | undefined> {
|
||||
const require = createRequire(ensureTrailingSlash(from));
|
||||
|
||||
const lookupPaths = require.resolve.paths(dependencyPackageName) ?? [];
|
||||
|
||||
const pathToTest = [...dependencyPackageName.split("/"), "package.json"];
|
||||
|
||||
for (const lookupPath of lookupPaths) {
|
||||
const packageJsonPath = path.join(lookupPath, ...pathToTest);
|
||||
|
||||
if (await exists(packageJsonPath)) {
|
||||
return getRealPath(packageJsonPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
PackageJsonNotFoundError,
|
||||
PackageJsonReadError,
|
||||
} from "./errors/package.js";
|
||||
25
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/panic-errors.ts
generated
vendored
Executable file
25
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/panic-errors.ts
generated
vendored
Executable file
@@ -0,0 +1,25 @@
|
||||
import { numberToHexString } from "./hex.js";
|
||||
import { panicErrorCodeToReason } from "./internal/panic-errors.js";
|
||||
|
||||
/**
|
||||
* Converts a Solidity panic error code into a human-readable revert message.
|
||||
*
|
||||
* Solidity defines a set of standardized panic codes (0x01, 0x11, etc.)
|
||||
* that represent specific runtime errors (e.g. arithmetic overflow).
|
||||
* This function looks up the corresponding reason string and formats it
|
||||
* into a message similar to what clients like Hardhat or ethers.js display.
|
||||
*
|
||||
* @param errorCode The panic error code returned by the EVM as a bigint.
|
||||
* @returns A formatted message string:
|
||||
* - `"reverted with panic code <hex> (<reason>)"` if the code is recognized.
|
||||
* - `"reverted with unknown panic code <hex>"` if the code is not recognized.
|
||||
*/
|
||||
export function panicErrorCodeToMessage(errorCode: bigint): string {
|
||||
const reason = panicErrorCodeToReason(errorCode);
|
||||
|
||||
if (reason !== undefined) {
|
||||
return `reverted with panic code ${numberToHexString(errorCode)} (${reason})`;
|
||||
}
|
||||
|
||||
return `reverted with unknown panic code ${numberToHexString(errorCode)}`;
|
||||
}
|
||||
55
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/path.ts
generated
vendored
Executable file
55
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/path.ts
generated
vendored
Executable file
@@ -0,0 +1,55 @@
|
||||
import path from "node:path";
|
||||
|
||||
/**
|
||||
* Resolves a user-provided path into an absolute path.
|
||||
*
|
||||
* If the path is already absolute, it is returned as is, otherwise it is
|
||||
* resolved relative to the root.
|
||||
*
|
||||
* @param root A root path to resolve relative paths against.
|
||||
* @param target The target path to resolve.
|
||||
* @returns An absolute path.
|
||||
*/
|
||||
export function resolveFromRoot(root: string, target: string): string {
|
||||
if (path.isAbsolute(target)) {
|
||||
return target;
|
||||
}
|
||||
|
||||
return path.resolve(root, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to return a shorter version of the path if its inside the given folder.
|
||||
*
|
||||
* This is useful for displaying paths in the terminal, as they can be shorter
|
||||
* when they are inside the current working directory. For example, if the
|
||||
* current working directory is `/home/user/project`, and the path is
|
||||
* `/home/user/project/contracts/File.sol`, the shorter path is
|
||||
* `contracts/File.sol`.
|
||||
*
|
||||
* @param absolutePath The path to shorten.
|
||||
* @param folder The absolute path to the folder.
|
||||
* @returns The shorter path, if possible, or the original path.
|
||||
*/
|
||||
export function shortenPath(absolutePath: string): string {
|
||||
const cwd = process.cwd();
|
||||
let relativePath = path.relative(cwd, absolutePath);
|
||||
|
||||
if (relativePath === "..") {
|
||||
return ".." + path.sep;
|
||||
}
|
||||
|
||||
if (
|
||||
!relativePath.startsWith(".." + path.sep) &&
|
||||
!relativePath.startsWith("." + path.sep) &&
|
||||
!path.isAbsolute(relativePath)
|
||||
) {
|
||||
relativePath = "." + path.sep + relativePath;
|
||||
}
|
||||
|
||||
if (relativePath.length < absolutePath.length) {
|
||||
return relativePath;
|
||||
}
|
||||
|
||||
return absolutePath;
|
||||
}
|
||||
411
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/request.ts
generated
vendored
Executable file
411
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/request.ts
generated
vendored
Executable file
@@ -0,0 +1,411 @@
|
||||
import type EventEmitter from "node:events";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import type { ParsedUrlQueryInput } from "node:querystring";
|
||||
import type UndiciT from "undici";
|
||||
|
||||
import { open } from "node:fs/promises";
|
||||
import querystring from "node:querystring";
|
||||
import stream from "node:stream/promises";
|
||||
|
||||
import { ensureError } from "./error.js";
|
||||
import {
|
||||
DownloadError,
|
||||
RequestError,
|
||||
DispatcherError,
|
||||
} from "./errors/request.js";
|
||||
import { move, remove } from "./fs.js";
|
||||
import {
|
||||
generateTempFilePath,
|
||||
getBaseDispatcherOptions,
|
||||
getBaseRequestOptions,
|
||||
getBasicDispatcher,
|
||||
getPoolDispatcher,
|
||||
getProxyDispatcher,
|
||||
handleError,
|
||||
} from "./internal/request.js";
|
||||
|
||||
// Default request timeout (5 minutes).
export const DEFAULT_TIMEOUT_IN_MILLISECONDS = 300_000; // Aligned with undici
// Maximum number of redirects followed before a request fails.
export const DEFAULT_MAX_REDIRECTS = 10;
// Default connection cap for pool dispatchers (see getDispatcher).
export const DEFAULT_POOL_MAX_CONNECTIONS = 128;
// Default User-Agent header sent with requests.
export const DEFAULT_USER_AGENT = "Hardhat";
|
||||
|
||||
// Aliases over undici's types, so consumers of this module can refer to them
// without importing undici directly.
export type Dispatcher = UndiciT.Dispatcher;
export type TestDispatcher = UndiciT.MockAgent;
export type Interceptable = UndiciT.Interceptable;
|
||||
|
||||
/**
 * Options to configure the dispatcher.
 *
 * @param timeout The timeout in milliseconds. Defaults to {@link DEFAULT_TIMEOUT_IN_MILLISECONDS}.
 * @param proxy The proxy to use. If not provided, no proxy is used. Mutually
 * exclusive with `pool` (see getDispatcher).
 * @param pool Whether to use a pool dispatcher. Defaults to `false`.
 * @param maxConnections The maximum number of connections to use in the pool. Defaults to {@link DEFAULT_POOL_MAX_CONNECTIONS}.
 * @param isTestDispatcher Whether to use a test dispatcher. Defaults to `false`. It's highly recommended to use a test dispatcher in tests to avoid hanging tests.
 */
export interface DispatcherOptions {
  timeout?: number;
  proxy?: string;
  pool?: boolean;
  maxConnections?: number;
  isTestDispatcher?: boolean;
}
|
||||
|
||||
/**
 * Options to configure a request.
 *
 * @param queryParams The query parameters to append to the url.
 * @param extraHeaders Additional headers to include in the request.
 * @param abortSignal The signal to abort the request.
 */
export interface RequestOptions {
  queryParams?: Record<string, any>;
  extraHeaders?: Record<string, string>;
  abortSignal?: AbortSignal | EventEmitter;
}
|
||||
|
||||
/**
 * Minimal shape of an HTTP response returned by the request helpers.
 */
export interface HttpResponse {
  // HTTP status code of the response.
  statusCode: number;
  // Response body. It can be consumed at most once, either as JSON or as
  // text; a second consumption throws TypeError (see getRequest docs).
  body: {
    json(): Promise<any>;
    text(): Promise<string>;
  };
}
|
||||
|
||||
/**
|
||||
* Performs a HTTP request.
|
||||
*
|
||||
* @param url The url to make the request to.
|
||||
* @param requestOptions The options to configure the request. See {@link RequestOptions}.
|
||||
* @param dispatcherOrDispatcherOptions Either a dispatcher or dispatcher options. See {@link DispatcherOptions}.
|
||||
* @returns An object containing the status code and the response body. The body can be accessed as JSON or text.
|
||||
* `body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`.
|
||||
* @throws ConnectionRefusedError If the connection is refused by the server.
|
||||
* @throws RequestTimeoutError If the request times out.
|
||||
* @throws RequestError If the request fails for any other reason.
|
||||
*/
|
||||
export async function getRequest(
|
||||
url: string,
|
||||
requestOptions: RequestOptions = {},
|
||||
dispatcherOrDispatcherOptions?: UndiciT.Dispatcher | DispatcherOptions,
|
||||
): Promise<HttpResponse> {
|
||||
const { request } = await import("undici");
|
||||
|
||||
try {
|
||||
const baseRequestOptions = await getBaseRequestOptions(
|
||||
url,
|
||||
requestOptions,
|
||||
dispatcherOrDispatcherOptions,
|
||||
);
|
||||
return await request(url, {
|
||||
method: "GET",
|
||||
...baseRequestOptions,
|
||||
});
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
|
||||
handleError(e, url);
|
||||
|
||||
throw new RequestError(url, "GET", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a POST request with a JSON body.
|
||||
*
|
||||
* @param url The url to make the request to.
|
||||
* @param body The body of the request, represented as an object.
|
||||
* @param requestOptions The options to configure the request. See {@link RequestOptions}.
|
||||
* @param dispatcherOrDispatcherOptions Either a dispatcher or dispatcher options. See {@link DispatcherOptions}.
|
||||
* @returns An object containing the status code and the response body. The body can be accessed as JSON or text.
|
||||
* `body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`.
|
||||
* @throws ConnectionRefusedError If the connection is refused by the server.
|
||||
* @throws RequestTimeoutError If the request times out.
|
||||
* @throws RequestError If the request fails for any other reason.
|
||||
*/
|
||||
export async function postJsonRequest(
|
||||
url: string,
|
||||
body: unknown,
|
||||
requestOptions: RequestOptions = {},
|
||||
dispatcherOrDispatcherOptions?: UndiciT.Dispatcher | DispatcherOptions,
|
||||
): Promise<HttpResponse> {
|
||||
const { request } = await import("undici");
|
||||
|
||||
try {
|
||||
const { headers, ...baseRequestOptions } = await getBaseRequestOptions(
|
||||
url,
|
||||
requestOptions,
|
||||
dispatcherOrDispatcherOptions,
|
||||
);
|
||||
return await request(url, {
|
||||
method: "POST",
|
||||
...baseRequestOptions,
|
||||
headers: {
|
||||
...headers,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
|
||||
handleError(e, url);
|
||||
|
||||
throw new RequestError(url, "POST", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a POST request with a form body.
|
||||
*
|
||||
* @param url The url to make the request to.
|
||||
* @param body The body of the request, represented as an object.
|
||||
* @param requestOptions The options to configure the request. See {@link RequestOptions}.
|
||||
* @param dispatcherOrDispatcherOptions Either a dispatcher or dispatcher options. See {@link DispatcherOptions}.
|
||||
* @returns An object containing the status code and the response body. The body can be accessed as JSON or text.
|
||||
* `body` can not be consumed twice. For example, calling `text()` after `json()` throws `TypeError`.
|
||||
* @throws ConnectionRefusedError If the connection is refused by the server.
|
||||
* @throws RequestTimeoutError If the request times out.
|
||||
* @throws RequestError If the request fails for any other reason.
|
||||
*/
|
||||
export async function postFormRequest(
|
||||
url: string,
|
||||
body: unknown,
|
||||
requestOptions: RequestOptions = {},
|
||||
dispatcherOrDispatcherOptions?: UndiciT.Dispatcher | DispatcherOptions,
|
||||
): Promise<HttpResponse> {
|
||||
const { request } = await import("undici");
|
||||
|
||||
try {
|
||||
const { headers, ...baseRequestOptions } = await getBaseRequestOptions(
|
||||
url,
|
||||
requestOptions,
|
||||
dispatcherOrDispatcherOptions,
|
||||
);
|
||||
return await request(url, {
|
||||
method: "POST",
|
||||
...baseRequestOptions,
|
||||
headers: {
|
||||
...headers,
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- TODO: Add a runtime check for body's type
|
||||
body: querystring.stringify(body as ParsedUrlQueryInput),
|
||||
});
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
|
||||
handleError(e, url);
|
||||
|
||||
throw new RequestError(url, "POST", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Downloads a file from a url to a destination path.
 *
 * The payload is first streamed into a temporary file and only moved to
 * `destination` once fully written, so a failed download never leaves a
 * partial file at `destination`.
 *
 * @param url The url to download from.
 * @param destination The absolute path to save the file to.
 * @param requestOptions The options to configure the request. See {@link RequestOptions}.
 * @param dispatcherOrDispatcherOptions Either a dispatcher or dispatcher options. See {@link DispatcherOptions}.
 * @throws ConnectionRefusedError If the connection is refused by the server.
 * @throws RequestTimeoutError If the request times out.
 * @throws DownloadFailedError If the download fails for any other reason.
 */
export async function download(
  url: string,
  destination: string,
  requestOptions: RequestOptions = {},
  dispatcherOrDispatcherOptions?: UndiciT.Dispatcher | DispatcherOptions,
): Promise<void> {
  let statusCode: number | undefined;
  // Declared outside the try so the catch block can clean it up.
  let tempFilePath: string | undefined;

  try {
    /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions
    -- We need the full Dispatcher.ResponseData here for stream.pipeline,
    but HttpResponse doesn't expose the raw ReadableStream.
    TODO: wrap undici's request so we can keep the public API
    strictly typed without falling back to Undici types. */
    const response = (await getRequest(
      url,
      requestOptions,
      dispatcherOrDispatcherOptions,
    )) as UndiciT.Dispatcher.ResponseData;
    const { body } = response;
    statusCode = response.statusCode;

    // Any non-2xx response is treated as a failure; the body text becomes
    // the error message.
    if (statusCode < 200 || statusCode >= 300) {
      throw new Error(await body.text());
    }

    tempFilePath = await generateTempFilePath(destination);

    let fileHandle: FileHandle | undefined;

    try {
      fileHandle = await open(tempFilePath, "w");

      const fileStream = fileHandle.createWriteStream();

      // Stream the response body straight into the temp file.
      await stream.pipeline(body, fileStream);
    } finally {
      // NOTE: Historically, not closing the file handle caused issues on Windows,
      // for example, when trying to move the file previously written to by this function
      await fileHandle?.close();
    }

    // Atomically (from the caller's perspective) publish the finished file.
    await move(tempFilePath, destination);
  } catch (e) {
    ensureError(e);

    if (tempFilePath !== undefined) {
      try {
        await remove(tempFilePath);
      } catch {
        // Best-effort: file may not exist or may have already been moved
      }
    }

    // Translates known failure modes into the corresponding typed errors.
    handleError(e, url);

    throw new DownloadError(url, e);
  }
}
|
||||
|
||||
/**
|
||||
* Creates a dispatcher based on the provided options.
|
||||
* If the `proxy` option is set, it creates a {@link UndiciT.ProxyAgent} dispatcher.
|
||||
* If the `pool` option is set to `true`, it creates a {@link UndiciT.Pool} dispatcher.
|
||||
* Otherwise, it creates a basic {@link UndiciT.Agent} dispatcher.
|
||||
*
|
||||
* @param url The url to make requests to.
|
||||
* @param options The options to configure the dispatcher. See {@link DispatcherOptions}.
|
||||
* @returns The configured dispatcher instance.
|
||||
* @throws DispatcherError If the dispatcher can't be created.
|
||||
*/
|
||||
export async function getDispatcher(
|
||||
url: string,
|
||||
{
|
||||
timeout,
|
||||
proxy,
|
||||
pool,
|
||||
maxConnections,
|
||||
isTestDispatcher,
|
||||
}: DispatcherOptions = {},
|
||||
): Promise<Dispatcher> {
|
||||
try {
|
||||
if (pool !== undefined && proxy !== undefined) {
|
||||
throw new Error(
|
||||
"The pool and proxy options can't be used at the same time",
|
||||
);
|
||||
}
|
||||
const baseOptions = getBaseDispatcherOptions(timeout, isTestDispatcher);
|
||||
|
||||
if (proxy !== undefined) {
|
||||
return await getProxyDispatcher(proxy, baseOptions);
|
||||
}
|
||||
|
||||
if (pool === true) {
|
||||
return await getPoolDispatcher(url, {
|
||||
...baseOptions,
|
||||
connections: maxConnections ?? DEFAULT_POOL_MAX_CONNECTIONS,
|
||||
});
|
||||
}
|
||||
|
||||
return await getBasicDispatcher(baseOptions);
|
||||
} catch (e) {
|
||||
ensureError(e);
|
||||
throw new DispatcherError(e.message, e);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getTestDispatcher(
|
||||
options: {
|
||||
timeout?: number;
|
||||
} = {},
|
||||
): Promise<TestDispatcher> {
|
||||
const { MockAgent } = await import("undici");
|
||||
|
||||
const baseOptions = getBaseDispatcherOptions(options.timeout, true);
|
||||
return new MockAgent(baseOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether a proxy should be used for a given url.
|
||||
*
|
||||
* @param url The url to check.
|
||||
* @returns `true` if a proxy should be used for the url, `false` otherwise.
|
||||
*/
|
||||
export function shouldUseProxy(url: string): boolean {
|
||||
const { hostname } = new URL(url);
|
||||
const noProxy = process.env.NO_PROXY;
|
||||
|
||||
if (hostname === "localhost" || hostname === "127.0.0.1" || noProxy === "*") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (noProxy !== undefined && noProxy !== "") {
|
||||
const noProxySet = new Set(noProxy.split(","));
|
||||
|
||||
if (noProxySet.has(hostname)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether an absolute url is valid.
|
||||
*
|
||||
* @param url The url to check.
|
||||
* @returns `true` if the url is valid, `false` otherwise.
|
||||
*/
|
||||
export function isValidUrl(url: string): boolean {
|
||||
try {
|
||||
new URL(url);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the proxy URL from environment variables based on the target URL.
|
||||
* For HTTPS URLs, checks `https_proxy` then `HTTPS_PROXY`.
|
||||
* For HTTP URLs, checks `http_proxy` then `HTTP_PROXY`.
|
||||
* Falls back to the other protocol's proxy if none found.
|
||||
*
|
||||
* @param url The target URL to determine proxy for.
|
||||
* @returns The proxy URL, or `undefined` if none are set.
|
||||
*/
|
||||
export function getProxyUrl(url: string): string | undefined {
|
||||
const { protocol } = new URL(url);
|
||||
|
||||
if (protocol === "https:") {
|
||||
return (
|
||||
process.env.https_proxy ??
|
||||
process.env.HTTPS_PROXY ??
|
||||
process.env.http_proxy ??
|
||||
process.env.HTTP_PROXY
|
||||
);
|
||||
} else if (protocol === "http:") {
|
||||
return (
|
||||
process.env.http_proxy ??
|
||||
process.env.HTTP_PROXY ??
|
||||
process.env.https_proxy ??
|
||||
process.env.HTTPS_PROXY
|
||||
);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export {
|
||||
ConnectionRefusedError,
|
||||
DispatcherError,
|
||||
DownloadError,
|
||||
RequestError,
|
||||
RequestTimeoutError,
|
||||
ResponseStatusCodeError,
|
||||
} from "./errors/request.js";
|
||||
130
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/spinner.ts
generated
vendored
Executable file
130
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/spinner.ts
generated
vendored
Executable file
@@ -0,0 +1,130 @@
|
||||
// Braille-dot animation frames, cycled in order while the spinner runs.
const FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
// Delay between frame renders, in milliseconds.
export const FRAME_INTERVAL_MS = 80;
|
||||
|
||||
/**
 * Minimal control surface for a terminal spinner.
 */
export interface ISpinner {
  // Whether the spinner will actually render anything when started.
  readonly isEnabled: boolean;
  // Begin the frame animation; implementations may no-op when disabled.
  start(): void;
  // Halt the animation.
  stop(): void;
}
|
||||
|
||||
/**
 * Optional settings when creating a spinner.
 */
export interface SpinnerOptions {
  /**
   * Text shown next to the spinner. Defaults to "" (see createSpinner).
   */
  text?: string;

  /**
   * Stream used to write frames. Defaults to process.stdout (see createSpinner).
   */
  stream?: NodeJS.WriteStream;

  /**
   * Whether the spinner is enabled. Defaults to true, but rendering also
   * requires a TTY stream (see createSpinner).
   */
  enabled?: boolean;
}
|
||||
|
||||
/**
|
||||
* Spinner that writes frames to a stream.
|
||||
*/
|
||||
class Spinner implements ISpinner {
|
||||
public readonly isEnabled: boolean;
|
||||
readonly #text: string;
|
||||
#interval: NodeJS.Timeout | null = null;
|
||||
readonly #stream: NodeJS.WriteStream;
|
||||
|
||||
constructor(options: Required<SpinnerOptions>) {
|
||||
this.isEnabled = options.enabled;
|
||||
this.#stream = options.stream;
|
||||
this.#text = options.text;
|
||||
}
|
||||
/**
|
||||
* Begin rendering frames when enabled.
|
||||
*/
|
||||
public start(): void {
|
||||
if (!this.isEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.#stopAnimation();
|
||||
let frameIndex = 0;
|
||||
|
||||
this.#interval = setInterval(() => {
|
||||
this.#render(FRAMES[frameIndex]);
|
||||
frameIndex = (frameIndex + 1) % FRAMES.length;
|
||||
}, FRAME_INTERVAL_MS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the spinner without printing a final line.
|
||||
*/
|
||||
public stop(): void {
|
||||
this.#stopAnimation();
|
||||
}
|
||||
|
||||
#clearLine(): void {
|
||||
this.#stream.clearLine(0);
|
||||
this.#stream.cursorTo(0);
|
||||
}
|
||||
|
||||
#render(frame: string): void {
|
||||
if (!this.isEnabled) {
|
||||
return;
|
||||
}
|
||||
this.#clearLine();
|
||||
this.#stream.write(`${frame} ${this.#text}`);
|
||||
}
|
||||
|
||||
#stopAnimation(): void {
|
||||
if (this.#interval === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
clearInterval(this.#interval);
|
||||
this.#interval = null;
|
||||
|
||||
if (this.isEnabled) {
|
||||
this.#clearLine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a spinner instance.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const spinner = createSpinner({ text: "Compiling…" });
|
||||
* spinner.start();
|
||||
*
|
||||
* try {
|
||||
* await compileContracts();
|
||||
* spinner.stop();
|
||||
* console.log("Compiled 12 contracts");
|
||||
* } catch (error) {
|
||||
* spinner.stop();
|
||||
* console.error("Compilation failed");
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @param options Optional spinner configuration.
|
||||
* @returns {Spinner} A spinner instance.
|
||||
*/
|
||||
export function createSpinner(options: SpinnerOptions = {}): ISpinner {
|
||||
const stream = options.stream ?? process.stdout;
|
||||
|
||||
const enabled =
|
||||
stream.isTTY === true &&
|
||||
process.env.TERM !== "dumb" &&
|
||||
(options.enabled ?? true);
|
||||
|
||||
const text = options.text ?? "";
|
||||
return new Spinner({
|
||||
enabled,
|
||||
stream,
|
||||
text,
|
||||
});
|
||||
}
|
||||
16
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/stream.ts
generated
vendored
Executable file
16
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/stream.ts
generated
vendored
Executable file
@@ -0,0 +1,16 @@
|
||||
import { Writable } from "node:stream";
|
||||
|
||||
/**
|
||||
* Creates a Transform that writes everything to actualWritable, without closing it
|
||||
* when finished.
|
||||
*
|
||||
* This is useful to pipe things to stdout, without closing it, while being
|
||||
* able to await for the result of the pipe to finish.
|
||||
*/
|
||||
export function createNonClosingWriter(actualWritable: Writable): Writable {
|
||||
return new Writable({
|
||||
write(chunk, encoding, callback) {
|
||||
actualWritable.write(chunk, encoding, callback);
|
||||
},
|
||||
});
|
||||
}
|
||||
69
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/string.ts
generated
vendored
Executable file
69
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/string.ts
generated
vendored
Executable file
@@ -0,0 +1,69 @@
|
||||
/**
|
||||
* Converts a word in singular form to a pluralized string based on the number
|
||||
* of items.
|
||||
*
|
||||
* @param singular The singular form of the word.
|
||||
* @param count The number of items. This determines whether the singular or
|
||||
* plural form is used.
|
||||
* @param plural The optional plural form of the word. If not provided, the
|
||||
* plural form is created by appending an "s" to the singular form.
|
||||
* @returns The pluralized string.
|
||||
*/
|
||||
export function pluralize(
|
||||
singular: string,
|
||||
count: number,
|
||||
plural?: string,
|
||||
): string {
|
||||
if (count === 1) {
|
||||
return singular;
|
||||
}
|
||||
|
||||
return plural !== undefined ? plural : `${singular}s`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Capitalizes the first letter of a string.
|
||||
*
|
||||
* @param str The string to capitalize.
|
||||
* @returns The string with the first letter capitalized.
|
||||
*/
|
||||
export function capitalize(str: string): string {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a kebab-case string to camelCase.
|
||||
*
|
||||
* @param str The kebab-case string to convert.
|
||||
* @returns The camelCase string.
|
||||
*/
|
||||
export function kebabToCamelCase(str: string): string {
|
||||
return str.replace(/-./g, (match) => match.charAt(1).toUpperCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a camelCase string to snake_case.
|
||||
*
|
||||
* @param str The camelCase string to convert.
|
||||
* @returns The snake_case string.
|
||||
*/
|
||||
export function camelToSnakeCase(str: string): string {
|
||||
return str.replace(/[A-Z0-9]/g, (match) => `_${match.toLowerCase()}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a camelCase string to kebab-case.
|
||||
*
|
||||
* @param str The camelCase string to convert.
|
||||
* @returns The kebab-case string.
|
||||
*/
|
||||
export function camelToKebabCase(str: string): string {
|
||||
return str.replace(/[A-Z0-9]/g, (match) => `-${match.toLowerCase()}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures a string ends with a slash.
|
||||
*/
|
||||
export function ensureTrailingSlash(path: string): string {
|
||||
return path.endsWith("/") ? path : path + "/";
|
||||
}
|
||||
46
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/subprocess.ts
generated
vendored
Executable file
46
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/subprocess.ts
generated
vendored
Executable file
@@ -0,0 +1,46 @@
|
||||
import { spawn } from "node:child_process";
|
||||
|
||||
import {
|
||||
SubprocessFileNotFoundError,
|
||||
SubprocessPathIsDirectoryError,
|
||||
} from "./errors/subprocess.js";
|
||||
import { exists, isDirectory } from "./fs.js";
|
||||
|
||||
/**
|
||||
* Spawns a detached subprocess to execute a given file with optional arguments.
|
||||
*
|
||||
* @param absolutePathToSubProcessFile - The absolute path to the subprocess file to be executed.
|
||||
* @param args - Optional list of string arguments to pass to the subprocess.
|
||||
* @param env - Optional environment key-value pairs to pass to the subprocess.
|
||||
*
|
||||
* The subprocess runs in a detached mode and its standard input/output is ignored.
|
||||
* This function does not wait for the subprocess to complete and the subprocess is unreferenced
|
||||
* to allow the parent process to exit independently.
|
||||
*/
|
||||
export async function spawnDetachedSubProcess(
|
||||
absolutePathToSubProcessFile: string,
|
||||
args: string[] = [],
|
||||
env: Record<string, string> = {},
|
||||
): Promise<void> {
|
||||
if ((await exists(absolutePathToSubProcessFile)) === false) {
|
||||
throw new SubprocessFileNotFoundError(absolutePathToSubProcessFile);
|
||||
}
|
||||
|
||||
if ((await isDirectory(absolutePathToSubProcessFile)) === true) {
|
||||
throw new SubprocessPathIsDirectoryError(absolutePathToSubProcessFile);
|
||||
}
|
||||
|
||||
const subprocessArgs = [absolutePathToSubProcessFile, ...args];
|
||||
|
||||
if (absolutePathToSubProcessFile.endsWith(".ts")) {
|
||||
subprocessArgs.unshift("--import", import.meta.resolve("tsx/esm"));
|
||||
}
|
||||
|
||||
const subprocess = spawn(process.execPath, subprocessArgs, {
|
||||
detached: true,
|
||||
env,
|
||||
stdio: "ignore",
|
||||
});
|
||||
|
||||
subprocess.unref();
|
||||
}
|
||||
624
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/synchronization.ts
generated
vendored
Executable file
624
dev/env/node_modules/@nomicfoundation/hardhat-utils/src/synchronization.ts
generated
vendored
Executable file
@@ -0,0 +1,624 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import * as fs from "node:fs";
|
||||
import * as os from "node:os";
|
||||
import * as path from "node:path";
|
||||
|
||||
import debug from "debug";
|
||||
|
||||
import { ensureError, ensureNodeErrnoExceptionError } from "./error.js";
|
||||
import {
|
||||
BaseMultiProcessMutexError,
|
||||
IncompatibleHostnameMultiProcessMutexError,
|
||||
IncompatiblePlatformMultiProcessMutexError,
|
||||
IncompatibleUidMultiProcessMutexError,
|
||||
InvalidMultiProcessMutexPathError,
|
||||
MultiProcessMutexError,
|
||||
MultiProcessMutexTimeoutError,
|
||||
StaleMultiProcessMutexError,
|
||||
} from "./errors/synchronization.js";
|
||||
import { ensureDir } from "./fs.js";
|
||||
import { sleep } from "./lang.js";
|
||||
|
||||
export {
|
||||
IncompatibleHostnameMultiProcessMutexError,
|
||||
IncompatibleMultiProcessMutexError,
|
||||
IncompatiblePlatformMultiProcessMutexError,
|
||||
IncompatibleUidMultiProcessMutexError,
|
||||
InvalidMultiProcessMutexPathError,
|
||||
MultiProcessMutexError,
|
||||
MultiProcessMutexTimeoutError,
|
||||
StaleMultiProcessMutexError,
|
||||
} from "./errors/synchronization.js";
|
||||
|
||||
// Namespaced debug logger for this module
const log = debug("hardhat:util:multi-process-mutex");

// Unique per-process id, embedded in temp-file names and lock metadata
const PROCESS_SESSION_ID = randomUUID();
// Default max time to spend trying to acquire the lock (ms)
const DEFAULT_TIMEOUT_MS = 60_000;
// First poll interval for the exponential backoff (ms)
const DEFAULT_INITIAL_POLL_INTERVAL_MS = 5;
// Backoff cap: polls never sleep longer than this (ms)
const MAX_POLL_INTERVAL_MS = 200;

/**
 * Error codes indicating hard links are definitively unsupported on the
 * target filesystem. These cause immediate failure rather than retries.
 */
const HARD_LINK_UNSUPPORTED_CODES = new Set(["EOPNOTSUPP", "ENOTSUP", "EXDEV"]);

// JSON payload written into the lock file, identifying the lock holder
interface LockMetadata {
  pid: number;
  hostname: string;
  createdAt: number; // ms since epoch; kept for debugging, not staleness checks
  uid?: number; // absent on platforms without process.getuid (e.g. Windows)
  platform: string;
  sessionId: string;
}

// Result of a staleness probe; metadata is undefined when unreadable/corrupt
type StalenessResult =
  | { isStale: true; metadata: LockMetadata | undefined }
  | { isStale: false };

// Outcome of a single non-blocking acquisition attempt
type AcquireResult =
  | { acquired: true }
  | { acquired: false; reclaimedStaleLock: boolean };
|
||||
|
||||
/**
|
||||
* A class that implements an inter-process mutex.
|
||||
*
|
||||
* This Mutex is implemented using hard-link-based atomic file creation. A
|
||||
* temporary file containing JSON metadata (PID, hostname, platform, uid,
|
||||
* session ID, and creation timestamp) is written first, then hard-linked to
|
||||
* the lock path via `fs.linkSync`. `linkSync` fails atomically with `EEXIST`
|
||||
* if the lock already exists, ensuring only one process can hold the lock at
|
||||
* a time.
|
||||
*
|
||||
* Staleness is determined by PID liveness only — timestamps are stored for
|
||||
* debugging purposes but are never used to determine staleness. This avoids the
|
||||
* clock-skew and long-running-task problems that time-based staleness detection
|
||||
* has (where a second process can break into a lock that's still legitimately
|
||||
* held).
|
||||
*
|
||||
* Incompatible locks — those created by a different hostname, platform, or
|
||||
* uid — are rejected immediately with specific subclasses of
|
||||
* `IncompatibleMultiProcessMutexError`
|
||||
* (`IncompatibleHostnameMultiProcessMutexError`,
|
||||
* `IncompatiblePlatformMultiProcessMutexError`, or
|
||||
* `IncompatibleUidMultiProcessMutexError`) because their PID liveness cannot
|
||||
* be verified or their lock file cannot be removed. These must be removed
|
||||
* manually.
|
||||
*
|
||||
* When the lock is held by a live process, the caller polls with exponential
|
||||
* backoff (default: 5ms → 10ms → ... → 160ms → 200ms cap) until the lock is
|
||||
* released or a timeout (default: 60s) is reached.
|
||||
*
|
||||
* If the filesystem does not support hard links (e.g., certain network
|
||||
* filesystems), acquisition fails fast with a `MultiProcessMutexError` rather
|
||||
* than degrading into timeout-based retries.
|
||||
*
|
||||
* ## Performance characteristics
|
||||
*
|
||||
* - **Uncontended acquisition:** One temp file write + one `linkSync` — takes
|
||||
* less than 1ms on most systems.
|
||||
* - **Stale lock recovery:** One `readFileSync` to read metadata, one
|
||||
* `process.kill(pid, 0)` liveness check, and one `unlinkSync` to remove the
|
||||
* stale lock file before retrying acquisition. The retry is immediate (no
|
||||
* sleep), so recovery adds sub-millisecond overhead.
|
||||
* - **Contended (live holder):** Polls with exponential backoff starting at
|
||||
* 5ms and doubling each iteration until capped at 200ms. Worst-case latency
|
||||
* after the lock is released is up to `MAX_POLL_INTERVAL_MS` (200ms).
|
||||
* - **Release:** A single `unlinkSync` call.
|
||||
*
|
||||
* ## Limitations
|
||||
*
|
||||
* - **Polling-based:** There is no filesystem notification; callers discover
|
||||
* that the lock is free only on the next poll, so there can be up to 200ms
|
||||
* of wasted wait time after the lock is released.
|
||||
* - **Not reentrant:** The same process (or even the same `MultiProcessMutex`
|
||||
* instance) calling `use()` while already holding the lock will deadlock
|
||||
* until the timeout fires.
|
||||
* - **Single-host, single-user only:** Encountering a lock from a different
|
||||
* hostname throws `IncompatibleHostnameMultiProcessMutexError`, a different
|
||||
* platform throws `IncompatiblePlatformMultiProcessMutexError`, and a
|
||||
* different uid throws `IncompatibleUidMultiProcessMutexError`. All extend
|
||||
* `IncompatibleMultiProcessMutexError`. This means the lock is not safe to
|
||||
* use on shared/networked filesystems (e.g., NFS) where multiple hosts or
|
||||
* users may access the same path.
|
||||
* - **Requires hard-link support:** The underlying filesystem must support
|
||||
* `linkSync`. If hard links are unsupported, acquisition fails immediately
|
||||
* with `MultiProcessMutexError`.
|
||||
* - **PID recycling:** If a process dies and the OS reassigns its PID to a new
|
||||
* unrelated process before the stale check runs, the lock is incorrectly
|
||||
* considered live. This is extremely unlikely in practice due to the large
|
||||
* PID space on modern systems.
|
||||
* - **No fairness guarantee:** Multiple waiters polling concurrently have no
|
||||
* guaranteed ordering — whichever one succeeds at `linkSync` first after the
|
||||
* lock is released wins.
|
||||
*/
|
||||
export class MultiProcessMutex {
  // Absolute path of the lock file on disk
  readonly #lockFilePath: string;
  // Max acquisition time in ms before MultiProcessMutexTimeoutError
  readonly #timeout: number;
  // First sleep of the exponential backoff, in ms
  readonly #initialPollInterval: number;

  /**
   * Creates an inter-process mutex given an absolute path.
   *
   * @param absolutePathToLock The absolute path of the mutex.
   * @param timeout The max amount of time to spend trying to acquire the lock
   * in milliseconds. Defaults to 60000.
   * @param initialPollInterval The initial poll interval in milliseconds.
   * Defaults to 5.
   */
  constructor(
    absolutePathToLock: string,
    timeout?: number,
    initialPollInterval?: number,
  ) {
    if (!path.isAbsolute(absolutePathToLock)) {
      throw new InvalidMultiProcessMutexPathError(absolutePathToLock);
    }
    this.#lockFilePath = absolutePathToLock;

    this.#timeout = timeout ?? DEFAULT_TIMEOUT_MS;
    this.#initialPollInterval =
      initialPollInterval ?? DEFAULT_INITIAL_POLL_INTERVAL_MS;
  }

  /**
   * Runs the function f while holding the mutex, returning its result.
   *
   * @param f The function to run.
   * @returns The result of the function.
   */
  public async use<T>(f: () => Promise<T>): Promise<T> {
    const release = await this.acquire();

    try {
      return await f();
    } finally {
      await release();
    }
  }

  /**
   * Acquires the mutex, returning an async function to release it.
   * The function MUST be called after using the mutex.
   *
   * If this function throws, no cleanup is necessary — the lock was never
   * acquired.
   *
   * @returns The mutex's release function.
   */
  public async acquire(): Promise<() => Promise<void>> {
    log(`Starting mutex process with lock file '${this.#lockFilePath}'`);

    try {
      await this.#acquireLock();
    } catch (e) {
      ensureError(e);

      // Domain-specific errors pass through untouched; anything else is
      // wrapped so callers always see a mutex error type
      if (e instanceof BaseMultiProcessMutexError) {
        throw e;
      }

      throw new MultiProcessMutexError(this.#lockFilePath, e);
    }

    // Guard so calling the release function more than once is a no-op
    let released = false;

    return async () => {
      if (released) {
        return;
      }

      this.#releaseLock();
      released = true;
    };
  }

  /**
   * Polls #tryAcquire until the lock is held, throwing
   * MultiProcessMutexTimeoutError once #timeout ms have elapsed.
   */
  async #acquireLock(): Promise<void> {
    const startTime = Date.now();
    let pollInterval = this.#initialPollInterval;

    await ensureDir(path.dirname(this.#lockFilePath));

    while (true) {
      const result = this.#tryAcquire();

      if (result.acquired) {
        return;
      }

      // Check timeout
      const elapsed = Date.now() - startTime;
      if (elapsed >= this.#timeout) {
        throw new MultiProcessMutexTimeoutError(
          this.#lockFilePath,
          this.#timeout,
        );
      }

      // Skip sleep after reclaiming a stale lock — retry immediately
      if (result.reclaimedStaleLock) {
        continue;
      }

      // Wait with exponential backoff
      log(`Lock at ${this.#lockFilePath} is busy, waiting ${pollInterval}ms`);
      // NOTE(review): the interval is divided by 1000, so sleep() presumably
      // takes seconds — confirm against ./lang.js
      await sleep(pollInterval / 1000);

      // Exponential backoff, capped
      pollInterval = Math.min(pollInterval * 2, MAX_POLL_INTERVAL_MS);
    }
  }

  /**
   * Removes the lock file. A missing file (ENOENT) is treated as already
   * released; any other failure is wrapped in MultiProcessMutexError.
   */
  #releaseLock(): void {
    try {
      fs.unlinkSync(this.#lockFilePath);
      log(`Released lock at ${this.#lockFilePath}`);
    } catch (e) {
      ensureNodeErrnoExceptionError(e);
      if (e.code === "ENOENT") {
        log(`Lock at ${this.#lockFilePath} already removed`);
        return;
      }
      throw new MultiProcessMutexError(this.#lockFilePath, e);
    }
  }

  /**
   * Makes a single, non-blocking attempt to take the lock by writing a
   * metadata temp file and hard-linking it to the lock path.
   */
  #tryAcquire(): AcquireResult {
    const lockPath = this.#lockFilePath;

    // Fast path: if the lock file already exists, check staleness directly
    // without creating temp files. This is both an optimization for the
    // common contended case and is required for correct behavior when the
    // parent directory is read-only (stale locks can still be detected via
    // readFileSync even when file creation in the directory is blocked).
    //
    // Note: handleExistingLock() must be called outside the try/catch so
    // that errors like StaleMultiProcessMutexError propagate correctly.
    let lockExists = false;
    try {
      fs.accessSync(lockPath, fs.constants.F_OK);
      lockExists = true;
    } catch {
      // Lock doesn't exist (or can't be checked) — proceed to acquire
    }

    if (lockExists) {
      return this.#handleExistingLock();
    }

    // Lock doesn't appear to exist — try to acquire via temp file + hard link
    const metadata = this.#buildMetadata();
    const contents = JSON.stringify(metadata, null, 2);

    const randomSuffix = Math.random().toString(16).slice(2);
    const tempPath = `${lockPath}.tmp-${process.pid}-${PROCESS_SESSION_ID}-${Date.now()}-${randomSuffix}`;

    let tempFd: number | undefined;
    try {
      // Create temp file with exclusive flag to prevent collisions
      tempFd = fs.openSync(tempPath, "wx");
      fs.writeFileSync(tempFd, contents, "utf8");
      fs.closeSync(tempFd);
      tempFd = undefined;

      // Attempt atomic hard link to the lock path
      fs.linkSync(tempPath, lockPath);

      log(`Acquired lock at ${lockPath}`);

      // Best-effort cleanup of temp files left by dead processes.
      // We hold the lock, so only one process runs this at a time.
      this.#cleanupDeadProcessTempFiles();

      return { acquired: true };
    } catch (e) {
      ensureNodeErrnoExceptionError(e);

      if (e.code === "EEXIST") {
        // Lock was created between our accessSync and linkSync
        return this.#handleExistingLock();
      }

      if (e.code === "ENOENT") {
        // Parent directory doesn't exist. Create it and retry.
        const parentDir = path.dirname(lockPath);
        log(`Parent directory ${parentDir} does not exist, creating it`);
        fs.mkdirSync(parentDir, { recursive: true });
        return { acquired: false, reclaimedStaleLock: false };
      }

      // Hard links definitively unsupported — fail fast
      if (HARD_LINK_UNSUPPORTED_CODES.has(e.code ?? "")) {
        throw new MultiProcessMutexError(lockPath, e);
      }

      // We retry on permission errors, as this is a common transient failure
      // on Windows.
      if (e.code === "EPERM" || e.code === "EACCES") {
        log("Failed to acquire lock, retrying due to permission error");
        return { acquired: false, reclaimedStaleLock: false };
      }

      // Any other error (ENAMETOOLONG, ENOSPC, etc.)
      throw new MultiProcessMutexError(lockPath, e);
    } finally {
      // Close fd if still open (write or close failed)
      if (tempFd !== undefined) {
        try {
          fs.closeSync(tempFd);
        } catch {
          // Best effort
        }
      }

      // Always clean up the temp file
      try {
        fs.unlinkSync(tempPath);
      } catch {
        // Best effort — file may not exist if openSync failed
      }
    }
  }

  /**
   * Handles an existing lock file: reclaims it if stale, otherwise reports
   * that the caller should keep waiting.
   */
  #handleExistingLock(): AcquireResult {
    const staleness = this.#checkStaleness();

    if (staleness.isStale) {
      const reclaimed = this.#tryUnlockingStaleLock(staleness.metadata);
      return { acquired: false, reclaimedStaleLock: reclaimed };
    }

    return { acquired: false, reclaimedStaleLock: false };
  }

  /**
   * Decides whether the current lock file is stale (holder dead or metadata
   * unreadable). Throws an Incompatible*MultiProcessMutexError when the lock
   * was created on a different hostname/platform or by a different uid,
   * since PID liveness can't be verified in those cases.
   */
  #checkStaleness(): StalenessResult {
    const lockPath = this.#lockFilePath;
    const metadata = this.#readMetadata();

    if (metadata === undefined) {
      log(
        `Lock at ${lockPath} has missing/corrupt metadata, treating as stale`,
      );
      return { isStale: true, metadata: undefined };
    }

    // Different hostname — can't verify PID remotely
    if (metadata.hostname !== os.hostname()) {
      throw new IncompatibleHostnameMultiProcessMutexError(
        lockPath,
        metadata.hostname,
        os.hostname(),
      );
    }

    // Different platform — can't verify PID across platforms
    if (metadata.platform !== process.platform) {
      throw new IncompatiblePlatformMultiProcessMutexError(
        lockPath,
        metadata.platform,
        process.platform,
      );
    }

    // Different uid — can't remove a lock owned by another user
    const currentUid = process.getuid?.();
    if (
      metadata.uid !== undefined &&
      currentUid !== undefined &&
      metadata.uid !== currentUid
    ) {
      throw new IncompatibleUidMultiProcessMutexError(
        lockPath,
        metadata.uid,
        currentUid,
      );
    }

    // PID liveness check
    if (!this.#isProcessAlive(metadata.pid)) {
      log(`Lock at ${lockPath} owned by dead process PID=${metadata.pid}`);
      return { isStale: true, metadata };
    }

    // Process is alive, lock is not stale
    return { isStale: false };
  }

  /**
   * Removes a lock file judged stale. Returns true when the lock is gone
   * (removed here or already removed by another process); throws when it
   * cannot be removed (permissions/busy) or on unexpected errors.
   */
  #tryUnlockingStaleLock(metadata: LockMetadata | undefined): boolean {
    const lockPath = this.#lockFilePath;

    try {
      fs.unlinkSync(lockPath);
      log(`Removed stale lock at ${lockPath}`);
    } catch (e) {
      ensureNodeErrnoExceptionError(e);

      if (e.code === "ENOENT") {
        // Already removed by another process — safe to retry acquisition
        log(`Stale lock at ${lockPath} already removed by another process`);
        return true;
      }

      if (e.code === "EACCES" || e.code === "EPERM" || e.code === "EBUSY") {
        throw new StaleMultiProcessMutexError(lockPath, metadata?.uid, e);
      }

      throw new MultiProcessMutexError(lockPath, e);
    }

    // Best-effort cleanup of orphaned temp files from dead processes
    this.#cleanupDeadProcessTempFiles();

    return true;
  }

  /**
   * Checks if a process with the given PID is alive using signal 0, which is
   * a platform-independent existence check supported on both POSIX and Windows.
   *
   * `ESRCH` means the process doesn't exist. `EPERM` means it exists but
   * belongs to another user — still alive.
   */
  #isProcessAlive(pid: number): boolean {
    try {
      process.kill(pid, 0);
      return true;
    } catch (e) {
      ensureNodeErrnoExceptionError(e);
      if (e.code === "ESRCH") {
        return false; // Process does not exist
      }
      // EPERM means the process exists but we don't have permission to signal it
      return true;
    }
  }

  /**
   * Builds the metadata object describing this process as the lock holder.
   * `uid` is included only on platforms that expose process.getuid.
   */
  #buildMetadata(): LockMetadata {
    return {
      pid: process.pid,
      hostname: os.hostname(),
      createdAt: Date.now(),
      ...(process.getuid !== undefined ? { uid: process.getuid() } : {}),
      platform: process.platform,
      sessionId: PROCESS_SESSION_ID,
    };
  }

  /**
   * Reads and validates the lock file's JSON metadata. Returns undefined for
   * a missing file, corrupt JSON, a read error, or any shape/range violation
   * — all of which the caller treats as "no valid metadata".
   */
  #readMetadata(): LockMetadata | undefined {
    try {
      const content = fs.readFileSync(this.#lockFilePath, "utf8");
      const parsed: unknown = JSON.parse(content);

      if (
        typeof parsed !== "object" ||
        parsed === null ||
        !("pid" in parsed) ||
        !("hostname" in parsed) ||
        !("createdAt" in parsed) ||
        !("platform" in parsed) ||
        typeof parsed.pid !== "number" ||
        typeof parsed.hostname !== "string" ||
        typeof parsed.createdAt !== "number" ||
        typeof parsed.platform !== "string" ||
        Number.isSafeInteger(parsed.pid) === false ||
        parsed.pid < 1 ||
        Number.isSafeInteger(parsed.createdAt) === false ||
        parsed.createdAt < 1 ||
        ("uid" in parsed &&
          parsed.uid !== undefined &&
          (typeof parsed.uid !== "number" ||
            Number.isSafeInteger(parsed.uid) === false)) ||
        ("sessionId" in parsed && typeof parsed.sessionId !== "string")
      ) {
        return undefined;
      }

      // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- We just validated it
      return parsed as LockMetadata;
    } catch {
      // Missing file, corrupt JSON, permission error — all treated as "no valid metadata"
      return undefined;
    }
  }

  /**
   * Best-effort cleanup of orphaned temp files left by dead processes.
   *
   * Scans the parent directory for all temp files matching this lock's naming
   * pattern (`{baseName}.tmp-{pid}-...`), parses the PID from each filename,
   * and removes files whose PID is no longer alive. Files with unparseable
   * PIDs are left untouched (conservative — don't delete what we can't verify).
   *
   * This is safe because the class is single-host-only (cross-host usage
   * throws `IncompatibleHostnameMultiProcessMutexError`).
   */
  #cleanupDeadProcessTempFiles(): void {
    const parentDir = path.dirname(this.#lockFilePath);
    const baseName = path.basename(this.#lockFilePath);
    const prefix = `${baseName}.tmp-`;

    try {
      const entries = fs.readdirSync(parentDir);
      for (const entry of entries) {
        if (!entry.startsWith(prefix)) {
          continue;
        }

        // Parse PID from filename: {baseName}.tmp-{pid}-{sessionId}-{ts}-{rand}
        const afterPrefix = entry.slice(prefix.length);
        const pidStr = afterPrefix.split("-")[0];
        const pid = Number(pidStr);

        if (!Number.isSafeInteger(pid) || pid < 1) {
          // Can't verify liveness — leave file alone
          continue;
        }

        if (this.#isProcessAlive(pid)) {
          continue;
        }

        try {
          fs.unlinkSync(path.join(parentDir, entry));
          log(`Cleaned up orphaned temp file: ${entry}`);
        } catch {
          // Best effort
        }
      }
    } catch {
      // Best effort — parent directory may not be readable
    }
  }
}
|
||||
|
||||
/**
|
||||
* A class that implements an asynchronous mutex (mutual exclusion) lock.
|
||||
*
|
||||
* The mutex ensures that only one asynchronous operation can be executed at a time,
|
||||
* providing exclusive access to a shared resource.
|
||||
*/
|
||||
export class AsyncMutex {
|
||||
#acquired = false;
|
||||
readonly #queue: Array<() => void> = [];
|
||||
|
||||
/**
|
||||
* Acquires the mutex, running the provided function exclusively,
|
||||
* and releasing it afterwards.
|
||||
*
|
||||
* @param f The function to run.
|
||||
* @returns The result of the function.
|
||||
*/
|
||||
public async exclusiveRun<ReturnT>(
|
||||
f: () => ReturnT,
|
||||
): Promise<Awaited<ReturnT>> {
|
||||
const release = await this.#acquire();
|
||||
|
||||
try {
|
||||
return await f();
|
||||
} finally {
|
||||
await release();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Acquires the mutex, returning a function that releases it.
|
||||
*/
|
||||
async #acquire(): Promise<() => Promise<void>> {
|
||||
if (!this.#acquired) {
|
||||
this.#acquired = true;
|
||||
return async () => {
|
||||
this.#acquired = false;
|
||||
const next = this.#queue.shift();
|
||||
if (next !== undefined) {
|
||||
next();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return new Promise<() => Promise<void>>((resolve) => {
|
||||
this.#queue.push(() => {
|
||||
resolve(this.#acquire());
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user