refactor: move brother_node development artifact to dev/test-nodes subdirectory

Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
2026-03-30 17:09:06 +02:00
parent bf730dcb4a
commit 816e258d4c
11734 changed files with 2001707 additions and 0 deletions

381
dev/env/node_modules/micro-eth-signer/src/_type_test.ts generated vendored Executable file
View File

@@ -0,0 +1,381 @@
import * as P from 'micro-packed';
import * as abi from './abi/decoder.ts';
import * as typed from './typed-data.ts';
// Should not be included in npm package, just for test of typescript compilation
// NOTE: this file has no runtime assertions — every check happens at compile
// time via assertType<Expected>(value). If a type regresses, tsc fails.
const assertType = <T>(_value: T) => {};
const BytesVal = new Uint8Array();
const BigIntVal = BigInt(0);
const StringVal = 'string';
StringVal; // referenced to silence the unused-variable diagnostic
export type Bytes = Uint8Array;
// as const returns readonly stuff, remove readonly property
type Writable<T> = T extends {}
  ? {
      -readonly [P in keyof T]: Writable<T[P]>;
    }
  : T;
type A = Writable<Uint8Array>;
const _a: A = Uint8Array.from([]);
_a;
// IsEmptyArray — compile-time check that a tuple/array type is empty.
const isEmpty = <T>(a: T): abi.IsEmptyArray<T> => a as any;
assertType<true>(isEmpty([] as const));
assertType<false>(isEmpty([1] as const));
assertType<false>(isEmpty(['a', 2] as const));
assertType<false>(isEmpty(['a']));
assertType<true>(isEmpty([] as unknown as []));
assertType<false>(isEmpty([] as unknown as [number]));
assertType<false>(isEmpty([] as unknown as [string, number]));
assertType<false>(isEmpty([] as unknown as Array<string>));
assertType<false>(isEmpty([] as never[]));
assertType<false>(isEmpty([] as any[]));
// undefined counts as "empty" so functions with no inputs take no argument.
assertType<true>(isEmpty([] as unknown as undefined));
assertType<true>(isEmpty(undefined));
const t = [
  {
    type: 'constructor',
    inputs: [{ name: 'a', type: 'uint256' }],
    stateMutability: 'nonpayable',
  },
];
assertType<false>(isEmpty(t));
// Tests: mapComponent must derive the right JS type for each ABI type string.
assertType<P.CoderType<string>>(abi.mapComponent({ type: 'string' } as const));
assertType<P.CoderType<string[]>>(abi.mapComponent({ type: 'string[]' } as const));
assertType<P.CoderType<Uint8Array>>(abi.mapComponent({ type: 'bytes' } as const));
assertType<P.CoderType<Uint8Array[]>>(abi.mapComponent({ type: 'bytes[]' } as const));
assertType<P.CoderType<string>>(abi.mapComponent({ type: 'address' } as const));
assertType<P.CoderType<string[]>>(abi.mapComponent({ type: 'address[]' } as const));
assertType<P.CoderType<boolean>>(abi.mapComponent({ type: 'bool' } as const));
assertType<P.CoderType<boolean[]>>(abi.mapComponent({ type: 'bool[]' } as const));
assertType<P.CoderType<bigint>>(abi.mapComponent({ type: 'uint16' } as const));
assertType<P.CoderType<bigint[]>>(abi.mapComponent({ type: 'uint16[]' } as const));
assertType<P.CoderType<bigint>>(abi.mapComponent({ type: 'int' } as const));
assertType<P.CoderType<bigint[]>>(abi.mapComponent({ type: 'int[]' } as const));
assertType<P.CoderType<bigint>>(abi.mapComponent({ type: 'int24' } as const));
assertType<P.CoderType<bigint[]>>(abi.mapComponent({ type: 'int24[]' } as const));
assertType<P.CoderType<Uint8Array>>(abi.mapComponent({ type: 'bytes1' } as const));
assertType<P.CoderType<Uint8Array[]>>(abi.mapComponent({ type: 'bytes1[]' } as const));
assertType<P.CoderType<Uint8Array>>(abi.mapComponent({ type: 'bytes15' } as const));
assertType<P.CoderType<Uint8Array[]>>(abi.mapComponent({ type: 'bytes15[]' } as const));
// Tuples: all-named components -> struct, any unnamed component -> tuple.
assertType<P.CoderType<{ lol: bigint; wut: string }>>(
  abi.mapComponent({
    type: 'tuple',
    components: [
      { type: 'uint16', name: 'lol' },
      { type: 'string', name: 'wut' },
    ],
  } as const)
);
assertType<P.CoderType<[bigint, string]>>(
  abi.mapComponent({
    type: 'tuple',
    components: [{ type: 'uint16', name: 'lol' }, { type: 'string' }],
  } as const)
);
// Invalid / underspecified inputs degrade to unknown rather than erroring.
assertType<P.CoderType<unknown>>(abi.mapComponent({ type: 'tuple' }));
assertType<P.CoderType<unknown>>(abi.mapComponent({ type: 'int25' }));
assertType<P.CoderType<unknown>>(abi.mapComponent({ type: 'bytes0' }));
// Args
// If single arg -- use as is
assertType<abi.ArgsType<[{ type: 'bytes' }]>>(BytesVal);
// no names -> tuple
assertType<abi.ArgsType<[{ type: 'bytes' }, { type: 'uint' }]>>([BytesVal, BigIntVal]);
// has names -> struct
assertType<abi.ArgsType<[{ type: 'bytes'; name: 'lol' }, { type: 'uint'; name: 'wut' }]>>({
  lol: BytesVal,
  wut: BigIntVal,
});
// WHY?!
assertType<P.CoderType<string>>(abi.mapArgs([{ type: 'string' }] as const));
assertType<P.CoderType<Bytes>>(abi.mapArgs([{ type: 'bytes1' }] as const));
assertType<P.CoderType<[string, bigint]>>(
  abi.mapArgs([{ type: 'string' }, { type: 'uint' }] as const)
);
assertType<P.CoderType<{ lol: string; wut: bigint }>>(
  abi.mapArgs([
    { type: 'string', name: 'lol' },
    { type: 'uint', name: 'wut' },
  ] as const)
);
// Without const — literal types widen, so only coarse shapes can be derived.
assertType<P.CoderType<Record<string, unknown>>>(
  abi.mapArgs([
    { type: 'string', name: 'lol' },
    { type: 'uint', name: 'wut' },
  ])
);
assertType<P.CoderType<unknown[]>>(abi.mapArgs([{ type: 'string' }, { type: 'uint' }]));
// unfortunately, typescript cannot detect single value arr on non-const data
assertType<P.CoderType<unknown[]>>(abi.mapArgs([{ type: 'bytes1' }]));
// createContract: derives per-method encode/decode signatures from the ABI.
assertType<{
  lol: {
    encodeInput: (v: [bigint, string]) => Bytes;
    decodeOutput: (b: Bytes) => [Bytes, string];
  };
}>(
  abi.createContract([
    {
      name: 'lol',
      type: 'function',
      inputs: [{ type: 'uint' }, { type: 'string' }],
      outputs: [{ type: 'bytes' }, { type: 'address' }],
    },
  ] as const)
);
assertType<{
  lol: {
    encodeInput: (v: undefined) => Bytes;
    decodeOutput: (b: Bytes) => [Bytes, string];
  };
}>(
  abi.createContract([
    {
      name: 'lol',
      type: 'function',
      outputs: [{ type: 'bytes' }, { type: 'address' }],
    },
  ] as const)
);
assertType<{
  lol: {
    encodeInput: (v: undefined) => Bytes;
    decodeOutput: (b: Bytes) => [Bytes, string];
  };
}>(
  abi.createContract([
    {
      name: 'lol',
      type: 'function',
      inputs: [] as const,
      outputs: [{ type: 'bytes' }, { type: 'address' }],
    },
  ] as const)
);
// Passing a provider (second arg) adds network methods: call + estimateGas.
assertType<{
  lol: {
    encodeInput: (v: [bigint, string]) => Bytes;
    decodeOutput: (b: Bytes) => [Bytes, string];
    call: (v: [bigint, string]) => Promise<[Bytes, string]>;
    estimateGas: (v: [bigint, string]) => Promise<bigint>;
  };
}>(
  abi.createContract(
    [
      {
        name: 'lol',
        type: 'function',
        inputs: [{ type: 'uint' }, { type: 'string' }],
        outputs: [{ type: 'bytes' }, { type: 'address' }],
      },
    ] as const,
    1 as any
  )
);
// Without const there is not much can be derived from abi
assertType<{}>(
  abi.createContract([
    {
      name: 'lol',
      type: 'function',
      inputs: [{ type: 'uint' }, { type: 'string' }],
      outputs: [{ type: 'bytes' }, { type: 'address' }],
    },
  ])
);
const PAIR_CONTRACT = [
  {
    type: 'function',
    name: 'getReserves',
    outputs: [
      { name: 'reserve0', type: 'uint112' },
      { name: 'reserve1', type: 'uint112' },
      { name: 'blockTimestampLast', type: 'uint32' },
    ],
  },
] as const;
assertType<{
  getReserves: {
    encodeInput: () => Bytes;
    decodeOutput: (b: Bytes) => { reserve0: bigint; reserve1: bigint; blockTimestampLast: bigint };
  };
}>(abi.createContract(PAIR_CONTRACT));
const TRANSFER_EVENT = [
  {
    anonymous: false,
    inputs: [
      { indexed: true, name: 'from', type: 'address' },
      { indexed: true, name: 'to', type: 'address' },
      { indexed: false, name: 'value', type: 'uint256' },
    ],
    name: 'Transfer',
    type: 'event',
  },
] as const;
// events(): decode takes raw topics+data; topics() builds a filter (null = any).
assertType<{
  Transfer: {
    decode: (topics: string[], data: string) => { from: string; to: string; value: bigint };
    topics: (values: {
      from: string | null;
      to: string | null;
      value: bigint | null;
    }) => (string | null)[];
  };
}>(abi.events(TRANSFER_EVENT));
// Typed data (EIP-712 style type definitions)
const types = {
  Person: [
    { name: 'name', type: 'string' },
    { name: 'wallet', type: 'address' },
  ] as const,
  Mail: [
    { name: 'from', type: 'Person' },
    { name: 'to', type: 'Person' },
    { name: 'contents', type: 'string' },
  ] as const,
  Group: [
    { name: 'members', type: 'Person[]' },
    { name: 'owner', type: 'Person' },
  ] as const,
  Complex0: [
    { name: 'data', type: 'string[][]' }, // Complex array type
    { name: 'info', type: 'Mail' },
  ] as const,
  Complex1: [
    { name: 'data', type: 'string[][][]' }, // Complex array type
    { name: 'info', type: 'Mail' },
  ] as const,
  Complex: [
    { name: 'data', type: 'string[][3][]' }, // Complex array type
    { name: 'info', type: 'Mail' },
  ] as const,
} as const;
// NOTE: fields referencing a struct type come out optional (from?:, info?:).
assertType<{
  from?: { name: string; wallet: string };
  to?: { name: string; wallet: string };
  contents: string;
}>(1 as any as typed.GetType<typeof types, 'Mail'>);
assertType<{
  name: string;
  wallet: string;
}>(1 as any as typed.GetType<typeof types, 'Person'>);
assertType<{
  members: ({ name: string; wallet: string } | undefined)[];
  owner?: { name: string; wallet: string };
}>(1 as any as typed.GetType<typeof types, 'Group'>);
assertType<{
  data: string[][];
  info?: {
    from?: { name: string; wallet: string };
    to?: { name: string; wallet: string };
    contents: string;
  };
}>(1 as any as typed.GetType<typeof types, 'Complex0'>);
assertType<{
  data: string[][][];
  info?: {
    from?: { name: string; wallet: string };
    to?: { name: string; wallet: string };
    contents: string;
  };
}>(1 as any as typed.GetType<typeof types, 'Complex1'>);
// NOTE(review): 'string[][3][]' also resolves to string[][][] — fixed-size
// dimensions are erased from the derived type.
assertType<{
  data: string[][][];
  info?: {
    from?: { name: string; wallet: string };
    to?: { name: string; wallet: string };
    contents: string;
  };
}>(1 as any as typed.GetType<typeof types, 'Complex'>);
// Recursive struct types must not blow up the checker.
const recursiveTypes = {
  Node: [
    { name: 'value', type: 'string' },
    { name: 'children', type: 'Node[]' },
  ] as const,
} as const;
type NodeType = typed.GetType<typeof recursiveTypes, 'Node'>;
assertType<{
  value: string;
  children: (NodeType | undefined)[];
}>(1 as any as typed.GetType<typeof recursiveTypes, 'Node'>);
// const e = typed.encoder(types);
// e.encodeData('Person', { name: 'test', wallet: 'x' });
// e.sign({ primaryType: 'Person', message: { name: 'test', wallet: 'x' }, domain: {} }, '');
// e.encodeData('Person', { name: 'test', wallet: 1n }); // should fail
// e.sign({ primaryType: 'Person', message: {name: 'test'}, domain: {} }, ''); // should fail
// e.sign({ primaryType: 'Person', message: {name: 'test', wallet: '', s: 3}, domain: {} }, ''); // should fail
// constructor: argument list in deployContract is derived from constructor inputs.
abi.deployContract(
  [{ type: 'constructor', inputs: [], stateMutability: 'nonpayable' }] as const,
  '0x00'
);
abi.deployContract([{ type: 'constructor', stateMutability: 'nonpayable' }] as const, '0x00');
// abi.deployContract(
//   [{ type: 'constructor', stateMutability: 'nonpayable' }] as const,
//   '0x00',
//   undefined
// ); // should fail!
abi.deployContract([{ type: 'constructor', stateMutability: 'nonpayable' }], '0x00', undefined); // if we cannot infer type - it will be 'unknown' (and user forced to provide any argument, undefined is ok)
abi.deployContract(
  [
    {
      type: 'constructor',
      inputs: [{ name: 'a', type: 'uint256' }],
      stateMutability: 'nonpayable',
    },
  ] as const,
  '0x00',
  BigInt(100)
);
abi.deployContract(
  [
    {
      type: 'constructor',
      inputs: [{ name: 'a', type: 'uint256' }],
      stateMutability: 'nonpayable',
    },
  ],
  '0x00',
  BigInt(100)
);

29
dev/env/node_modules/micro-eth-signer/src/abi/common.ts generated vendored Executable file
View File

@@ -0,0 +1,29 @@
import type { ContractABI, HintFn, HookFn } from './decoder.ts';
export function addHint<T extends ContractABI>(abi: ContractABI, name: string, fn: HintFn): T {
const res = [];
for (const elm of abi) {
if (elm.name === name) res.push({ ...elm, hint: fn });
else res.push(elm);
}
return res as unknown as T;
}
/**
 * Attaches hint formatters to every function/event whose name appears in
 * `map`. Returns a shallow-copied ABI; the input array is left untouched.
 */
export function addHints<T extends ContractABI>(abi: T, map: Record<string, HintFn>): T {
  const patched = abi.map((elm) => {
    const hintable = ['event', 'function'].includes(elm.type) && elm.name && map[elm.name];
    return hintable ? { ...elm, hint: map[elm.name!] } : elm;
  });
  return patched as unknown as T;
}
/**
 * Attaches a decoding hook to the function named `name` (used e.g. to
 * post-process multicall results). Returns a shallow-copied ABI; the input
 * is not mutated.
 */
export function addHook<T extends ContractABI>(abi: T, name: string, fn: HookFn): T {
  const patched = abi.map((elm) =>
    elm.type === 'function' && elm.name === name ? { ...elm, hook: fn } : elm
  );
  return patched as unknown as T;
}

659
dev/env/node_modules/micro-eth-signer/src/abi/decoder.ts generated vendored Executable file
View File

@@ -0,0 +1,659 @@
import { keccak_256 } from '@noble/hashes/sha3';
import { bytesToHex, concatBytes, hexToBytes } from '@noble/hashes/utils';
import * as P from 'micro-packed';
import {
type IWeb3Provider,
type Web3CallArgs,
add0x,
ethHex,
omit,
strip0x,
zip,
} from '../utils.ts';
/*
There is NO network code in the file. However, a user can pass
NetProvider instance to createContract, and the method would do
network requests with the api.
There is some really crazy stuff going on here with Typescript types.
*/
function EPad<T>(p: P.CoderType<T>) {
  // Every ABI head slot is a 32-byte big-endian word; left-pad with zeros.
  return P.padLeft(32, p, P.ZeroPad);
}
// Main difference between regular array: length stored outside and offsets calculated without length
function ethArray<T>(inner: P.CoderType<T>): P.CoderType<T[]> {
  return P.wrap({
    // size undefined marks this coder as dynamic (content-dependent length).
    size: undefined,
    encodeStream: (w: P.Writer, value: T[]) => {
      // 32-byte length word first, then the packed elements.
      U256BE_LEN.encodeStream(w, value.length);
      w.bytes(P.array(value.length, inner).encode(value));
    },
    decodeStream: (r: P.Reader): T[] =>
      // Element offsets are relative to the position right after the length
      // word, hence the offsetReader rebase at the current position.
      P.array(U256BE_LEN.decodeStream(r), inner).decodeStream(r.offsetReader(r.pos)),
  });
}
// 32-byte-padded u32 big-endian word; used for both offsets (pointers) and lengths.
const PTR = EPad(P.U32BE);
// Splits 'base[]' / 'base[3]': group 1 = base type, group 3 = optional fixed length.
const ARRAY_RE = /(.+)(\[(\d+)?\])$/; // TODO: is this correct?
// Because u32 in eth is not real u32, just U256BE with limits...
const ethInt = (bits: number, signed = false) => {
  // Per ABI int<M>/uint<M> rules: M must be a multiple of 8 in (0, 256].
  if (!Number.isSafeInteger(bits) || bits <= 0 || bits % 8 !== 0 || bits > 256)
    throw new Error('ethInt: invalid numeric type');
  const _bits = BigInt(bits);
  // Always serialized as a full 32-byte word regardless of declared width.
  const inner = P.bigint(32, false, signed);
  return P.validate(
    P.wrap({
      size: inner.size,
      encodeStream: (w: P.Writer, value: bigint) => inner.encodeStream(w, value),
      decodeStream: (r: P.Reader): bigint => inner.decodeStream(r),
    }),
    (value) => {
      // TODO: validate useful for narrowing types, need to add support in types?
      // Accept plain numbers for convenience, but range-check as bigint.
      if (typeof value === 'number') value = BigInt(value);
      P.utils.checkBounds(value, _bits, !!signed);
      return value;
    }
  );
};
// Ugly hack, because tuple of pointers considered "dynamic" without any reason.
function isDyn<T>(args: P.CoderType<T>[] | Record<string, P.CoderType<T>>) {
  // A coder with size === undefined is dynamic; a collection is dynamic
  // as soon as any member is.
  const coders = Array.isArray(args) ? args : Object.values(args);
  return coders.some((coder) => coder.size === undefined);
}
// as const returns readonly stuff, remove readonly property
type Writable<T> = T extends {}
  ? {
      -readonly [P in keyof T]: Writable<T[P]>;
    }
  : T;
type ArrLike<T> = Array<T> | ReadonlyArray<T>;
// true for [] / undefined, false for any tuple/array type with elements.
export type IsEmptyArray<T> =
  T extends ReadonlyArray<any> ? (T['length'] extends 0 ? true : false) : true;
// One ABI input/output entry: { name?, type } (plus components for tuples).
export type Component<T extends string> = {
  readonly name?: string;
  readonly type: T;
};
export type NamedComponent<T extends string> = Component<T> & { readonly name: string };
export type BaseComponent = Component<string>;
export type Tuple<TC extends ArrLike<Component<string>>> = {
  readonly name?: string;
  readonly type: 'tuple';
  readonly components: TC;
};
// Basic type support
// int<M>: twos complement signed integer type of M bits, 0 < M <= 256, M % 8 == 0.
// prettier-ignore
type IntIdxType = '' | '8' | '16' | '24' | '32' | '40' | '48' | '56' |
  '64' | '72' | '80' | '88' | '96' | '104' | '112' | '120' | '128' | '136' |
  '144' | '152' | '160' | '168' | '176' | '184' | '192' | '200' | '208' | '216' |
  '224' | '232' | '240' | '248' | '256';
type UintType = `uint${IntIdxType}`;
type IntType = `int${IntIdxType}`;
type NumberType = UintType | IntType;
// bytes<M>: binary type of M bytes, 0 < M <= 32.
// prettier-ignore
type ByteIdxType = '' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' |
  '10' | '11' | '12' | '13' | '14' | '15' | '16' | '17' | '18' | '19' | '20' | '21' |
  '22' | '23' | '24' | '25' | '26' | '27' | '28' | '29' | '30' | '31' | '32';
type ByteType = `bytes${ByteIdxType}`;
// [{name: 'a', type: 'string'}, {name: 'b', type: 'uint'}] -> {a: string, b: bigint};
export type MapTuple<T> =
  T extends ArrLike<Component<string> & { name: string }>
    ? {
        [K in T[number] as K['name']]: MapType<K>;
      }
    : T extends ArrLike<Component<string>>
      ? // [{name: 'a', type: 'string'}, {type: 'uint'}] -> [string, bigint];
        {
          [K in keyof T]: T[K] extends BaseComponent ? MapType<T[K]> : unknown;
        }
      : unknown;
// Maps an ABI type string to its JS representation; arrays recurse on the base.
// prettier-ignore
export type GetType<T extends string> =
  T extends `${infer Base}[]${infer Rest}` ? GetType<`${Base}${Rest}`>[] : // 'string[]' -> 'string'[]
  T extends `${infer Base}[${number}]${infer Rest}` ? GetType<`${Base}${Rest}`>[] : // 'string[3]' -> 'string'[]
  T extends 'address' ? string :
  T extends 'string' ? string :
  T extends 'bool' ? boolean :
  T extends NumberType ? bigint :
  T extends ByteType ? Uint8Array :
  unknown; // default
// prettier-ignore
export type MapType<T extends BaseComponent> =
  T extends Tuple<Array<Component<string>>> ? MapTuple<T['components']> :
  T extends Component<infer Type> ? GetType<Type> :
  unknown; // default
// Re-use ptr for len. u32 should be enough.
const U256BE_LEN = PTR;
export type UnmapType<T> = T extends MapType<infer U> ? U : never;
// NOTE: we need as const if we want to access string as values inside types :(
/**
 * Builds a micro-packed coder for a single ABI component. Handles arrays
 * (recursively, both dynamic `T[]` and static `T[N]`), tuples (struct when all
 * components are named, positional tuple otherwise), and the elementary types
 * string/bytes/address/bool/int<M>/uint<M>/bytes<M>.
 * Throws on unknown or malformed type strings.
 */
export function mapComponent<T extends BaseComponent>(c: T): P.CoderType<MapType<Writable<T>>> {
  // Arrays (should be first one, since recursive)
  let m;
  if ((m = ARRAY_RE.exec(c.type))) {
    const inner = mapComponent({ ...c, type: m[1] });
    if (inner.size === 0)
      throw new Error('mapComponent: arrays of zero-size elements disabled (possible DoS attack)');
    // Static array: m[3] is the captured fixed length
    if (m[3] !== undefined) {
      const m3 = Number.parseInt(m[3], 10);
      if (!Number.isSafeInteger(m3)) throw new Error(`mapComponent: wrong array size=${m[3]}`);
      let out = P.array(m3, inner);
      // Static array of dynamic values should be behind pointer too, again without reason.
      if (inner.size === undefined) out = P.pointer(PTR, out);
      return out as any;
    } else {
      // Dynamic array: length-prefixed, accessed via pointer
      return P.pointer(PTR, ethArray(inner)) as any;
    }
  }
  if (c.type === 'tuple') {
    const components: (Component<string> & { name?: string })[] = (c as any).components;
    let hasNames = true;
    const args: P.CoderType<any>[] = [];
    for (let comp of components) {
      if (!comp.name) hasNames = false;
      args.push(mapComponent(comp));
    }
    let out: any;
    // If there is names for all fields -- return struct, otherwise tuple
    if (hasNames) {
      const struct: Record<string, P.CoderType<unknown>> = {};
      // Re-use the coders built above instead of calling mapComponent again:
      // re-mapping here doubled the work per nesting level, making deeply
      // nested tuples exponentially expensive.
      components.forEach((arg, i) => {
        if (struct[arg.name!]) throw new Error(`mapType: same field name=${arg.name}`);
        struct[arg.name!] = args[i];
      });
      out = P.struct(struct);
    } else out = P.tuple(args);
    // If tuple has dynamic elements it becomes dynamic too, without reason.
    if (isDyn(args)) out = P.pointer(PTR, out);
    return out;
  }
  if (c.type === 'string')
    return P.pointer(PTR, P.padRight(32, P.string(U256BE_LEN), P.ZeroPad)) as any;
  if (c.type === 'bytes')
    return P.pointer(PTR, P.padRight(32, P.bytes(U256BE_LEN), P.ZeroPad)) as any;
  if (c.type === 'address') return EPad(P.hex(20, { isLE: false, with0x: true })) as any;
  if (c.type === 'bool') return EPad(P.bool) as any;
  if ((m = /^(u?)int([0-9]+)?$/.exec(c.type)))
    return ethInt(m[2] ? +m[2] : 256, m[1] !== 'u') as any;
  if ((m = /^bytes([0-9]{1,2})$/.exec(c.type))) {
    const parsed = +m[1];
    if (!parsed || parsed > 32) throw new Error('wrong bytes<N> type');
    return P.padRight(32, P.bytes(parsed), P.ZeroPad) as any;
  }
  // JSON.stringify: interpolating the object directly would print '[object Object]'
  throw new Error(`mapComponent: unknown component=${JSON.stringify(c)}`);
}
// If only one arg -- use as is, otherwise construct tuple by tuple rules
export type ArgsType<T extends ReadonlyArray<any> | undefined> =
  IsEmptyArray<T> extends true
    ? undefined // empty arr
    : T extends ReadonlyArray<any>
      ? T['length'] extends 1 // single elm
        ? MapType<T[0]>
        : MapTuple<T>
      : MapTuple<T>;
// Because args and output are not tuple
// TODO: try merge with mapComponent
export function mapArgs<T extends ArrLike<Component<string>>>(
  args: T
): P.CoderType<ArgsType<Writable<T>>> {
  // A single argument is encoded as-is, not wrapped into a tuple/struct.
  if (args.length === 1) return mapComponent(args[0] as any) as any;
  // All components named -> struct keyed by name; otherwise positional tuple.
  const allNamed = args.every((arg) => !!arg.name);
  if (!allNamed) return P.tuple(args.map(mapComponent)) as any;
  const fields: Record<string, P.CoderType<unknown>> = {};
  for (const arg of args) {
    const name = (arg as any).name;
    if (fields[name]) throw new Error(`mapArgs: same field name=${name}`);
    fields[name] = mapComponent(arg as any) as any;
  }
  return P.struct(fields) as any;
}
export type FunctionType = Component<'function'> & {
  readonly inputs?: ReadonlyArray<Component<string>>;
  readonly outputs?: ReadonlyArray<Component<string>>;
};
// No declared outputs -> decodeOutput returns void; otherwise the mapped type.
type ContractMethodDecode<T extends FunctionType, O = ArgsType<T['outputs']>> =
  IsEmptyArray<T['outputs']> extends true
    ? {
        decodeOutput: (b: Uint8Array) => void;
      }
    : { decodeOutput: (b: Uint8Array) => O };
// No declared inputs -> encodeInput takes no argument.
type ContractMethodEncode<T extends FunctionType, I = ArgsType<T['inputs']>> =
  IsEmptyArray<T['inputs']> extends true
    ? { encodeInput: () => Uint8Array }
    : { encodeInput: (v: I) => Uint8Array };
type ContractMethodGas<T extends FunctionType, I = ArgsType<T['inputs']>> =
  IsEmptyArray<T['inputs']> extends true
    ? { estimateGas: () => Promise<bigint> }
    : { estimateGas: (v: I) => Promise<bigint> };
// call() signature varies over the four input/output emptiness combinations.
type ContractMethodCall<
  T extends FunctionType,
  I = ArgsType<T['inputs']>,
  O = ArgsType<T['outputs']>,
> =
  IsEmptyArray<T['inputs']> extends true
    ? IsEmptyArray<T['outputs']> extends true
      ? {
          // no inputs, no outputs
          call: () => Promise<void>;
        }
      : {
          // no inputs, outputs
          call: () => Promise<O>;
        }
    : IsEmptyArray<T['outputs']> extends true
      ? {
          // inputs, no outputs
          call: (v: I) => Promise<void>;
        }
      : {
          // inputs, outputs
          call: (v: I) => Promise<O>;
        };
// Offline method surface: encode/decode only.
export type ContractMethod<T extends FunctionType> = ContractMethodEncode<T> &
  ContractMethodDecode<T>;
// Network-enabled surface: adds estimateGas and call.
export type ContractMethodNet<T extends FunctionType> = ContractMethod<T> &
  ContractMethodGas<T> &
  ContractMethodCall<T>;
// Loosely-typed ABI entry as found in real-world ABI JSON.
export type FnArg = {
  readonly type: string;
  readonly name?: string;
  readonly components?: ArrLike<FnArg>;
  readonly inputs?: ArrLike<FnArg>;
  readonly outputs?: ArrLike<FnArg>;
  readonly anonymous?: boolean;
  readonly indexed?: boolean;
};
// Keeps only named function entries; everything else maps to never.
export type ContractTypeFilter<T> = {
  [K in keyof T]: T[K] extends FunctionType & { name: string } ? T[K] : never;
};
export type ContractType<T extends Array<FnArg>, N, F = ContractTypeFilter<T>> =
  F extends ArrLike<FunctionType & { name: string }>
    ? {
        [K in F[number] as K['name']]: N extends IWeb3Provider
          ? ContractMethodNet<K>
          : ContractMethod<K>;
      }
    : never;
// Canonical ABI signature string, e.g. 'transfer(address,uint256)'.
function fnSignature(o: FnArg): string {
  if (!o.type) throw new Error('ABI.fnSignature wrong argument');
  if (o.type === 'function' || o.type === 'event') {
    const inputs = (o.inputs || []).map((inp) => fnSignature(inp));
    return `${o.name || 'function'}(${inputs.join(',')})`;
  }
  if (o.type.startsWith('tuple')) {
    if (!o.components || !o.components.length) throw new Error('ABI.fnSignature wrong tuple');
    const inner = o.components.map((comp) => fnSignature(comp));
    // Tuples render as '(a,b,...)' plus any array suffix following 'tuple'.
    return `(${inner.join(',')})${o.type.slice(5)}`;
  }
  return o.type;
}
// Keccak-256 of the canonical signature, hex-encoded. For events this is the
// full 32-byte topic0; despite the name it works for functions too.
export function evSigHash(o: FnArg): string {
  return bytesToHex(keccak_256(fnSignature(o)));
}
// First 4 bytes (8 hex chars) of the hash: the function selector.
export function fnSigHash(o: FnArg): string {
  return evSigHash(o).slice(0, 8);
}
// High-level constructs for common ABI use-cases
/*
Call functions always take two args: array/obj of input values and overrides of tx params.
Output is array/obj too, but if there is a single input or output, it is processed as-is,
without wrapping in array/obj. If there is at least one named input/output (like (uint256 balance, address))
then it is processed as an object, where unnamed elements are referred to by index position.
Unfortunately it is impossible to do args/kwargs, since named arguments can come before unnamed ones.
*/
export function createContract<T extends ArrLike<FnArg>>(
  abi: T,
  net: IWeb3Provider,
  contract?: string
): ContractType<Writable<T>, IWeb3Provider>;
export function createContract<T extends ArrLike<FnArg>>(
  abi: T,
  net?: undefined,
  contract?: string
): ContractType<Writable<T>, undefined>;
export function createContract<T extends ArrLike<FnArg>>(
  abi: T,
  net?: IWeb3Provider,
  contract?: string
): ContractType<Writable<T>, undefined> {
  // Find non-uniq function names so we can handle overloads
  let nameCnt: Record<string, number> = {};
  for (let fn of abi) {
    if (fn.type !== 'function') continue;
    const name = fn.name || 'function';
    if (!nameCnt[name]) nameCnt[name] = 1;
    else nameCnt[name]++;
  }
  const res: Record<string, any> = {};
  for (let fn of abi) {
    if (fn.type !== 'function') continue;
    let name = fn.name || 'function';
    // Overloaded functions are keyed by full signature instead of bare name.
    if (nameCnt[name] > 1) name = fnSignature(fn);
    const sh = fnSigHash(fn);
    const inputs = fn.inputs && fn.inputs.length ? mapArgs(fn.inputs) : undefined;
    const outputs = fn.outputs ? mapArgs(fn.outputs) : undefined;
    const decodeOutput = (b: Uint8Array) => outputs && outputs.decode(b);
    // calldata = 4-byte selector followed by the ABI-encoded arguments.
    const encodeInput = (v: unknown) =>
      concatBytes(hexToBytes(sh), inputs ? inputs.encode(v as any) : new Uint8Array());
    res[name] = { decodeOutput, encodeInput };
    // .call and .estimateGas call network, when net is available
    if (!net) continue;
    res[name].call = async (args: unknown, overrides: Web3CallArgs = {}) => {
      if (!contract && !overrides.to) throw new Error('No contract address');
      const data = add0x(bytesToHex(encodeInput(args)));
      const callArgs = Object.assign({ to: contract, data }, overrides);
      return decodeOutput(hexToBytes(strip0x(await net.ethCall(callArgs))));
    };
    res[name].estimateGas = async (args: unknown, overrides: Web3CallArgs = {}) => {
      if (!contract && !overrides.to) throw new Error('No contract address');
      const data = add0x(bytesToHex(encodeInput(args)));
      const callArgs = Object.assign({ to: contract, data }, overrides);
      return await net.estimateGas(callArgs);
    };
  }
  return res as any;
}
// Extracts the constructor entry (if any) from the ABI tuple type.
type GetCons<T extends ArrLike<FnArg>> = Extract<T[number], { type: 'constructor' }>;
type ConstructorType = Component<'constructor'> & {
  readonly inputs?: ReadonlyArray<Component<string>>;
};
type ConsArgs<T extends ConstructorType> =
  IsEmptyArray<T['inputs']> extends true ? undefined : ArgsType<T['inputs']>;
// Builds deployment calldata: bytecode followed by ABI-encoded constructor args.
// The rest-parameter type forces exactly the arguments the constructor declares
// (none when inputs are empty, 'unknown' when the ABI is not 'as const').
export function deployContract<T extends ArrLike<FnArg>>(
  abi: T,
  bytecodeHex: string,
  ...args: GetCons<T> extends never
    ? [args: unknown]
    : ConsArgs<GetCons<T>> extends undefined
      ? []
      : [args: ConsArgs<GetCons<T>>]
): string {
  const bytecode = ethHex.decode(bytecodeHex);
  let consCall;
  for (let fn of abi) {
    if (fn.type !== 'constructor') continue;
    const inputs = fn.inputs && fn.inputs.length ? mapArgs(fn.inputs) : undefined;
    if (inputs === undefined && args !== undefined && args.length)
      throw new Error('arguments to constructor without any');
    // No-arg constructors encode as an empty (truthy) Uint8Array.
    consCall = inputs ? inputs.encode(args[0] as any) : new Uint8Array();
  }
  if (!consCall) throw new Error('constructor not found');
  return ethHex.encode(concatBytes(bytecode, consCall));
}
export type EventType = NamedComponent<'event'> & {
  readonly inputs: ReadonlyArray<Component<string>>;
};
export type ContractEventTypeFilter<T> = { [K in keyof T]: T[K] extends EventType ? T[K] : never };
// Topic filter values: each input value, or null meaning "match anything".
export type TopicsValue<T> = { [K in keyof T]: T[K] | null };
export type EventMethod<T extends EventType> = {
  decode: (topics: string[], data: string) => ArgsType<T['inputs']>;
  topics: (values: TopicsValue<ArgsType<T['inputs']>>) => (string | null)[];
};
export type ContractEventType<T extends Array<FnArg>, F = ContractEventTypeFilter<T>> =
  F extends ArrLike<EventType>
    ? {
        [K in F[number] as K['name']]: EventMethod<K>;
      }
    : never;
// TODO: try to simplify further
// Builds per-event decode/topics helpers from an ABI. Only named events are
// included; anonymous/unnamed entries are skipped.
export function events<T extends ArrLike<FnArg>>(abi: T): ContractEventType<Writable<T>> {
  let res: Record<string, any> = {};
  for (let elm of abi) {
    // Only named events supported
    if (elm.type !== 'event' || !elm.name) continue;
    const inputs = elm.inputs || [];
    let hasNames = true;
    for (let i of inputs) if (!i.name) hasNames = false;
    // Indexed inputs live in topics; non-indexed inputs live in the data blob.
    const plainInp = inputs.filter((i) => !i.indexed);
    const indexedInp = inputs.filter((i) => i.indexed);
    // Dynamic indexed types (string/bytes/tuple/arrays) are stored as their
    // keccak hash in topics and cannot be decoded back -> null packer.
    const indexed = indexedInp.map((i) =>
      !['string', 'bytes', 'tuple'].includes(i.type) && !ARRAY_RE.exec(i.type)
        ? (mapArgs([i]) as any)
        : null
    );
    const parser = mapArgs(hasNames ? plainInp : plainInp.map((i) => omit(i, 'name'))) as any;
    const sigHash = evSigHash(elm);
    res[elm.name] = {
      decode(topics: string[], _data: string) {
        const data = hexToBytes(strip0x(_data));
        if (!elm.anonymous) {
          // Non-anonymous events carry their signature hash as topic0.
          if (!topics[0]) throw new Error('No signature on non-anonymous event');
          if (strip0x(topics[0]).toLowerCase() !== sigHash) throw new Error('Wrong signature');
          topics = topics.slice(1);
        }
        if (topics.length !== indexed.length) throw new Error('Wrong topics length');
        let parsed = parser ? parser.decode(data) : hasNames ? {} : [];
        // Hashed (dynamic) indexed values are returned as the raw topic string.
        const indexedParsed = indexed.map((p, i) =>
          p ? p.decode(hexToBytes(strip0x(topics[i]))) : topics[i]
        );
        // mapArgs unwraps single args; re-wrap so merging below is uniform.
        if (plainInp.length === 1) parsed = hasNames ? { [plainInp[0].name!]: parsed } : [parsed];
        if (hasNames) {
          let res = { ...parsed };
          for (let [a, p] of zip(indexedInp, indexedParsed)) res[a.name!] = p;
          return res;
        } else return inputs.map((i) => (!i.indexed ? parsed : indexedParsed).shift());
      },
      topics(values: any[] | Record<string, any>) {
        let res = [];
        if (!elm.anonymous) res.push(add0x(sigHash));
        // We require all keys to be set, even if they are null, to be sure nothing is accidentally missed
        if ((hasNames ? Object.keys(values) : values).length !== inputs.length)
          throw new Error('Wrong topics args');
        for (let i = 0, ii = 0; i < inputs.length && ii < indexed.length; i++) {
          const [input, packer] = [inputs[i], indexed[ii]];
          if (!input.indexed) continue;
          const value = (values as any)[Array.isArray(values) ? i : inputs[i].name!];
          if (value === null) {
            // null -> wildcard topic (match any value at this position)
            res.push(null);
            continue;
          }
          let topic: string;
          if (packer) topic = bytesToHex(packer.encode(value));
          else if (['string', 'bytes'].includes(input.type)) topic = bytesToHex(keccak_256(value));
          else {
            // Dynamic arrays/tuples: topic is keccak of the packed encoding.
            let m: any, parts: Uint8Array[];
            if ((m = ARRAY_RE.exec(input.type)))
              parts = value.map((j: any) => mapComponent({ type: m[1] }).encode(j));
            else if (input.type === 'tuple' && input.components)
              parts = input.components.map((j) => (mapArgs([j]) as any).encode(value[j.name!]));
            else throw new Error('Unknown unsized type');
            topic = bytesToHex(keccak_256(concatBytes(...parts)));
          }
          res.push(add0x(topic));
          ii++;
        }
        return res;
      },
    };
  }
  return res as any;
}
// Same as 'Transaction Action' on Etherscan, provides human readable interpretation of decoded data
export type ContractABI = ReadonlyArray<FnArg & { readonly hint?: HintFn; readonly hook?: HookFn }>;
export type ContractInfo = {
  abi: 'ERC20' | 'ERC721' | 'ERC1155' | ContractABI;
  symbol?: string;
  decimals?: number;
  // For useful common contracts/exchanges
  name?: string;
  // Stable coin price against USD
  price?: number;
};
// Context passed to hint formatters (known contracts, amounts, etc.).
export type HintOpt = {
  contract?: string;
  amount?: bigint;
  contractInfo?: ContractInfo;
  contracts?: Record<string, ContractInfo>;
};
export type HintFn = (value: unknown, opt: HintOpt) => string;
export type HookFn = (
  decoder: Decoder,
  contract: string,
  info: SignatureInfo,
  opt: HintOpt
) => SignatureInfo;
// Internal registry entry for a function: selector -> packer (+ hint/hook).
type SignaturePacker = {
  name: string;
  signature: string;
  packer: P.CoderType<unknown>;
  hint?: HintFn;
  // Modifies decoder output. For multicall calls.
  hook?: HookFn;
};
// Internal registry entry for an event: topic0 -> decoder (+ hint).
type EventSignatureDecoder = {
  name: string;
  signature: string;
  decoder: (topics: string[], _data: string) => unknown;
  hint?: HintFn;
};
export type SignatureInfo = { name: string; signature: string; value: unknown; hint?: string };
export class Decoder {
  // contract address (lowercase, no 0x) -> 4-byte function selector (hex) -> decoder entry
  contracts: Record<string, Record<string, SignaturePacker>> = {};
  // function selector -> every known entry with that selector (fallback for unknown contracts)
  sighashes: Record<string, SignaturePacker[]> = {};
  // contract address -> event topic hash (hex) -> event decoder entry
  evContracts: Record<string, Record<string, EventSignatureDecoder>> = {};
  // event topic hash -> every known entry with that hash (fallback for unknown contracts)
  evSighashes: Record<string, EventSignatureDecoder[]> = {};
  // Registers all functions and all named, non-anonymous events of an ABI for a contract.
  add(contract: string, abi: ContractABI): void {
    const ev: any = events(abi);
    contract = strip0x(contract).toLowerCase();
    if (!this.contracts[contract]) this.contracts[contract] = {};
    if (!this.evContracts[contract]) this.evContracts[contract] = {};
    for (let fn of abi) {
      if (fn.type === 'function') {
        const selector = fnSigHash(fn);
        const value = {
          name: fn.name || 'function',
          signature: fnSignature(fn),
          // Functions without inputs need no argument packer.
          packer: fn.inputs && fn.inputs.length ? (mapArgs(fn.inputs) as any) : undefined,
          hint: fn.hint,
          hook: fn.hook,
        };
        this.contracts[contract][selector] = value;
        if (!this.sighashes[selector]) this.sighashes[selector] = [];
        this.sighashes[selector].push(value);
      } else if (fn.type === 'event') {
        // Anonymous/unnamed events cannot be matched by topic hash; skip them.
        if (fn.anonymous || !fn.name) continue;
        const selector = evSigHash(fn);
        const value = {
          name: fn.name,
          signature: fnSignature(fn),
          decoder: ev[fn.name]?.decode,
          hint: fn.hint,
        };
        this.evContracts[contract][selector] = value;
        if (!this.evSighashes[selector]) this.evSighashes[selector] = [];
        this.evSighashes[selector].push(value);
      }
    }
  }
  // Returns the registered function name for this calldata, or undefined when unknown.
  method(contract: string, data: Uint8Array): string | undefined {
    contract = strip0x(contract).toLowerCase();
    const sh = bytesToHex(data.slice(0, 4));
    if (!this.contracts[contract] || !this.contracts[contract][sh]) return;
    const { name } = this.contracts[contract][sh];
    return name;
  }
  // Returns: exact match, possible options of matches (array) or undefined.
  // Note that empty value possible if there is no arguments in call.
  decode(
    contract: string,
    _data: Uint8Array,
    opt: HintOpt
  ): SignatureInfo | SignatureInfo[] | undefined {
    contract = strip0x(contract).toLowerCase();
    const sh = bytesToHex(_data.slice(0, 4));
    const data = _data.slice(4);
    if (this.contracts[contract] && this.contracts[contract][sh]) {
      let { name, signature, packer, hint, hook } = this.contracts[contract][sh];
      const value = packer ? packer.decode(data) : undefined;
      let res: SignatureInfo = { name, signature, value };
      // NOTE: hint && hook fn is used only on exact match of contract!
      if (hook) res = hook(this, contract, res, opt);
      try {
        // Hints are best-effort: a throwing hint must not break decoding.
        if (hint) res.hint = hint(value, Object.assign({ contract: add0x(contract) }, opt));
      } catch (e) {}
      return res;
    }
    if (!this.sighashes[sh] || !this.sighashes[sh].length) return;
    let res: SignatureInfo[] = [];
    for (let { name, signature, packer } of this.sighashes[sh]) {
      // Candidates whose packer rejects the calldata are silently skipped.
      try {
        res.push({ name, signature, value: packer ? packer.decode(data) : undefined });
      } catch (err) {}
    }
    if (res.length) return res;
    return;
  }
  // Same contract-first / signature-fallback strategy as decode(), applied to event logs.
  decodeEvent(
    contract: string,
    topics: string[],
    data: string,
    opt: HintOpt
  ): SignatureInfo | SignatureInfo[] | undefined {
    contract = strip0x(contract).toLowerCase();
    if (!topics.length) return;
    const sh = strip0x(topics[0]);
    const event = this.evContracts[contract];
    if (event && event[sh]) {
      let { name, signature, decoder, hint } = event[sh];
      const value = decoder(topics, data);
      let res: SignatureInfo = { name, signature, value };
      try {
        if (hint) res.hint = hint(value, Object.assign({ contract: add0x(contract) }, opt));
      } catch (e) {}
      return res;
    }
    if (!this.evSighashes[sh] || !this.evSighashes[sh].length) return;
    let res: SignatureInfo[] = [];
    for (let { name, signature, decoder } of this.evSighashes[sh]) {
      try {
        res.push({ name, signature, value: decoder(topics, data) });
      } catch (err) {}
    }
    if (res.length) return res;
    return;
  }
}

7
dev/env/node_modules/micro-eth-signer/src/abi/erc1155.ts generated vendored Executable file
View File

@@ -0,0 +1,7 @@
// Multi Token Standard https://eips.ethereum.org/EIPS/eip-1155
// Minified ABI: the four standard events (ApprovalForAll, TransferBatch,
// TransferSingle, URI) plus the standard read/transfer/approval functions.
// prettier-ignore
const ABI = [
{name:'ApprovalForAll',type:'event',inputs:[{indexed:true,name:'account',type:'address'},{indexed:true,name:'operator',type:'address'},{indexed:false,name:'approved',type:'bool'},],},{name:'TransferBatch',type:'event',inputs:[{indexed:true,name:'operator',type:'address'},{indexed:true,name:'from',type:'address'},{indexed:true,name:'to',type:'address'},{indexed:false,name:'ids',type:'uint256[]'},{indexed:false,name:'values',type:'uint256[]'},],},{name:'TransferSingle',type:'event',inputs:[{indexed:true,name:'operator',type:'address'},{indexed:true,name:'from',type:'address'},{indexed:true,name:'to',type:'address'},{indexed:false,name:'id',type:'uint256'},{indexed:false,name:'value',type:'uint256'},],},{name:'URI',type:'event',inputs:[{indexed:false,name:'value',type:'string'},{indexed:true,name:'id',type:'uint256'},],},{name:'balanceOf',type:'function',inputs:[{name:'account',type:'address'},{name:'id',type:'uint256'},],outputs:[{type:'uint256'}],},{name:'balanceOfBatch',type:'function',inputs:[{name:'accounts',type:'address[]'},{name:'ids',type:'uint256[]'},],outputs:[{type:'uint256[]'}],},{name:'isApprovedForAll',type:'function',inputs:[{name:'account',type:'address'},{name:'operator',type:'address'},],outputs:[{type:'bool'}],},{name:'safeBatchTransferFrom',type:'function',inputs:[{name:'from',type:'address'},{name:'to',type:'address'},{name:'ids',type:'uint256[]'},{name:'amounts',type:'uint256[]'},{name:'data',type:'bytes'},],outputs:[],},{name:'safeTransferFrom',type:'function',inputs:[{name:'from',type:'address'},{name:'to',type:'address'},{name:'id',type:'uint256'},{name:'amount',type:'uint256'},{name:'data',type:'bytes'},],outputs:[],},{name:'setApprovalForAll',type:'function',inputs:[{name:'operator',type:'address'},{name:'approved',type:'bool'},],outputs:[],},{name:'supportsInterface',type:'function',inputs:[{name:'interfaceId',type:'bytes4'}],outputs:[{type:'bool'}],},{name:'uri',type:'function',inputs:[{name:'id',type:'uint256'}],outputs:[{type:'string'}]}
] as const;
export default ABI;

52
dev/env/node_modules/micro-eth-signer/src/abi/erc20.ts generated vendored Executable file
View File

@@ -0,0 +1,52 @@
import { createDecimal } from '../utils.ts';
import { addHints } from './common.ts';
import { type HintOpt } from './decoder.ts';
// ERC-20 ABI (minified): standard functions/events plus the non-standard
// `balances`/`allowed` accessor variants present in this entry list.
// prettier-ignore
export const ABI = [
{type:"function",name:"name",outputs:[{type:"string"}]},{type:"function",name:"totalSupply",outputs:[{type:"uint256"}]},{type:"function",name:"decimals",outputs:[{type:"uint8"}]},{type:"function",name:"symbol",outputs:[{type:"string"}]},{type:"function",name:"approve",inputs:[{name:"spender",type:"address"},{name:"value",type:"uint256"}],outputs:[{name:"success",type:"bool"}]},{type:"function",name:"transferFrom",inputs:[{name:"from",type:"address"},{name:"to",type:"address"},{name:"value",type:"uint256"}],outputs:[{name:"success",type:"bool"}]},{type:"function",name:"balances",inputs:[{type:"address"}],outputs:[{type:"uint256"}]},{type:"function",name:"allowed",inputs:[{type:"address"},{type:"address"}],outputs:[{type:"uint256"}]},{type:"function",name:"balanceOf",inputs:[{name:"owner",type:"address"}],outputs:[{name:"balance",type:"uint256"}]},{type:"function",name:"transfer",inputs:[{name:"to",type:"address"},{name:"value",type:"uint256"}],outputs:[{name:"success",type:"bool"}]},{type:"function",name:"allowance",inputs:[{name:"owner",type:"address"},{name:"spender",type:"address"}],outputs:[{name:"remaining",type:"uint256"}]},{name:"Approval",type:"event",anonymous:false,inputs:[{indexed:true,name:"owner",type:"address"},{indexed:true,name:"spender",type:"address"},{indexed:false,name:"value",type:"uint256"}]},{name:"Transfer",type:"event",anonymous:false,inputs:[{indexed:true,name:"from",type:"address"},{indexed:true,name:"to",type:"address"},{indexed:false,name:"value",type:"uint256"}]}
] as const;
// https://eips.ethereum.org/EIPS/eip-20
// Human-readable hints for ERC-20 calls/events. Each hint throws 'Not enough info'
// when the token's decimals/symbol are unknown, so the decoder can fall back
// gracefully (hints are best-effort by design).
// Formats a raw token amount as "<decimal amount> <symbol>"; throws when the
// contract info required for formatting is missing. Shared by all hints below
// to avoid repeating the guard + formatting logic five times.
const tokenAmount = (value: bigint, opt: HintOpt): string => {
  // NOTE: falsy check — decimals === 0 is rejected too, matching prior behavior.
  if (!opt.contractInfo || !opt.contractInfo.decimals || !opt.contractInfo.symbol)
    throw new Error('Not enough info');
  return `${createDecimal(opt.contractInfo.decimals).encode(value)} ${opt.contractInfo.symbol}`;
};
export const hints = {
  // approve(spender, value)
  approve(v: any, opt: HintOpt) {
    return `Allow spending ${tokenAmount(v.value, opt)} by ${v.spender}`;
  },
  // transferFrom(from, to, value)
  transferFrom(v: any, opt: HintOpt) {
    return `Transfer ${tokenAmount(v.value, opt)} from ${v.from} to ${v.to}`;
  },
  // transfer(to, value)
  transfer(v: any, opt: HintOpt) {
    return `Transfer ${tokenAmount(v.value, opt)} to ${v.to}`;
  },
  // Approval event: includes both owner and spender.
  Approval(v: any, opt: HintOpt) {
    const amount = tokenAmount(v.value, opt);
    return `Allow ${v.spender} spending up to ${amount} from ${v.owner}`;
  },
  // Transfer event: same wording as the transferFrom call hint.
  Transfer(v: any, opt: HintOpt) {
    return `Transfer ${tokenAmount(v.value, opt)} from ${v.from} to ${v.to}`;
  },
};
// ERC-20 ABI with the human-readable hints attached to matching entries.
const ERC20ABI = /* @__PURE__ */ addHints(ABI, hints);
export default ERC20ABI;

7
dev/env/node_modules/micro-eth-signer/src/abi/erc721.ts generated vendored Executable file
View File

@@ -0,0 +1,7 @@
// Non-Fungible Token Standard: https://eips.ethereum.org/EIPS/eip-721
// prettier-ignore
const ABI = [
{type:"function",name:"approve",inputs:[{name:"approved",type:"address"},{name:"tokenId",type:"uint256"}]},{type:"function",name:"balanceOf",inputs:[{name:"owner",type:"address"}],outputs:[{type:"uint256"}]},{type:"function",name:"ownerOf",inputs:[{name:"tokenId",type:"uint256"}],outputs:[{name:"owner",type:"address"}]},{type:"function",name:"getApproved",inputs:[{name:"tokenId",type:"uint256"}],outputs:[{type:"address"}]},{type:"function",name:"isApprovedForAll",inputs:[{name:"owner",type:"address"},{name:"operator",type:"address"}],outputs:[{type:"bool"}]},{type:"function",name:"safeTransferFrom",inputs:[{name:"from",type:"address"},{name:"to",type:"address"},{name:"tokenId",type:"uint256"}]},{type:"function",name:"safeTransferFrom",inputs:[{name:"from",type:"address"},{name:"to",type:"address"},{name:"tokenId",type:"uint256"},{name:"data",type:"bytes"}]},{type:"function",name:"setApprovalForAll",inputs:[{name:"operator",type:"address"},{name:"approved",type:"bool"}]},{type:"function",name:"supportsInterface",inputs:[{name:"interfaceID",type:"bytes4"}],outputs:[{type:"bool"}]},{type:"function",name:"transferFrom",inputs:[{name:"from",type:"address"},{name:"to",type:"address"},{name:"tokenId",type:"uint256"}]},{type:"function",name:"tokenByIndex",inputs:[{name:"index",type:"uint256"}],outputs:[{type:"uint256"}]},{type:"function",name:"tokenOfOwnerByIndex",inputs:[{name:"owner",type:"address"},{name:"index",type:"uint256"}],outputs:[{type:"uint256"}]},{type:"function",name:"totalSupply",outputs:[{type:"uint256"}]},{type:"function",name:"name",outputs:[{name:"name",type:"string"}]},{type:"function",name:"symbol",outputs:[{name:"symbol",type:"string"}]},{type:"function",name:"tokenURI",inputs:[{name:"tokenId",type:"uint256"}],outputs:[{type:"string"}]},{name:"Transfer",type:"event",inputs:[{indexed:true,name:"from",type:"address"},{indexed:true,name:"to",type:"address"},{indexed:true,name:"tokenId",type:"uint256"}]},{name:"Approval",type:"event",inputs:[{indexed:true,
name:"owner",type:"address"},{indexed:true,name:"spender",type:"address"},{indexed:true,name:"tokenId",type:"uint256"}]},{name:"ApprovalForAll",type:"event",inputs:[{indexed:true,name:"owner",type:"address"},{indexed:true,name:"spender",type:"address"},{indexed:false,name:"approved",type:"bool"}]}
] as const;
export default ABI;

161
dev/env/node_modules/micro-eth-signer/src/abi/index.ts generated vendored Executable file
View File

@@ -0,0 +1,161 @@
import { addr } from '../address.ts';
import { Transaction } from '../index.ts';
import { ethHex } from '../utils.ts';
import {
type ContractABI,
type ContractInfo,
Decoder,
createContract,
deployContract,
events,
} from './decoder.ts';
import { default as ERC1155 } from './erc1155.ts';
import { default as ERC20 } from './erc20.ts';
import { default as ERC721 } from './erc721.ts';
import { default as KYBER_NETWORK_PROXY, KYBER_NETWORK_PROXY_CONTRACT } from './kyber.ts';
import { default as UNISWAP_V2_ROUTER, UNISWAP_V2_ROUTER_CONTRACT } from './uniswap-v2.ts';
import { default as UNISWAP_V3_ROUTER, UNISWAP_V3_ROUTER_CONTRACT } from './uniswap-v3.ts';
import { default as WETH, WETH_CONTRACT } from './weth.ts';
// We need to export raw contracts, because 'CONTRACTS' object requires to know address it is not static type
// so it cannot be re-used in createContract with nice types.
export {
ERC1155,
ERC20,
ERC721,
KYBER_NETWORK_PROXY_CONTRACT,
UNISWAP_V2_ROUTER_CONTRACT,
UNISWAP_V3_ROUTER_CONTRACT,
WETH,
};
export { Decoder, createContract, deployContract, events };
// Export decoder related types
export type { ContractABI, ContractInfo };
// Registry of well-known ERC-20 tokens keyed by contract address.
// Row format: [symbol, contract address, decimals (default 18), USD price for stablecoins].
export const TOKENS: Record<string, ContractInfo> = /* @__PURE__ */ (() => {
  const rows: [string, string, number?, number?][] = [
    ['UNI', '0x1f9840a85d5af5bf1d1762f925bdaddc4201f984'],
    ['BAT', '0x0d8775f648430679a709e98d2b0cb6250d2887ef'],
    // Required for Uniswap multi-hop routing
    ['USDT', '0xdac17f958d2ee523a2206206994597c13d831ec7', 6, 1],
    ['USDC', '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48', 6, 1],
    ['WETH', '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'],
    ['WBTC', '0x2260fac5e5542a773aa44fbcfedf7c193bc2c599', 8],
    ['DAI', '0x6b175474e89094c44da98b954eedeac495271d0f', 18, 1],
    ['COMP', '0xc00e94cb662c3520282e6f5717214004a7f26888'],
    ['MKR', '0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2'],
    ['AMPL', '0xd46ba6d942050d489dbd938a2c909a5d5039a161', 9],
  ];
  const registry: Record<string, ContractInfo> = {};
  for (const [symbol, contract, decimals, price] of rows)
    registry[contract] = { abi: 'ERC20', symbol, decimals: decimals || 18, price };
  return Object.freeze(registry);
})();
// <address, contractInfo>
// Default registry: known routers/exchanges, then ERC-20 tokens; WETH is spread
// last so its richer entry overrides the plain token entry for the same address.
export const CONTRACTS: Record<string, ContractInfo> = /* @__PURE__ */ (() =>
  Object.freeze({
    [UNISWAP_V2_ROUTER_CONTRACT]: { abi: UNISWAP_V2_ROUTER, name: 'UNISWAP V2 ROUTER' },
    [KYBER_NETWORK_PROXY_CONTRACT]: { abi: KYBER_NETWORK_PROXY, name: 'KYBER NETWORK PROXY' },
    [UNISWAP_V3_ROUTER_CONTRACT]: { abi: UNISWAP_V3_ROUTER, name: 'UNISWAP V3 ROUTER' },
    ...TOKENS,
    [WETH_CONTRACT]: { abi: WETH, name: 'WETH Token', decimals: 18, symbol: 'WETH' },
  }))();
export const tokenFromSymbol = (
symbol: string
): {
contract: string;
} & ContractInfo => {
for (let c in TOKENS) {
if (TOKENS[c].symbol === symbol) return Object.assign({ contract: c }, TOKENS[c]);
}
throw new Error('unknown token');
};
// Resolves ContractInfo.abi to a concrete ABI: named ABIs are mapped to the
// bundled definitions, inline ABIs are returned unchanged.
const getABI = (info: ContractInfo) => {
  if (typeof info.abi !== 'string') return info.abi;
  switch (info.abi) {
    case 'ERC20':
      return ERC20;
    case 'ERC721':
      return ERC721;
    default:
      throw new Error(`getABI: unknown abi type=${info.abi}`);
  }
};
// Options shared by the high-level decode helpers below.
export type DecoderOpt = {
  customContracts?: Record<string, ContractInfo>; // extra contracts; override defaults
  noDefault?: boolean; // don't add default contracts
};
// TODO: export? Seems useful enough
// We cannot have this inside decoder itself,
// since it will create dependencies on all default contracts
const getDecoder = (opt: DecoderOpt = {}) => {
  const decoder = new Decoder();
  const contracts: Record<string, ContractInfo> = {};
  // Start from the built-in registry unless explicitly disabled.
  if (!opt.noDefault) Object.assign(contracts, CONTRACTS);
  // User-supplied contracts override defaults; keys are normalized to lowercase.
  if (opt.customContracts) {
    for (const [key, info] of Object.entries(opt.customContracts))
      contracts[key.toLowerCase()] = info;
  }
  // Validate every entry before registering it with the decoder.
  for (const [address, c] of Object.entries(contracts)) {
    if (!addr.isValid(address)) throw new Error(`getDecoder: invalid contract address=${address}`);
    if (c.symbol !== undefined && typeof c.symbol !== 'string')
      throw new Error(`getDecoder: wrong symbol type=${c.symbol}`);
    if (c.decimals !== undefined && !Number.isSafeInteger(c.decimals))
      throw new Error(`getDecoder: wrong decimals type=${c.decimals}`);
    if (c.name !== undefined && typeof c.name !== 'string')
      throw new Error(`getDecoder: wrong name type=${c.name}`);
    if (c.price !== undefined && typeof c.price !== 'number')
      throw new Error(`getDecoder: wrong price type=${c.price}`);
    decoder.add(address, getABI(c)); // validates c.abi
  }
  return { decoder, contracts };
};
// These methods are for case when user wants to inspect tx/logs/receipt,
// but doesn't know anything about which contract is used. If you work with
// specific contract it is better to use 'createContract' which will return nice types.
// 'to' can point to specific known contract, but also can point to any address (it is part of tx)
// 'to' should be part of real tx you want to parse, not hardcoded contract!
// Even if contract is unknown, we still try to process by known function signatures
// from other contracts.
// Can be used to parse tx or 'eth_getTransactionReceipt' output
export const decodeData = (to: string, data: string, amount?: bigint, opt: DecoderOpt = {}) => {
  if (!addr.isValid(to)) throw new Error(`decodeData: wrong to=${to}`);
  if (amount !== undefined && typeof amount !== 'bigint')
    throw new Error(`decodeData: wrong amount=${amount}`);
  const { decoder, contracts } = getDecoder(opt);
  // Full contracts list is passed, since exchange hints may need info about
  // other contracts (tokens); amount is optional, but some hints (e.g.
  // exchanging eth to tokens) won't work without it.
  const hintOpt = {
    contract: to,
    contracts,
    contractInfo: contracts[to.toLowerCase()],
    amount,
  };
  return decoder.decode(to, ethHex.decode(data), hintOpt);
};
// Requires deps on tx, but nicer API.
// Doesn't cover all use cases of decodeData, since it can't parse 'eth_getTransactionReceipt'
export const decodeTx = (transaction: string, opt: DecoderOpt = {}) => {
  const { raw } = Transaction.fromHex(transaction);
  return decodeData(raw.to, raw.data, raw.value, opt);
};
// Parses output of eth_getLogs/eth_getTransactionReceipt
export const decodeEvent = (to: string, topics: string[], data: string, opt: DecoderOpt = {}) => {
  if (!addr.isValid(to)) throw new Error(`decodeEvent: wrong to=${to}`);
  const { decoder, contracts } = getDecoder(opt);
  const hintOpt = {
    contract: to,
    contracts,
    contractInfo: contracts[to.toLowerCase()],
    // amount here is not used by our hooks. Should we ask it for consistency?
  };
  return decoder.decodeEvent(to, topics, data, hintOpt);
};

41
dev/env/node_modules/micro-eth-signer/src/abi/kyber.ts generated vendored Executable file
View File

@@ -0,0 +1,41 @@
import { createDecimal } from '../utils.ts';
import { addHints } from './common.ts';
import { type HintOpt } from './decoder.ts';
// prettier-ignore
const _ABI = [
{type:"function",name:"getExpectedRate",inputs:[{name:"src",type:"address"},{name:"dest",type:"address"},{name:"srcQty",type:"uint256"}],outputs:[{name:"expectedRate",type:"uint256"},{name:"worstRate",type:"uint256"}]},{type:"function",name:"getExpectedRateAfterFee",inputs:[{name:"src",type:"address"},{name:"dest",type:"address"},{name:"srcQty",type:"uint256"},{name:"platformFeeBps",type:"uint256"},{name:"hint",type:"bytes"}],outputs:[{name:"expectedRate",type:"uint256"}]},{type:"function",name:"trade",inputs:[{name:"src",type:"address"},{name:"srcAmount",type:"uint256"},{name:"dest",type:"address"},{name:"destAddress",type:"address"},{name:"maxDestAmount",type:"uint256"},{name:"minConversionRate",type:"uint256"},{name:"platformWallet",type:"address"}],outputs:[{type:"uint256"}]},{type:"function",name:"tradeWithHint",inputs:[{name:"src",type:"address"},{name:"srcAmount",type:"uint256"},{name:"dest",type:"address"},{name:"destAddress",type:"address"},{name:"maxDestAmount",type:"uint256"},{name:"minConversionRate",type:"uint256"},{name:"walletId",type:"address"},{name:"hint",type:"bytes"}],outputs:[{type:"uint256"}]},{type:"function",name:"tradeWithHintAndFee",inputs:[{name:"src",type:"address"},{name:"srcAmount",type:"uint256"},{name:"dest",type:"address"},{name:"destAddress",type:"address"},{name:"maxDestAmount",type:"uint256"},{name:"minConversionRate",type:"uint256"},{name:"platformWallet",type:"address"},{name:"platformFeeBps",type:"uint256"},{name:"hint",type:"bytes"}],outputs:[{name:"destAmount",type:"uint256"}]}
] as const;
const _10n = BigInt(10);
const hints = {
  // Human-readable summary for KyberNetworkProxy.tradeWithHintAndFee.
  tradeWithHintAndFee(v: any, opt: HintOpt) {
    if (!opt.contracts) throw Error('Not enough info');
    // The 0xeee…eee sentinel address is mapped to native ETH (18 decimals);
    // everything else is looked up in the known-contracts registry.
    const tokenInfo = (c: string) =>
      c === '0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee'
        ? { symbol: 'ETH', decimals: 18 }
        : opt.contracts![c];
    const formatToken = (amount: bigint, info: any) =>
      `${createDecimal(info.decimals).encode(amount)} ${info.symbol}`;
    const [srcInfo, destInfo] = [tokenInfo(v.src), tokenInfo(v.dest)];
    if (!srcInfo || !destInfo) throw Error('Not enough info');
    // minConversionRate is treated as a 1e18 fixed-point rate (hence the +18
    // in the divisor); rescale src amount into dest token decimals.
    const destAmount =
      ((v.srcAmount as bigint) *
        (v.minConversionRate as bigint) *
        _10n ** BigInt(destInfo.decimals!)) /
      _10n ** (BigInt(srcInfo.decimals!) + BigInt(18));
    // platformFeeBps is basis points (1/10000) of the source amount.
    const fee = formatToken(
      (BigInt(v.platformFeeBps) * BigInt(v.srcAmount)) / BigInt(10000),
      srcInfo
    );
    return `Swap ${formatToken(v.srcAmount, srcInfo)} For ${formatToken(
      destAmount,
      destInfo
    )} (with platform fee: ${fee})`;
  },
};
const ABI = /* @__PURE__ */ addHints(_ABI, hints);
export default ABI;
export const KYBER_NETWORK_PROXY_CONTRACT = '0x9aab3f75489902f3a48495025729a0af77d4b11e';

89
dev/env/node_modules/micro-eth-signer/src/abi/uniswap-v2.ts generated vendored Executable file

File diff suppressed because one or more lines are too long

78
dev/env/node_modules/micro-eth-signer/src/abi/uniswap-v3.ts generated vendored Executable file

File diff suppressed because one or more lines are too long

11
dev/env/node_modules/micro-eth-signer/src/abi/weth.ts generated vendored Executable file
View File

@@ -0,0 +1,11 @@
import { addHints } from './common.ts';
import { hints as erc20hints } from './erc20.ts';
// Wrapped Ether (WETH) ABI: ERC-20 surface plus deposit/withdraw and their
// events; reuses the generic ERC-20 hints for transfer/approval entries.
// prettier-ignore
const _ABI = [
{constant:true,inputs:[],name:"name",outputs:[{name:"",type:"string"}],payable:false,stateMutability:"view",type:"function"},{constant:false,inputs:[{name:"guy",type:"address"},{name:"wad",type:"uint256"}],name:"approve",outputs:[{name:"",type:"bool"}],payable:false,stateMutability:"nonpayable",type:"function"},{constant:true,inputs:[],name:"totalSupply",outputs:[{name:"",type:"uint256"}],payable:false,stateMutability:"view",type:"function"},{constant:false,inputs:[{name:"src",type:"address"},{name:"dst",type:"address"},{name:"wad",type:"uint256"}],name:"transferFrom",outputs:[{name:"",type:"bool"}],payable:false,stateMutability:"nonpayable",type:"function"},{constant:false,inputs:[{name:"wad",type:"uint256"}],name:"withdraw",outputs:[],payable:false,stateMutability:"nonpayable",type:"function"},{constant:true,inputs:[],name:"decimals",outputs:[{name:"",type:"uint8"}],payable:false,stateMutability:"view",type:"function"},{constant:true,inputs:[{name:"",type:"address"}],name:"balanceOf",outputs:[{name:"",type:"uint256"}],payable:false,stateMutability:"view",type:"function"},{constant:true,inputs:[],name:"symbol",outputs:[{name:"",type:"string"}],payable:false,stateMutability:"view",type:"function"},{constant:false,inputs:[{name:"dst",type:"address"},{name:"wad",type:"uint256"}],name:"transfer",outputs:[{name:"",type:"bool"}],payable:false,stateMutability:"nonpayable",type:"function"},{constant:false,inputs:[],name:"deposit",outputs:[],payable:true,stateMutability:"payable",type:"function"},{constant:true,inputs:[{name:"",type:"address"},{name:"",type:"address"}],name:"allowance",outputs:[{name:"",type:"uint256"}],payable:false,stateMutability:"view",type:"function"},{payable:true,stateMutability:"payable",type:"fallback"},{anonymous:false,inputs:[{indexed:true,name:"src",type:"address"},{indexed:true,name:"guy",type:"address"},{indexed:false,name:"wad",type:"uint256"}],name:"Approval",type:"event"},{anonymous:false,inputs:[{indexed:true,name:"src",type:"address"},{in
dexed:true,name:"dst",type:"address"},{indexed:false,name:"wad",type:"uint256"}],name:"Transfer",type:"event"},{anonymous:false,inputs:[{indexed:true,name:"dst",type:"address"},{indexed:false,name:"wad",type:"uint256"}],name:"Deposit",type:"event"},{anonymous:false,inputs:[{indexed:true,name:"src",type:"address"},{indexed:false,name:"wad",type:"uint256"}],name:"Withdrawal",type:"event"}
] as const;
const ABI = /* @__PURE__ */ addHints(_ABI, erc20hints);
export default ABI;
export const WETH_CONTRACT = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2';

96
dev/env/node_modules/micro-eth-signer/src/address.ts generated vendored Executable file
View File

@@ -0,0 +1,96 @@
/*! micro-eth-signer - MIT License (c) 2021 Paul Miller (paulmillr.com) */
import { secp256k1 } from '@noble/curves/secp256k1';
import { keccak_256 } from '@noble/hashes/sha3';
import { bytesToHex } from '@noble/hashes/utils';
import { add0x, astr, ethHex, strip0x } from './utils.ts';
export const addr = {
  // Optional '0x' prefix followed by 40 hex chars; both groups are optional,
  // so parse() below decides whether missing data is an error.
  RE: /^(0[xX])?([0-9a-fA-F]{40})?$/ satisfies RegExp as RegExp,
  // Splits an address into prefix-presence flag and the 40-char hex payload.
  // Throws on malformed input (non-string, wrong length, non-hex chars).
  parse: (
    address: string,
    allowEmpty = false
  ): {
    hasPrefix: boolean;
    data: string;
  } => {
    astr(address);
    // NOTE: empty address allowed for 'to', but would be mistake for other address fields.
    // '0x' instead of null/undefined because we don't want to send contract creation tx if user
    // accidentally missed 'to' field.
    if (allowEmpty && address === '0x') return { hasPrefix: true, data: '' };
    const res = address.match(addr.RE) || [];
    const hasPrefix = res[1] != null;
    const data = res[2];
    if (!data) {
      const len = hasPrefix ? 42 : 40;
      throw new Error(`address must be ${len}-char hex, got ${address.length}-char ${address}`);
    }
    return { hasPrefix, data };
  },
  /**
   * Address checksum is calculated by hashing with keccak_256.
   * It hashes *string*, not a bytearray: keccak('beef') not keccak([0xbe, 0xef])
   * @param nonChecksummedAddress
   * @param allowEmpty - allows '0x'
   * @returns checksummed address
   */
  addChecksum: (nonChecksummedAddress: string, allowEmpty = false): string => {
    const low = addr.parse(nonChecksummedAddress, allowEmpty).data.toLowerCase();
    const hash = bytesToHex(keccak_256(low));
    let checksummed = '';
    for (let i = 0; i < low.length; i++) {
      // EIP-55: the i-th hex digit of the hash decides the case of the i-th address char.
      const hi = Number.parseInt(hash[i], 16);
      const li = low[i];
      checksummed += hi <= 7 ? li : li.toUpperCase(); // if char is 9-f, upcase it
    }
    return add0x(checksummed);
  },
  /**
   * Creates address from secp256k1 public key.
   */
  fromPublicKey: (key: string | Uint8Array): string => {
    if (!key) throw new Error('invalid public key: ' + key);
    // Address = last 20 bytes of keccak256(uncompressed pubkey without the 0x04 prefix byte).
    const pub65b = secp256k1.ProjectivePoint.fromHex(key).toRawBytes(false);
    const hashed = keccak_256(pub65b.subarray(1, 65));
    const address = bytesToHex(hashed).slice(24); // slice 24..64
    return addr.addChecksum(address);
  },
  /**
   * Creates address from ETH private key in hex or ui8a format.
   */
  fromPrivateKey: (key: string | Uint8Array): string => {
    if (typeof key === 'string') key = strip0x(key);
    return addr.fromPublicKey(secp256k1.getPublicKey(key, false));
  },
  /**
   * Generates hex string with new random private key and address. Uses CSPRNG internally.
   */
  random(): { privateKey: string; address: string } {
    const privateKey = ethHex.encode(secp256k1.utils.randomPrivateKey());
    return { privateKey, address: addr.fromPrivateKey(privateKey) };
  },
  /**
   * Verifies checksum if the address is checksummed.
   * Always returns true when the address is not checksummed.
   * @param allowEmpty - allows '0x'
   */
  isValid: (checksummedAddress: string, allowEmpty = false): boolean => {
    let parsed: { hasPrefix: boolean; data: string };
    try {
      parsed = addr.parse(checksummedAddress, allowEmpty);
    } catch (error) {
      return false;
    }
    const { data: address, hasPrefix } = parsed;
    if (!hasPrefix) return false;
    // All-lowercase or all-uppercase addresses carry no checksum — accept them.
    const low = address.toLowerCase();
    const upp = address.toUpperCase();
    if (address === low || address === upp) return true;
    return addr.addChecksum(low, allowEmpty) === checksummedAddress;
  },
};

293
dev/env/node_modules/micro-eth-signer/src/index.ts generated vendored Executable file
View File

@@ -0,0 +1,293 @@
/*! micro-eth-signer - MIT License (c) 2021 Paul Miller (paulmillr.com) */
import { keccak_256 } from '@noble/hashes/sha3';
import { bytesToHex, concatBytes, hexToBytes } from '@noble/hashes/utils';
import type { UnwrapCoder } from 'micro-packed';
import { addr } from './address.ts';
// prettier-ignore
import { RLP } from './rlp.ts';
import {
type AuthorizationItem,
type AuthorizationRequest,
type TxCoder,
type TxType,
RawTx,
TxVersions,
authorizationRequest,
decodeLegacyV,
removeSig,
sortRawData,
validateFields,
} from './tx.ts';
// prettier-ignore
import {
amounts, astr,
cloneDeep,
ethHex, ethHexNoLeadingZero,
initSig,
isBytes,
sign, strip0x, verify, weieth, weigwei
} from './utils.ts';
export { addr, weieth, weigwei };
// The file exports Transaction, but actual (RLP) parsing logic is done in `./tx`
/**
* EIP-7702 Authorizations
*/
export const authorization = {
_getHash(req: AuthorizationRequest): Uint8Array {
const msg = RLP.encode(authorizationRequest.decode(req));
return keccak_256(concatBytes(new Uint8Array([0x05]), msg));
},
sign(req: AuthorizationRequest, privateKey: string): AuthorizationItem {
astr(privateKey);
const sig = sign(this._getHash(req), ethHex.decode(privateKey));
return { ...req, r: sig.r, s: sig.s, yParity: sig.recovery };
},
getAuthority(item: AuthorizationItem): string {
const { r, s, yParity, ...req } = item;
const hash = this._getHash(req);
const sig = initSig({ r, s }, yParity);
const point = sig.recoverPublicKey(hash);
return addr.fromPublicKey(point.toHex(false));
},
};
// Transaction-related utils.
// 4 fields are required. Others are pre-filled with default values.
const TX_DEFAULTS = {
  accessList: [], // needs to be .slice()-d to create new reference
  authorizationList: [],
  chainId: BigInt(1) satisfies bigint as bigint, // mainnet
  data: '',
  gasLimit: BigInt(21000) satisfies bigint as bigint, // TODO: investigate if limit is smaller in eip4844 txs
  maxPriorityFeePerGas: (BigInt(1) * amounts.GWEI) satisfies bigint as bigint, // Reduce fingerprinting by using standard, popular value
  type: 'eip1559', // default tx version when caller does not specify one
} as const;
type DefaultField = keyof typeof TX_DEFAULTS;
type DefaultType = (typeof TX_DEFAULTS)['type'];
// Makes fields that have defaults optional while keeping the rest required.
type DefaultsOptional<T> = {
  [P in keyof T as P extends DefaultField ? P : never]?: T[P];
} & {
  [P in keyof T as P extends DefaultField ? never : P]: T[P];
};
// Human-friendly tx input: explicit 'type' plus that version's fields (defaults optional).
type HumanInputInner<T extends TxType> = DefaultsOptional<{ type: T } & TxCoder<T>>;
type HumanInputInnerDefault = DefaultsOptional<TxCoder<DefaultType>>;
type Required<T> = T extends undefined ? never : T;
// When 'type' is omitted, fall back to the default tx type's shape.
type HumanInput<T extends TxType | undefined> = T extends undefined
  ? HumanInputInnerDefault
  : HumanInputInner<Required<T>>;
type TxVersions = typeof TxVersions;
// Union of decoded tx shapes for the listed versions only.
type SpecifyVersion<T extends TxType[]> = UnwrapCoder<
  {
    [K in keyof TxVersions]: K extends T[number] ? TxVersions[K] : never;
  }[keyof TxVersions]
>;
// Union of decoded tx shapes for every version EXCEPT the listed ones.
type SpecifyVersionNeg<T extends TxType[]> = UnwrapCoder<
  Exclude<
    {
      [K in keyof TxVersions]: TxVersions[K];
    }[keyof TxVersions],
    {
      [K in keyof TxVersions]: K extends T[number] ? TxVersions[K] : never;
    }[keyof TxVersions]
  >
>;
// Changes:
// - legacy: instead of hardfork now accepts additional param chainId
// if chainId is present, we enable relay protection
// This removes hardfork param and simplifies replay protection logic
// - tx parametrized over type: you cannot access fields from different tx version
// - legacy: 'v' param is hidden in coders. Transaction operates in terms chainId and yParity.
// TODO: tx is kinda immutable, but user can change .raw values before signing
// need to think about re-validation?
export class Transaction<T extends TxType> {
  /** Transaction version tag ('legacy', 'eip2930', 'eip1559', 'eip4844', 'eip7702'). */
  readonly type: T;
  /** Raw field values; exact shape depends on `type`. */
  readonly raw: TxCoder<T>;
  /** True when both halves (r, s) of the ECDSA signature are present. */
  readonly isSigned: boolean;
  // Doesn't force any defaults, catches if fields incompatible with type
  constructor(type: T, raw: TxCoder<T>, strict = true, allowSignatureFields = true) {
    this.type = type;
    this.raw = raw;
    validateFields(type, raw, strict, allowSignatureFields);
    this.isSigned = typeof raw.r === 'bigint' && typeof raw.s === 'bigint';
  }
  // Defaults
  /**
   * Creates a transaction from user input, pre-filling missing fields with
   * TX_DEFAULTS (only those that exist on the chosen tx type).
   */
  static prepare<T extends { type: undefined }>(
    data: T & HumanInputInnerDefault,
    strict?: boolean
  ): Transaction<(typeof TX_DEFAULTS)['type']>;
  static prepare<TT extends TxType, T extends { type: TT } & HumanInput<TT>>(
    data: HumanInput<TT>,
    strict?: boolean
  ): Transaction<T['type']>;
  static prepare<T extends TxType>(data: HumanInput<T>, strict = true): Transaction<T> {
    const type = (data.type !== undefined ? data.type : TX_DEFAULTS.type) as T;
    if (!TxVersions.hasOwnProperty(type)) throw new Error(`wrong transaction type=${type}`);
    const coder = TxVersions[type];
    const fields = new Set(coder.fields as string[]);
    // Copy default fields, but only if the field is present on the tx type.
    const raw: Record<string, any> = { type };
    for (const f in TX_DEFAULTS) {
      if (f !== 'type' && fields.has(f)) {
        raw[f] = TX_DEFAULTS[f as DefaultField];
        // Arrays in defaults are shared; clone so txs don't alias each other.
        if (['accessList', 'authorizationList'].includes(f)) raw[f] = cloneDeep(raw[f]);
      }
    }
    // Copy all fields, so we can validate unexpected ones.
    return new Transaction(type, sortRawData(Object.assign(raw, data)), strict, false);
  }
  /**
   * Creates transaction which sends whole account balance. Does two things:
   * 1. `amount = accountBalance - maxFeePerGas * gasLimit`
   * 2. `maxPriorityFeePerGas = maxFeePerGas`
   *
   * Every eth block sets a fee for all its transactions, called base fee.
   * maxFeePerGas indicates how much gas user is able to spend in the worst case.
   * If the block's base fee is 5 gwei, while user is able to spend 10 gwei in maxFeePerGas,
   * the transaction would only consume 5 gwei. That means, base fee is unknown
   * before the transaction is included in a block.
   *
   * By setting priorityFee to maxFee, we make the process deterministic:
   * `maxFee = 10, maxPriority = 10, baseFee = 5` would always spend 10 gwei.
   * In the end, the balance would become 0.
   *
   * WARNING: using the method would decrease privacy of a transfer, because
   * payments for services have specific amounts, and not *the whole amount*.
   * @param accountBalance - account balance in wei
   * @param burnRemaining - send unspent fee to miners. When false, some "small amount" would remain
   * @returns new transaction with adjusted amounts
   */
  setWholeAmount(accountBalance: bigint, burnRemaining = true): Transaction<T> {
    const _0n = BigInt(0);
    if (typeof accountBalance !== 'bigint' || accountBalance <= _0n)
      throw new Error('account balance must be bigger than 0');
    const fee = this.fee;
    const amountToSend = accountBalance - fee;
    if (amountToSend <= _0n) throw new Error('account balance must be bigger than fee of ' + fee);
    const raw = { ...this.raw, value: amountToSend };
    // Legacy/eip2930 have a single gasPrice, so "burn remaining" only applies to fee-market txs.
    if (!['legacy', 'eip2930'].includes(this.type) && burnRemaining) {
      const r = raw as SpecifyVersionNeg<['legacy', 'eip2930']>;
      r.maxPriorityFeePerGas = r.maxFeePerGas;
    }
    return new Transaction(this.type, raw);
  }
  /** Decodes an RLP-encoded transaction (any supported version). */
  static fromRawBytes(
    bytes: Uint8Array,
    strict = false
  ): Transaction<'legacy' | 'eip2930' | 'eip1559' | 'eip4844' | 'eip7702'> {
    const raw = RawTx.decode(bytes);
    return new Transaction(raw.type, raw.data, strict);
  }
  /** Decodes a 0x-prefixed hex-encoded transaction. */
  static fromHex(
    hex: string,
    strict = false
  ): Transaction<'eip1559' | 'legacy' | 'eip2930' | 'eip4844' | 'eip7702'> {
    return Transaction.fromRawBytes(ethHexNoLeadingZero.decode(hex), strict);
  }
  private assertIsSigned() {
    if (!this.isSigned) throw new Error('expected signed transaction');
  }
  /**
   * Converts transaction to RLP.
   * @param includeSignature whether to include signature
   */
  toRawBytes(includeSignature: boolean = this.isSigned): Uint8Array {
    // cloneDeep is not necessary here
    let data = Object.assign({}, this.raw);
    if (includeSignature) {
      this.assertIsSigned();
    } else {
      removeSig(data);
    }
    return RawTx.encode({ type: this.type, data } as any); // TODO: remove any
  }
  /**
   * Converts transaction to hex.
   * @param includeSignature whether to include signature
   */
  toHex(includeSignature: boolean = this.isSigned): string {
    return ethHex.encode(this.toRawBytes(includeSignature));
  }
  /** Calculates keccak-256 hash of signed transaction. Used in block explorers. */
  get hash(): string {
    this.assertIsSigned();
    return bytesToHex(this.calcHash(true));
  }
  /** Returns sender's address. */
  get sender(): string {
    return this.recoverSender().address;
  }
  /**
   * For legacy transactions, but can be used with libraries when yParity presented as v.
   */
  get v(): bigint | undefined {
    return decodeLegacyV(this.raw);
  }
  // keccak-256 over the RLP encoding (with or without the signature fields)
  private calcHash(includeSignature: boolean): Uint8Array {
    return keccak_256(this.toRawBytes(includeSignature));
  }
  /** Calculates MAXIMUM fee in wei that could be spent. */
  get fee(): bigint {
    const { type, raw } = this;
    // Fee calculation is not exact, real fee can be smaller
    let gasFee;
    if (type === 'legacy' || type === 'eip2930') {
      // Because TypeScript is not smart enough to narrow down types here :(
      const r = raw as SpecifyVersion<['legacy', 'eip2930']>;
      gasFee = r.gasPrice;
    } else {
      const r = raw as SpecifyVersionNeg<['legacy', 'eip2930']>;
      // maxFeePerGas is absolute limit, you never pay more than that
      // maxFeePerGas = baseFeePerGas[*2] + maxPriorityFeePerGas
      gasFee = r.maxFeePerGas;
    }
    // TODO: how to calculate 4844 fee?
    return raw.gasLimit * gasFee;
  }
  /** Deep copy of the transaction (raw fields are cloned). */
  clone(): Transaction<T> {
    return new Transaction(this.type, cloneDeep(this.raw));
  }
  /** Verifies the signature against the unsigned-tx hash and the recovered public key. */
  verifySignature(): boolean {
    this.assertIsSigned();
    const { r, s } = this.raw;
    return verify(
      { r: r!, s: s! },
      this.calcHash(false),
      hexToBytes(this.recoverSender().publicKey)
    );
  }
  /** Returns an unsigned copy of this transaction. */
  removeSignature(): Transaction<T> {
    return new Transaction(this.type, removeSig(cloneDeep(this.raw)));
  }
  /**
   * Signs transaction with a private key.
   * @param privateKey key in hex or Uint8Array format
   * @param opts extraEntropy will increase security of sig by mixing rfc6979 randomness
   * @returns new "same" transaction, but signed
   */
  signBy(
    privateKey: string | Uint8Array,
    extraEntropy: boolean | Uint8Array = true
  ): Transaction<T> {
    if (this.isSigned) throw new Error('expected unsigned transaction');
    const priv = isBytes(privateKey) ? privateKey : hexToBytes(strip0x(privateKey));
    const hash = this.calcHash(false);
    const { r, s, recovery } = sign(hash, priv, extraEntropy);
    const sraw = Object.assign(cloneDeep(this.raw), { r, s, yParity: recovery });
    // The copied result is validated in non-strict way, strict is only for user input.
    return new Transaction(this.type, sraw, false);
  }
  /** Calculates public key and address from signed transaction's signature. */
  recoverSender(): { publicKey: string; address: string } {
    this.assertIsSigned();
    const { r, s, yParity } = this.raw;
    const sig = initSig({ r: r!, s: s! }, yParity!);
    // Will crash on 'chainstart' hardfork
    if (sig.hasHighS()) throw new Error('invalid s');
    const point = sig.recoverPublicKey(this.calcHash(false));
    return { publicKey: point.toHex(true), address: addr.fromPublicKey(point.toHex(false)) };
  }
}

341
dev/env/node_modules/micro-eth-signer/src/kzg.ts generated vendored Executable file
View File

@@ -0,0 +1,341 @@
import { bitLen, bytesToNumberBE, numberToBytesBE } from '@noble/curves/abstract/utils';
import { bls12_381 as bls } from '@noble/curves/bls12-381';
import { sha256 } from '@noble/hashes/sha256';
import { bytesToHex, utf8ToBytes } from '@noble/hashes/utils';
import { add0x, hexToNumber, strip0x } from './utils.ts';
/*
KZG for [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844).
Docs:
- https://github.com/ethereum/c-kzg-4844
- https://github.com/ethereum/consensus-specs/blob/dev/specs/deneb/polynomial-commitments.md
TODO(high-level):
- data converted into blob by prepending 0x00 prefix on each chunk and ends with 0x80 terminator
- Unsure how generic is this
- There are up to 6 blob per tx
- Terminator only added to the last blob
- sidecar: {blob, commitment, proof}
- Calculate versionedHash from commitment, which is included inside of tx
- if 'sidecars' inside of tx enabled:
- envelope turns into 'wrapper'
- rlp([tx, blobs, commitments, proofs])
- this means there are two eip4844 txs: with sidecars and without
TODO(EIP7594):
https://eips.ethereum.org/EIPS/eip-7594
compute_cells_and_kzg_proofs(cells, proofs, blob);
recover_cells_and_kzg_proofs(recovered_cells, recovered_proofs, cell_indices, cells, num_cells);
verify_cell_kzg_proof_batch(commitments_bytes, cell_indices, cells, proofs_bytes, num_cells);
*/
// Scalar field (Fr) and pairing target field (Fp12) of BLS12-381.
const { Fr, Fp12 } = bls.fields;
const G1 = bls.G1.ProjectivePoint;
const G2 = bls.G2.ProjectivePoint;
type G1Point = typeof bls.G1.ProjectivePoint.BASE;
type G2Point = typeof bls.G2.ProjectivePoint.BASE;
// Scalars and blobs accept hex strings as well as bigints.
type Scalar = string | bigint;
type Blob = string | string[] | bigint[];
// Splits a blob hex string into 64-hex-char (32-byte) field element chunks.
const BLOB_REGEX = /.{1,64}/g; // TODO: is this valid?
/** Parses a scalar (0x-hex string or bigint) and validates it as a canonical Fr element. */
function parseScalar(s: Scalar): bigint {
  let value: bigint;
  if (typeof s !== 'string') {
    value = s;
  } else {
    const hex = strip0x(s);
    // Fixed-width encoding only: exactly Fr.BYTES bytes of hex.
    if (hex.length !== 2 * Fr.BYTES) throw new Error('parseScalar: wrong format');
    value = BigInt(`0x${hex}`);
  }
  if (!Fr.isValid(value)) throw new Error('parseScalar: invalid field element');
  return value;
}
/** Encodes a field element as a 0x-prefixed, fixed-width big-endian hex string. */
function formatScalar(n: bigint) {
  const bytes = numberToBytesBE(n, Fr.BYTES);
  return add0x(bytesToHex(bytes));
}
/** True when x is a power of two (1, 2, 4, ...); false for 0. */
function isPowerOfTwo(x: number) {
  if (x === 0) return false;
  // A power of two has exactly one bit set, so x & (x-1) clears it to zero.
  return (x & (x - 1)) === 0;
}
/** Reverses the lowest `bits` bits of n (e.g. 0b0001 over 4 bits -> 0b1000). */
function reverseBits(n: number, bits: number): number {
  let out = 0;
  let rest = n;
  for (let i = 0; i < bits; i++) {
    out = (out << 1) | (rest & 1);
    rest >>>= 1;
  }
  return out;
}
// FFTish stuff, reverses bit in index
/**
 * Reorders `values` so element i lands at position bit-reverse(i) —
 * the standard FFT input permutation.
 * @param values - array whose length must be a power of two and > 1
 * @returns new array in bit-reversal permutation order
 * @throws when values.length is not a power of 2 greater than 1
 */
function bitReversalPermutation<T>(values: T[]): T[] {
  const n = values.length;
  if (n < 2 || !isPowerOfTwo(n))
    throw new Error(`n must be a power of 2 and greater than 1. Got ${n}`);
  // n is a verified power of two, so Math.log2 is exact here; this avoids
  // the external bitLen(BigInt(n)) - 1 round-trip for the same value.
  const bits = Math.log2(n);
  const res = new Array(n);
  for (let i = 0; i < n; i++) res[reverseBits(i, bits)] = values[i];
  return res;
}
// Computes `count` roots of unity in Fr (w^count == 1) and returns them in
// bit-reversal permutation order, matching the lagrange-basis setup.
function computeRootsOfUnity(count: number) {
  if (count < 2) throw new Error('expected at least two roots');
  // 7 is the primitive root of unity for BLS12-381's Fr used by the KZG spec.
  const PRIMITIVE_ROOT_OF_UNITY = 7;
  const order = BigInt(Math.log2(count));
  // ROOT = 7^((r-1) / 2^order) has multiplicative order `count`.
  const power = (Fr.ORDER - BigInt(1)) / BigInt(2) ** order;
  const ROOT = Fr.pow(BigInt(PRIMITIVE_ROOT_OF_UNITY), power);
  const roots = [Fr.ONE, ROOT];
  for (let i = 2; i <= count; i++) {
    roots[i] = Fr.mul(roots[i - 1], ROOT);
    if (Fr.eql(roots[i], Fr.ONE)) break; // cycled back to 1: full group generated
  }
  // Sanity: the sequence must close exactly at 1 after `count` steps.
  if (!Fr.eql(roots[roots.length - 1], Fr.ONE)) throw new Error('last root should be 1');
  roots.pop(); // drop the trailing 1 so roots.length === count
  if (roots.length !== count) throw new Error('invalid amount of roots');
  return bitReversalPermutation(roots);
}
function pairingVerify(a1: G1Point, a2: G2Point, b1: G1Point, b2: G2Point) {
// Filter-out points at infinity, because pairingBatch will throw an error
const pairs = [
{ g1: a1.negate(), g2: a2 },
{ g1: b1, g2: b2 },
].filter(({ g1, g2 }) => !G1.ZERO.equals(g1) && !G2.ZERO.equals(g2));
const f = bls.pairingBatch(pairs, true);
return Fp12.eql(f, Fp12.ONE);
}
// Official JSON format of the trusted-setup ("powers of tau") file.
export type SetupData = {
  // g1_monomial: string[]; // Not needed until EIP7594 is live
  g1_lagrange: string[];
  g2_monomial: string[];
};
/**
 * KZG from [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844).
 * Polynomial commitments over BLS12-381: commit to blobs, prove and verify evaluations.
 * @example
 * const kzg = new KZG(trustedSetupData);
 */
export class KZG {
  private readonly POLY_NUM: number; // field elements per blob, derived from setup length
  private readonly G1LB: G1Point[]; // lagrange brp
  private readonly G2M: G2Point[]; // G2 monomial points from the setup
  private readonly ROOTS_OF_UNITY: bigint[];
  // Should they be configurable?
  private readonly FIAT_SHAMIR_PROTOCOL_DOMAIN = utf8ToBytes('FSBLOBVERIFY_V1_');
  private readonly RANDOM_CHALLENGE_KZG_BATCH_DOMAIN = utf8ToBytes('RCKZGBATCH___V1_');
  private readonly POLY_NUM_BYTES: Uint8Array;
  constructor(setup: SetupData & { encoding?: 'fast_v1' }) {
    if (setup == null || typeof setup !== 'object') throw new Error('expected valid setup data');
    if (!Array.isArray(setup.g1_lagrange) || !Array.isArray(setup.g2_monomial))
      throw new Error('expected valid setup data');
    // The slowest part
    // 'fast_v1' setups store pre-validated affine coordinates, skipping curve checks.
    let fastSetup = false;
    if ('encoding' in setup) {
      fastSetup = setup.encoding === 'fast_v1';
      if (!fastSetup) throw new Error('unknown encoding ' + setup.encoding);
    }
    const G1L = setup.g1_lagrange.map(fastSetup ? this.parseG1Unchecked : this.parseG1);
    this.POLY_NUM = G1L.length;
    this.G2M = setup.g2_monomial.map(fastSetup ? this.parseG2Unchecked : this.parseG2);
    this.G1LB = bitReversalPermutation(G1L);
    this.ROOTS_OF_UNITY = computeRootsOfUnity(this.POLY_NUM);
    this.POLY_NUM_BYTES = numberToBytesBE(this.POLY_NUM, 8);
  }
  // Internal
  private parseG1(p: string | G1Point) {
    if (typeof p === 'string') p = G1.fromHex(strip0x(p));
    return p;
  }
  // fast_v1 format: 'x y' affine coordinates in hex, no curve membership check.
  private parseG1Unchecked(p: string) {
    if (typeof p !== 'string') throw new Error('string expected');
    const [x, y] = p.split(' ').map(hexToNumber);
    return G1.fromAffine({ x, y });
  }
  private parseG2(p: string) {
    return G2.fromHex(strip0x(p));
  }
  // fast_v1 format: 'x0,x1 y0,y1' Fp2 coordinates in hex, no curve membership check.
  private parseG2Unchecked(p: string) {
    const xy = strip0x(p)
      .split(' ')
      .map((c) => c.split(',').map((c) => BigInt('0x' + c))) as unknown as [bigint, bigint][];
    const x = bls.fields.Fp2.fromBigTuple(xy[0]);
    const y = bls.fields.Fp2.fromBigTuple(xy[1]);
    return G2.fromAffine({ x, y });
  }
  // Normalizes a blob (hex string or scalar array) into POLY_NUM validated field elements.
  private parseBlob(blob: Blob) {
    if (typeof blob === 'string') {
      blob = strip0x(blob);
      if (blob.length !== this.POLY_NUM * Fr.BYTES * 2) throw new Error('Wrong blob length');
      const m = blob.match(BLOB_REGEX);
      if (!m) throw new Error('Wrong blob');
      blob = m;
    }
    return blob.map(parseScalar);
  }
  // Batch field inversion that refuses to divide by zero.
  private invSafe(inverses: bigint[]) {
    inverses = Fr.invertBatch(inverses);
    for (const i of inverses) if (i === undefined) throw new Error('invSafe: division by zero');
    return inverses;
  }
  private G1msm(points: G1Point[], scalars: bigint[]) {
    // Filters zero scalars, non-const time, but improves computeProof up to x93 for empty blobs
    const _points = [];
    const _scalars = [];
    for (let i = 0; i < scalars.length; i++) {
      const s = scalars[i];
      if (Fr.is0(s)) continue;
      _points.push(points[i]);
      _scalars.push(s);
    }
    return G1.msm(_points, _scalars);
  }
  // Fiat-Shamir: hashes domain tag + sizes + blob + commitment into an Fr challenge.
  private computeChallenge(blob: bigint[], commitment: G1Point): bigint {
    const h = sha256
      .create()
      .update(this.FIAT_SHAMIR_PROTOCOL_DOMAIN)
      .update(numberToBytesBE(0, 8))
      .update(this.POLY_NUM_BYTES);
    for (const b of blob) h.update(numberToBytesBE(b, Fr.BYTES));
    h.update(commitment.toRawBytes(true));
    const res = Fr.create(bytesToNumberBE(h.digest()));
    h.destroy();
    return res;
  }
  // Evaluate polynominal at the point x.
  // Poly is in evaluation form over the roots of unity; uses the barycentric formula.
  private evalPoly(poly: bigint[], x: bigint) {
    if (poly.length !== this.POLY_NUM) throw new Error('The polynomial length is incorrect');
    const batch = [];
    for (let i = 0; i < this.POLY_NUM; i++) {
      // This enforces that we don't try inverse of zero here
      if (Fr.eql(x, this.ROOTS_OF_UNITY[i])) return poly[i];
      batch.push(Fr.sub(x, this.ROOTS_OF_UNITY[i]));
    }
    const inverses = this.invSafe(batch);
    let res = Fr.ZERO;
    for (let i = 0; i < this.POLY_NUM; i++)
      res = Fr.add(res, Fr.mul(Fr.mul(inverses[i], this.ROOTS_OF_UNITY[i]), poly[i]));
    res = Fr.div(res, Fr.create(BigInt(this.POLY_NUM)));
    res = Fr.mul(res, Fr.sub(Fr.pow(x, BigInt(this.POLY_NUM)), Fr.ONE));
    return res;
  }
  // Basic
  /** Computes the KZG proof for evaluation at z. Returns [proof (G1 hex), y = p(z) (hex)]. */
  computeProof(blob: Blob, z: bigint | string): [string, string] {
    z = parseScalar(z);
    blob = this.parseBlob(blob);
    const y = this.evalPoly(blob, z);
    const batch = [];
    let rootOfUnityPos: undefined | number;
    // Quotient polynomial q(x) = (p(x) - y) / (x - z), built in evaluation form.
    const poly = new Array(this.POLY_NUM).fill(Fr.ZERO);
    for (let i = 0; i < this.POLY_NUM; i++) {
      // When z coincides with a root of unity, that position needs special handling below.
      if (Fr.eql(z, this.ROOTS_OF_UNITY[i])) {
        rootOfUnityPos = i;
        batch.push(Fr.ONE);
        continue;
      }
      poly[i] = Fr.sub(blob[i], y);
      batch.push(Fr.sub(this.ROOTS_OF_UNITY[i], z));
    }
    const inverses = this.invSafe(batch);
    for (let i = 0; i < this.POLY_NUM; i++) poly[i] = Fr.mul(poly[i], inverses[i]);
    if (rootOfUnityPos !== undefined) {
      // z is a domain point: recompute q at that position from all other evaluations.
      poly[rootOfUnityPos] = Fr.ZERO;
      for (let i = 0; i < this.POLY_NUM; i++) {
        if (i === rootOfUnityPos) continue;
        batch[i] = Fr.mul(Fr.sub(z, this.ROOTS_OF_UNITY[i]), z);
      }
      const inverses = this.invSafe(batch);
      for (let i = 0; i < this.POLY_NUM; i++) {
        if (i === rootOfUnityPos) continue;
        poly[rootOfUnityPos] = Fr.add(
          poly[rootOfUnityPos],
          Fr.mul(Fr.mul(Fr.sub(blob[i], y), this.ROOTS_OF_UNITY[i]), inverses[i])
        );
      }
    }
    const proof = add0x(this.G1msm(this.G1LB, poly).toHex(true));
    return [proof, formatScalar(y)];
  }
  /** Verifies a single evaluation proof via the pairing check e(P - y*G1, G2) == e(proof, X - z*G2). */
  verifyProof(commitment: string, z: Scalar, y: Scalar, proof: string): boolean {
    try {
      z = parseScalar(z);
      y = parseScalar(y);
      const g2x = Fr.is0(z) ? G2.ZERO : G2.BASE.multiply(z);
      const g1y = Fr.is0(y) ? G1.ZERO : G1.BASE.multiply(y);
      const XminusZ = this.G2M[1].subtract(g2x);
      const PminusY = this.parseG1(commitment).subtract(g1y);
      return pairingVerify(PminusY, G2.BASE, this.parseG1(proof), XminusZ);
    } catch (e) {
      return false;
    }
  }
  // There are no test vectors for this
  // Verifies many proofs with one pairing by random linear combination (powers of r).
  private verifyProofBatch(commitments: G1Point[], zs: bigint[], ys: bigint[], proofs: string[]) {
    const n = commitments.length;
    const p: G1Point[] = proofs.map((i) => this.parseG1(i));
    const h = sha256
      .create()
      .update(this.RANDOM_CHALLENGE_KZG_BATCH_DOMAIN)
      .update(this.POLY_NUM_BYTES)
      .update(numberToBytesBE(n, 8));
    for (let i = 0; i < n; i++) {
      h.update(commitments[i].toRawBytes(true));
      h.update(Fr.toBytes(zs[i]));
      h.update(Fr.toBytes(ys[i]));
      h.update(p[i].toRawBytes(true));
    }
    const r = Fr.create(bytesToNumberBE(h.digest()));
    h.destroy();
    const rPowers = [];
    if (n !== 0) {
      rPowers.push(Fr.ONE);
      for (let i = 1; i < n; i++) rPowers[i] = Fr.mul(rPowers[i - 1], r);
    }
    const proofPowers = this.G1msm(p, rPowers);
    const CminusY = commitments.map((c, i) =>
      c.subtract(Fr.is0(ys[i]) ? G1.ZERO : G1.BASE.multiply(ys[i]))
    );
    const RtimesZ = rPowers.map((p, i) => Fr.mul(p, zs[i]));
    const rhs = this.G1msm(p.concat(CminusY), RtimesZ.concat(rPowers));
    return pairingVerify(proofPowers, this.G2M[1], rhs, G2.BASE);
  }
  // Blobs
  /** Commits to a blob: MSM of the blob's field elements with the lagrange setup points. */
  blobToKzgCommitment(blob: Blob): string {
    return add0x(this.G1msm(this.G1LB, this.parseBlob(blob)).toHex(true));
  }
  /** Computes the blob proof at the Fiat-Shamir challenge point derived from blob+commitment. */
  computeBlobProof(blob: Blob, commitment: string): string {
    blob = this.parseBlob(blob);
    const challenge = this.computeChallenge(blob, G1.fromHex(strip0x(commitment)));
    const [proof, _] = this.computeProof(blob, challenge);
    return proof;
  }
  /** Verifies a blob proof; returns false (never throws) on malformed input. */
  verifyBlobProof(blob: Blob, commitment: string, proof: string): boolean {
    try {
      blob = this.parseBlob(blob);
      const c = G1.fromHex(strip0x(commitment));
      const challenge = this.computeChallenge(blob, c);
      const y = this.evalPoly(blob, challenge);
      return this.verifyProof(commitment, challenge, y, proof);
    } catch (e) {
      return false;
    }
  }
  /** Batch variant of verifyBlobProof; all three arrays must have equal length. */
  verifyBlobProofBatch(blobs: string[], commitments: string[], proofs: string[]): boolean {
    if (!Array.isArray(blobs) || !Array.isArray(commitments) || !Array.isArray(proofs))
      throw new Error('invalid arguments');
    if (blobs.length !== commitments.length || blobs.length !== proofs.length) return false;
    if (blobs.length === 1) return this.verifyBlobProof(blobs[0], commitments[0], proofs[0]);
    try {
      const b = blobs.map((i) => this.parseBlob(i));
      const c = commitments.map((i) => G1.fromHex(strip0x(i)));
      const challenges = b.map((b, i) => this.computeChallenge(b, c[i]));
      const ys = b.map((_, i) => this.evalPoly(b[i], challenges[i]));
      return this.verifyProofBatch(c, challenges, ys, proofs);
    } catch (e) {
      return false;
    }
  }
  // High-level method
  // commitmentToVersionedHash(commitment: Uint8Array) {
  //   const VERSION = 1; // Currently only 1 version is supported
  //   // commitment is G1 point in hex?
  //   return concatBytes(new Uint8Array([VERSION]), sha256(commitment));
  // }
}

986
dev/env/node_modules/micro-eth-signer/src/net/archive.ts generated vendored Executable file
View File

@@ -0,0 +1,986 @@
import { ERC1155, ERC20, ERC721, WETH, createContract, events } from '../abi/index.ts';
import { Transaction } from '../index.ts';
import { TxVersions, legacySig, type AccessList } from '../tx.ts';
import { amounts, ethHex, hexToNumber, type IWeb3Provider, type Web3CallArgs } from '../utils.ts';
/*
Methods to fetch list of transactions from any ETH node RPC.
It should be easy. However, this is sparta^W ethereum, so, prepare to suffer.
The network is not directly called: `ArchiveNodeProvider#rpc` calls `Web3Provider`.
- There is no simple & fast API inside nodes, all external API create their own namespace for this
- API is different between nodes: erigon uses streaming, other nodes use pagination
- Recently, Erigon have been also adding pagination
- For token transactions: download block headers, look at bloom filter, download affected blocks
- There is a good `getLogs` API for contracts, but nothing for ETH transfers
- `trace_filter` is slow: it not only finds the transaction, but also executes them
- It's good that it allows to get internal transactions
- The whole thing could be 10x simpler if there was an event in logs for ETH transfer
- For most cases, we only need to see last transactions and know blocks of last txs, which is 20x faster
- This creates a lot of requests to node (2 per tx, 1 per block, and some more depends on block range limits)
Recommended software:
- eth-nodes-for-rent are bad, because of their limits and timeouts
- erigon nodes are fast, taking ~15 seconds per batch
- reth has 100-block limit for trace_filter, requiring 190k requests just get transactions
*/
// Utils
// Encodes a quantity as 0x-prefixed hex per Ethereum JSON-RPC ('0x0' for zero/undefined).
const ethNum = (n: number | bigint | undefined) => {
  if (!n) return '0x0';
  return `0x${n.toString(16).replace(/^0+/, '')}`;
};
// Pre-built event decoders for the token standards recognized in logs.
const ERC_TRANSFER = events(ERC20).Transfer;
const WETH_DEPOSIT = events(WETH).Deposit;
const WETH_WITHDRAW = events(WETH).Withdrawal;
const ERC721_TRANSFER = events(ERC721).Transfer;
const ERC1155_SINGLE = events(ERC1155).TransferSingle;
const ERC1155_BATCH = events(ERC1155).TransferBatch;
// Minimal ERC-165 ABI for probing contract capabilities.
const ERC165 = [
  // function supportsInterface(bytes4 interfaceID) external view returns (bool);
  {
    type: 'function',
    name: 'supportsInterface',
    inputs: [{ name: 'interfaceID', type: 'bytes4' }],
    outputs: [{ type: 'bool' }],
  },
] as const;
// ERC-165 interface ids used to probe what standards a contract implements.
// 'erc165_check' (0xffffffff) is the id a spec-compliant contract must reject.
const CONTRACT_CAPABILITIES: Record<string, string> = {
  erc165: '0x01ffc9a7',
  erc165_check: '0xffffffff',
  erc20: '0x36372b07',
  erc721: '0x80ac58cd',
  erc721_metadata: '0x5b5e139f',
  erc721_enumerable: '0x780e9d63',
  erc1155: '0xd9b67a26',
  erc1155_tokenreceiver: '0x4e2312e0',
  erc1155_metadata: '0x0e89341c',
};
/**
 * Groups items by key — either a property name or a key-extraction function.
 * @param items - elements to group
 * @param s - property name to read from each item, or a callback producing the key
 * @returns map of key -> items with that key (insertion order preserved)
 */
function group<T>(items: T[], s: string | ((i: T) => string)): Record<string, T[]> {
  const res: Record<string, T[]> = {};
  for (const i of items) {
    // Avoid `as any`: index through a safer Record view when s is a property name.
    const key = typeof s === 'function' ? s(i) : String((i as Record<string, unknown>)[s]);
    (res[key] ??= []).push(i);
  }
  return res;
}
// Output types
// Decoded eth_getBlockByNumber result (after fixBlock: quantities as number/bigint).
export type BlockInfo = {
  baseFeePerGas: bigint;
  difficulty: bigint;
  extraData: string;
  gasLimit: bigint;
  gasUsed: bigint;
  hash: string;
  logsBloom: string;
  miner: string;
  mixHash: string;
  nonce: string;
  number: number;
  parentHash: string;
  receiptsRoot: string;
  sha3Uncles: string;
  size: number;
  stateRoot: string;
  timestamp: number;
  totalDifficulty?: bigint;
  transactions: string[]; // transaction hashes (block requested with fullTransactions=false)
  transactionsRoot: string;
  uncles: string[];
};
// Single trace entry from trace_filter (after fixAction).
export type Action = {
  action: {
    from: string;
    callType: string;
    gas: bigint;
    input: string;
    to: string;
    value: bigint;
  };
  blockHash: string;
  blockNumber: number;
  result: { gasUsed: bigint; output: string };
  subtraces: number;
  traceAddress: string[];
  transactionHash: string;
  transactionPosition: number;
  type: string;
};
// Event log entry from eth_getLogs / receipts (after fixLog).
export type Log = {
  address: string;
  topics: string[];
  data: string;
  blockNumber: number;
  transactionHash: string;
  transactionIndex: number;
  blockHash: string;
  logIndex: number;
  removed: boolean;
};
// Decoded eth_getTransactionByHash result (after fixTxInfo).
export type TxInfo = {
  blockHash: string;
  blockNumber: number;
  hash: string;
  accessList?: AccessList;
  transactionIndex: number;
  type: number;
  nonce: bigint;
  input: string;
  r: bigint;
  s: bigint;
  chainId: bigint;
  v: bigint;
  yParity?: string;
  gas: bigint;
  maxPriorityFeePerGas?: bigint;
  from: string;
  to: string;
  maxFeePerGas?: bigint;
  value: bigint;
  gasPrice: bigint;
  // blobs
  maxFeePerBlobGas?: bigint;
  blobVersionedHashes?: string[];
};
// Decoded eth_getTransactionReceipt result (after fixTxReceipt).
export type TxReceipt = {
  transactionHash: string;
  blockHash: string;
  blockNumber: number;
  logsBloom: string;
  gasUsed: bigint;
  contractAddress: string | null;
  cumulativeGasUsed: bigint;
  transactionIndex: number;
  from: string;
  to: string;
  type: number;
  effectiveGasPrice: bigint;
  logs: Log[];
  status: number;
  blobGasPrice?: bigint;
  blobGasUsed?: bigint;
};
// ETH balance + nonce snapshot for an account.
export type Unspent = {
  symbol: 'ETH';
  decimals: number;
  balance: bigint;
  nonce: number;
  // useful for wallets to know if there was transactions related to wallet
  // NOTE: even if nonce is zero, there can be transfers to wallet
  // can be used to check before fetching all transactions
  active: boolean;
};
type ERC20Token = {
  abi: 'ERC20';
  name?: string;
  symbol?: string;
  decimals?: number;
  totalSupply: bigint;
};
type ERC721Token = {
  abi: 'ERC721';
  name?: string;
  symbol?: string;
  totalSupply?: bigint;
  enumerable?: boolean;
  metadata?: boolean;
};
type ERC1155Token = { abi: 'ERC1155' };
// Discriminated union over the 'abi' tag.
export type TokenInfo = { contract: string } & (ERC20Token | ERC721Token | ERC1155Token);
// Main idea: there is broken contracts that behave strange, instead of crashing we return this error.
// Separate error allows easily to discriminate between "strange contract" and "input invalid" | "network error"
// We still want to crash on network errors, but if there is per-contract error it is better to continue batched request
// like tokenTransfers
type TokenError = { contract: string; error: string };
type TokenBalanceSingle = Map<bigint, bigint>;
// This is unified type for ERC-20 | ERC-721 | ERC-1155
// ERC20: Record<contractAddress, Map<1n, tokenValue>> - ERC-20 has only single tokenId (always!)
// ERC721: Record<contractAddress, Map<tokenId, 1n>> - ERC-721 every tokenId is 0n or 1n
// ERC-1155: Record<contractAddress, Map<tokenId, tokenValue> - ERC-1155 each tokenId can have value (merge of ERC-20 and ERC-721)
export type TokenBalances = Record<string, TokenBalanceSingle | TokenError>;
// getLogs topic filter: position-wise match, null = wildcard, array = OR.
export type Topics = (string | null | (string | null)[])[];
export type Transfer = { from: string; to?: string; value: bigint };
// tokens: Map<tokenId, value> (same as balances!)
export type TokenTransfer = TokenInfo & { from: string; to: string; tokens: Map<bigint, bigint> };
export type TxTransfers = {
  // This is most interesting info about tx for wallets
  hash: string;
  timestamp?: number;
  block?: number;
  transfers: Transfer[];
  tokenTransfers: TokenTransfer[];
  reverted: boolean;
  // This contains everything about tx in raw format
  info: {
    type: keyof typeof TxVersions;
    info: TxInfo;
    receipt: TxReceipt;
    raw?: string;
    block: BlockInfo;
    actions: Action[];
  };
};
/**
 * Callbacks are needed, because we want to call getTx / getBlock / getTokenInfo
 * requests as fast as possible, to reduce amount of sequential execution.
 * If we retrieve 10 pages of transactions, we can call per tx
 * callbacks for transaction from first page before all other pages fetched.
 *
 * Ensure caching: they can be called multiple times for same tx / block.
 */
export type Callbacks = {
  txCallback?: (txHash: string) => void;
  blockCallback?: (blockNum: number) => void;
  // fixed typo in parameter name ('contrct' -> 'contract'); purely documentational, no caller impact
  contractCallback?: (contract: string) => void;
};
// Optional block range for scanning methods.
export type Pagination = { fromBlock?: number; toBlock?: number };
export type TraceOpts = Callbacks &
  Pagination & {
    perRequest?: number;
    // max blocks per trace_filter request (required by nodes like reth)
    limitTrace?: number;
  };
export type LogOpts = Callbacks &
  (
    | Pagination
    | {
        fromBlock: number;
        toBlock: number;
        limitLogs: number; // limit block range per request
      }
  );
export type Balances = {
  balances: Record<string, bigint>;
  tokenBalances: Record<string, Record<string, bigint>>;
};
export type TxInfoOpts = Callbacks & { ignoreTxRebuildErrors?: boolean };
// owner -> (spender -> allowance)
export type TxAllowances = Record<string, Record<string, bigint>>;
// Normalizes a raw RPC block response in place: hex-string quantities -> number/bigint.
function fixBlock(block: BlockInfo) {
  block.timestamp = Number(block.timestamp) * 1000; // unix seconds -> milliseconds
  block.size = Number(block.size);
  if (block.number && block.number !== null) block.number = Number(block.number);
  const bigintFields = [
    'baseFeePerGas',
    'difficulty',
    'gasLimit',
    'gasUsed',
    'totalDifficulty',
  ] as const;
  for (const field of bigintFields) {
    if (block[field] && block[field] !== null) block[field] = BigInt(block[field]);
  }
}
// Converts trace-action quantities to bigint in place and fires per-tx/per-block callbacks.
function fixAction(action: Action, opts: Callbacks = {}) {
  const { action: a, result } = action;
  a.value = BigInt(a.value);
  a.gas = BigInt(a.gas);
  result.gasUsed = BigInt(result.gasUsed);
  opts.txCallback?.(action.transactionHash);
  opts.blockCallback?.(action.blockNumber);
}
// Fixes types of a network log response in place (index fields -> number) and fires callbacks.
function fixLog(log: Log, opts: Callbacks = {}) {
  log.blockNumber = Number(log.blockNumber);
  log.transactionIndex = Number(log.transactionIndex);
  log.logIndex = Number(log.logIndex);
  opts.txCallback?.(log.transactionHash);
  opts.blockCallback?.(log.blockNumber);
  opts.contractCallback?.(log.address);
  return log;
}
// Normalizes eth_getTransactionByHash response in place: indexes -> number, quantities -> bigint.
function fixTxInfo(info: TxInfo) {
  const numFields = ['blockNumber', 'type', 'transactionIndex'] as const;
  for (const f of numFields) info[f] = Number(info[f]);
  // Optional quantity fields stay undefined/null; everything else becomes bigint.
  const bigFields = [
    'nonce',
    'r',
    's',
    'chainId',
    'v',
    'gas',
    'maxPriorityFeePerGas',
    'maxFeePerGas',
    'value',
    'gasPrice',
    'maxFeePerBlobGas',
  ] as const;
  for (const f of bigFields) {
    const value = info[f];
    if (value !== undefined && value !== null) info[f] = BigInt(value);
  }
  return info;
}
// Normalizes eth_getTransactionReceipt response in place, including its embedded logs.
function fixTxReceipt(receipt: TxReceipt) {
  const numFields = ['blockNumber', 'type', 'transactionIndex', 'status'] as const;
  for (const f of numFields) receipt[f] = Number(receipt[f]);
  const bigFields = [
    'gasUsed',
    'cumulativeGasUsed',
    'effectiveGasPrice',
    'blobGasPrice',
    'blobGasUsed',
  ] as const;
  for (const f of bigFields) {
    if (receipt[f] !== undefined) receipt[f] = BigInt(receipt[f]!);
  }
  for (const log of receipt.logs) fixLog(log);
  return receipt;
}
// Ensures callback options, when provided, are functions.
function validateCallbacks(opts: Record<string, unknown>) {
  const names = ['txCallback', 'blockCallback', 'contractCallback'];
  for (const name of names) {
    const cb = opts[name];
    if (cb === undefined) continue;
    if (typeof cb !== 'function') throw new Error(`validateCallbacks: ${name} should be function`);
  }
}
// Ensures fromBlock/toBlock, when provided, are safe integers.
function validatePagination(opts: Record<string, unknown>) {
  for (const field of ['fromBlock', 'toBlock']) {
    const value = opts[field];
    if (value === undefined || Number.isSafeInteger(value)) continue;
    throw new Error(
      `validatePagination: wrong field ${field}=${value}. Should be integer or undefined`
    );
  }
}
// Validates trace_filter options: block range, chunking limits, callbacks.
function validateTraceOpts(opts: Record<string, unknown>) {
  validatePagination(opts);
  for (const field of ['perRequest', 'limitTrace']) {
    const value = opts[field];
    if (value === undefined || Number.isSafeInteger(value)) continue;
    throw new Error(
      `validateTraceOpts: wrong field ${field}=${value}. Should be integer or undefined`
    );
  }
  // limitTrace chunks the scan by block range, so the range must be bounded.
  if (opts.limitTrace !== undefined && (opts.fromBlock === undefined || opts.toBlock === undefined))
    throw new Error('validateTraceOpts: fromBlock/toBlock required if limitTrace is present');
  validateCallbacks(opts);
}
// Validates getLogs options: block range, per-request log limit, callbacks.
function validateLogOpts(opts: Record<string, unknown>) {
  validatePagination(opts);
  const limit = opts.limitLogs;
  if (limit !== undefined && !Number.isSafeInteger(limit))
    throw new Error(`validateLogOpts: wrong field limitLogs=${limit}. Should be integer or undefined`);
  // limitLogs chunks the scan by block range, so the range must be bounded.
  if (limit !== undefined && (opts.fromBlock === undefined || opts.toBlock === undefined))
    throw new Error('validateLogOpts: fromBlock/toBlock required if limitLogs is present');
  validateCallbacks(opts);
}
// Minimal JSON-RPC transport: a single call(method, ...params) -> result promise.
export type JsonrpcInterface = {
  call: (method: string, ...args: any[]) => Promise<any>;
};
// Promise.all for objects: rejected promises resolve to `undefined` instead of throwing.
async function wait<T extends Record<string, Promise<any>>>(
  obj: T
): Promise<{ [K in keyof T]?: T[K] extends Promise<infer R> ? R : never }> {
  const entries = Object.entries(obj);
  const settled = await Promise.allSettled(entries.map(([, promise]) => promise));
  const out: Record<string, any> = {};
  for (let i = 0; i < entries.length; i++) {
    const result = settled[i];
    out[entries[i][0]] = result.status === 'fulfilled' ? result.value : undefined;
  }
  return out as { [K in keyof T]?: T[K] extends Promise<infer R> ? R : never };
}
// True when an RPC error message indicates an EVM revert (case-insensitive match).
const isReverted = (e: Error) => {
  if (!(e instanceof Error)) return false;
  return e.message.toLowerCase().includes('revert');
};
/**
* Transaction-related code around Web3Provider.
* High-level methods are `height`, `unspent`, `transfers`, `allowances` and `tokenBalances`.
*
* Low-level methods are `blockInfo`, `internalTransactions`, `ethLogs`, `tokenTransfers`, `wethTransfers`,
* `tokenInfo` and `txInfo`.
*/
export class Web3Provider implements IWeb3Provider {
  private rpc: JsonrpcInterface;
  constructor(rpc: JsonrpcInterface) {
    this.rpc = rpc;
  }
  // Raw JSON-RPC passthrough.
  call(method: string, ...args: any[]): Promise<any> {
    return this.rpc.call(method, ...args);
  }
  // eth_call at a given block tag (default 'latest').
  ethCall(args: Web3CallArgs, tag = 'latest'): Promise<any> {
    return this.rpc.call('eth_call', args, tag);
  }
  // eth_estimateGas, decoded from hex to bigint.
  async estimateGas(args: Web3CallArgs, tag = 'latest'): Promise<bigint> {
    return hexToNumber(await this.rpc.call('eth_estimateGas', args, tag));
  }
  // Timestamp is available only inside blocks
  async blockInfo(block: number): Promise<BlockInfo> {
    const res = await this.call('eth_getBlockByNumber', ethNum(block), false);
    fixBlock(res);
    return res;
  }
  // Balance + nonce for an address at 'latest', fetched in parallel.
  async unspent(address: string): Promise<Unspent> {
    let [balance, nonce] = await Promise.all([
      this.call('eth_getBalance', address, 'latest'),
      this.call('eth_getTransactionCount', address, 'latest'),
    ]);
    balance = BigInt(balance);
    nonce = BigInt(nonce);
    return {
      symbol: 'ETH',
      decimals: amounts.ETH_PRECISION,
      balance,
      nonce,
      // Note: account can be active even if nonce!==0!
      active: balance > 0 || nonce !== 0,
    };
  }
  // Current chain height (eth_blockNumber).
  async height(): Promise<number> {
    return Number.parseInt(await this.call('eth_blockNumber'));
  }
  // Single trace_filter request for from/to == address; no pagination.
  async traceFilterSingle(address: string, opts: TraceOpts = {}): Promise<any> {
    const res = await this.call('trace_filter', {
      fromBlock: ethNum(opts.fromBlock),
      toBlock: ethNum(opts.toBlock),
      toAddress: [address],
      fromAddress: [address],
    });
    for (const action of res) fixAction(action, opts);
    return res;
  }
  // All internal transactions touching `address`, either chunked by
  // opts.limitTrace (reth) or paginated via the `after` cursor per block.
  async internalTransactions(address: string, opts: TraceOpts = {}): Promise<any[]> {
    if (typeof address !== 'string') throw new Error('internalTransactions: wrong address');
    validateTraceOpts(opts);
    // For reth
    if (opts.limitTrace) {
      const promises = [];
      for (let i = opts.fromBlock!; i <= opts.toBlock!; i += opts.limitTrace)
        promises.push(
          this.traceFilterSingle(address, { fromBlock: i, toBlock: i + opts.limitTrace })
        );
      const out = [];
      for (const i of await Promise.all(promises)) out.push(...i);
      return out;
    }
    let lastBlock = opts.fromBlock || 0;
    const perBlock: Record<number, number> = {};
    const out: Action[] = [];
    for (;;) {
      const params: Record<string, any> = {
        fromBlock: ethNum(lastBlock),
        toAddress: [address],
        fromAddress: [address],
        after: perBlock[lastBlock] || 0, // we cannot just store after, since fromBlock changes to last block
      };
      if (opts.toBlock !== undefined) params.toBlock = ethNum(opts.toBlock);
      if (opts.perRequest !== undefined) params.count = opts.perRequest;
      const res = await this.call('trace_filter', params);
      if (!res.length) break;
      for (const action of res) {
        fixAction(action, opts);
        if (perBlock[action.blockNumber] === undefined) perBlock[action.blockNumber] = 0;
        perBlock[action.blockNumber]++;
        out.push(action);
        lastBlock = Math.max(lastBlock, action.blockNumber);
      }
    }
    return out;
  }
  // Probes ERC-165 supportsInterface for each capability id; a revert (or a
  // failed erc165 sanity check) means all capabilities are reported false.
  async contractCapabilities(
    address: string,
    capabilities: typeof CONTRACT_CAPABILITIES = {}
  ): Promise<{
    [k: string]: boolean;
  }> {
    const all = { ...CONTRACT_CAPABILITIES, ...capabilities };
    let c = createContract(ERC165, this, address);
    const keys = Object.keys(all);
    // TODO: what about revert?
    // if reverted -> all capabilities disabled
    try {
      const promises = await Promise.all(
        Object.values(all).map((i) => c.supportsInterface.call(ethHex.decode(i)))
      );
      const res = Object.fromEntries(keys.map((k, i) => [k, promises[i]]));
      // if somehow there is same method, but it doesn't support erc165, then it is different method!
      // erc165_check if sailsafe when there is method that always returns true
      if (!res.erc165 || res.erc165_check) for (const k in res) res[k] = false;
      return res;
    } catch (e) {
      // If execution reverted: contract doesn't support ERC165
      if (isReverted(e as Error)) return Object.fromEntries(keys.map((k) => [k, false]));
      throw e;
    }
  }
  // Single eth_getLogs request for `topics`; no chunking.
  async ethLogsSingle(topics: Topics, opts: LogOpts): Promise<Log[]> {
    const req: Record<string, any> = { topics, fromBlock: ethNum(opts.fromBlock || 0) };
    if (opts.toBlock !== undefined) req.toBlock = ethNum(opts.toBlock);
    const res = await this.call('eth_getLogs', req);
    return res.map((i: any) => fixLog(i, opts));
  }
  // eth_getLogs, optionally split into limitLogs-sized block ranges.
  // NOTE(review): the `'limitLogs' in opts` check differs from the validator's
  // `!== undefined` check; an explicit `limitLogs: undefined` would take the
  // chunked path with NaN arithmetic — confirm callers never pass that.
  async ethLogs(topics: Topics, opts: LogOpts = {}): Promise<Log[]> {
    validateLogOpts(opts);
    const fromBlock = opts.fromBlock || 0;
    if (!('limitLogs' in opts)) return this.ethLogsSingle(topics, opts);
    const promises = [];
    for (let i = fromBlock; i <= opts.toBlock; i += opts.limitLogs)
      promises.push(this.ethLogsSingle(topics, { fromBlock: i, toBlock: i + opts.limitLogs }));
    const out = [];
    for (const i of await Promise.all(promises)) out.push(...i);
    return out;
  }
  // NOTE: this is very low-level methods that return parts used for .transfers method,
  // you will need to decode data yourself.
  // Returns [outgoing, incoming] ERC-20 Transfer logs for `address`.
  async tokenTransfers(address: string, opts: LogOpts = {}): Promise<[Log[], Log[]]> {
    if (typeof address !== 'string') throw new Error('tokenTransfers: wrong address');
    validateLogOpts(opts);
    // If we want incoming and outgoing token transfers we need to call both
    return await Promise.all([
      this.ethLogs(ERC_TRANSFER.topics({ from: address, to: null, value: null }), opts), // From
      this.ethLogs(ERC_TRANSFER.topics({ from: null, to: address, value: null }), opts), // To
    ]);
  }
  // WETH Deposit/Withdrawal logs for `address` (WETH emits these instead of
  // Transfer on wrap/unwrap), fetched as a single OR-topic query.
  async wethTransfers(address: string, opts: LogOpts = {}): Promise<[Log[]]> {
    if (typeof address !== 'string') throw new Error('tokenTransfers: wrong address');
    validateLogOpts(opts);
    const depositTopic = WETH_DEPOSIT.topics({ dst: address, wad: null });
    const withdrawTopic = WETH_WITHDRAW.topics({ src: address, wad: null });
    // OR query
    return await Promise.all([
      this.ethLogs([[depositTopic[0], withdrawTopic[0]], depositTopic[1]], opts),
    ]);
  }
  // ERC-1155 TransferSingle/TransferBatch logs for `address`, as
  // [singleFrom, singleTo, batchFrom, batchTo].
  async erc1155Transfers(
    address: string,
    opts: LogOpts = {}
  ): Promise<[Log[], Log[], Log[], Log[]]> {
    if (typeof address !== 'string') throw new Error('tokenTransfers: wrong address');
    validateLogOpts(opts);
    return await Promise.all([
      // Single
      this.ethLogs(
        ERC1155_SINGLE.topics({ operator: null, from: address, to: null, id: null, value: null }),
        opts
      ),
      this.ethLogs(
        ERC1155_SINGLE.topics({ operator: null, from: null, to: address, id: null, value: null }),
        opts
      ),
      // Batch
      this.ethLogs(
        ERC1155_BATCH.topics({ operator: null, from: address, to: null, ids: null, values: null }),
        opts
      ),
      this.ethLogs(
        ERC1155_BATCH.topics({ operator: null, from: null, to: address, ids: null, values: null }),
        opts
      ),
    ]);
  }
  // Fetches tx info + receipt, then rebuilds the raw signed tx from the fields
  // and cross-checks sender/hash against the node's answer.
  async txInfo(
    txHash: string,
    opts: TxInfoOpts = {}
  ): Promise<{
    type: 'legacy' | 'eip2930' | 'eip1559' | 'eip4844' | 'eip7702';
    info: any;
    receipt: any;
    raw: string | undefined;
  }> {
    let [info, receipt] = await Promise.all([
      this.call('eth_getTransactionByHash', txHash),
      this.call('eth_getTransactionReceipt', txHash),
    ]);
    info = fixTxInfo(info);
    receipt = fixTxReceipt(receipt);
    const type = Object.keys(TxVersions)[info.type] as keyof typeof TxVersions;
    // This is not strictly neccessary, but allows to store tx info in very compact format and remove unneccessary fields
    // Also, there is additional validation that node returned actual with correct hash/sender and not corrupted stuff.
    let raw: string | undefined = undefined;
    try {
      const rawData: Record<string, any> = {
        nonce: info.nonce,
        gasLimit: info.gas,
        to: info.to === null ? '0x' : info.to, // null `to` == contract creation
        value: info.value,
        data: info.input,
        r: info.r,
        s: info.s,
        yParity: Number(info.v),
        chainId: info.chainId,
      };
      if (info.accessList) rawData.accessList = info.accessList;
      if (info.maxFeePerBlobGas) rawData.maxFeePerBlobGas = info.maxFeePerBlobGas;
      if (info.blobVersionedHashes) rawData.blobVersionedHashes = info.blobVersionedHashes;
      if (info.maxFeePerGas) {
        rawData.maxFeePerGas = info.maxFeePerGas;
        rawData.maxPriorityFeePerGas = info.maxPriorityFeePerGas;
      } else if (info.gasPrice) rawData.gasPrice = info.gasPrice;
      if (type === 'legacy')
        Object.assign(rawData, legacySig.encode({ v: info.v, r: info.r, s: info.s }));
      const tx = new Transaction(type, rawData as any, false, true);
      if (tx.recoverSender().address.toLowerCase() !== info.from.toLowerCase())
        throw new Error('txInfo: wrong sender');
      if (receipt.transactionHash !== `0x${tx.hash}`) throw new Error('txInfo: wrong hash');
      raw = tx.toHex();
    } catch (err) {
      // This can crash if something wrong with our parser or limits, so
      // we have option to make network code to work even if rebuilding is crashed
      if (!opts.ignoreTxRebuildErrors) throw err;
    }
    if (opts.blockCallback && info.blockNumber !== null) opts.blockCallback(info.blockNumber);
    return { type, info, receipt, raw };
  }
  // Detects token standard (ERC-20/721/1155) and its metadata for `contract`,
  // or returns a { contract, error } object when detection fails.
  async tokenInfo(contract: string): Promise<TokenInfo | TokenError> {
    const c = createContract(ERC20, this, contract);
    const t = await wait({
      code: this.call('eth_getCode', contract, 'latest'),
      capabilities: this.contractCapabilities(contract),
      // We call all stuff at same time to reduce latency (should be done in single req if batched)
      name: c.name.call(), // ERC-20 (optional), ERC-721 (metada)
      symbol: c.symbol.call(), // ERC-20 (optional), ERC-721 (metadata)
      decimals: c.decimals.call(), // ERC-20 (optional), ERC-721 (enumarable)
      totalSupply: c.totalSupply.call(), // ERC-20 (required), ERC-721
    });
    // No code, probably self-destructed
    if (t.code === '0x') return { contract, error: 'not contract or destructed' };
    if (t.capabilities && t.capabilities.erc1155) {
      // All metadata is inside URI per tokenId to outside network stuff (maybe ipfs), so nothing to do here.
      return { contract, abi: 'ERC1155' };
    }
    if (t.capabilities && t.capabilities.erc721) {
      const res = { contract, abi: 'ERC721' };
      if (t.capabilities.erc721_metadata) {
        if (t.name === undefined) return { contract, error: 'ERC721+Metadata without name' };
        if (t.symbol === undefined) return { contract, error: 'ERC721+Metadata without symbol' };
        Object.assign(res, { name: t.name, symbol: t.symbol, metadata: true });
      }
      if (t.capabilities.erc721_enumerable) {
        if (t.totalSupply === undefined)
          return { contract, error: 'ERC721+Enumerable without totalSupply' };
        Object.assign(res, { totalSupply: t.totalSupply, enumerable: true });
      }
      return res as TokenInfo;
    }
    if (t.totalSupply === undefined) return { contract, error: 'not ERC20 token' }; // If there is no totalSupply, it is not ERC20!
    return {
      contract,
      abi: 'ERC20',
      name: t.name,
      symbol: t.symbol,
      totalSupply: t.totalSupply,
      decimals: t.decimals ? Number(t.decimals) : undefined,
    };
  }
  // Balance of one token for `address`, returned as Map<tokenId, amount>
  // (ERC-20 uses the synthetic tokenId 1n), or a TokenError.
  private async tokenBalanceSingle(
    address: string,
    token: TokenInfo | TokenError,
    tokenIds?: Set<bigint>
  ): Promise<TokenBalanceSingle | TokenError> {
    if ('error' in token) return token;
    if (token.abi === 'ERC20') {
      const balance = await createContract(ERC20, this, token.contract).balanceOf.call(address);
      if (tokenIds && (tokenIds.size > 1 || Array.from(tokenIds)[0] !== 1n)) {
        return { contract: token.contract, error: 'unexpected tokenIds for ERC20' };
      }
      return new Map([[1n, balance]]);
    } else if (token.abi === 'ERC721') {
      const c = createContract(ERC721, this, token.contract);
      const balance = await c.balanceOf.call(address);
      if (!token.enumerable) {
        if (!tokenIds) {
          if (!balance) return new Map(); // no tokens owned by user
          return {
            contract: token.contract,
            error: 'erc721 contract not enumerable, but owner has ' + balance + ' tokens',
          };
        }
        // if we cannot enumerate, but has tokenIds, we can check if tokenIds still owned by account
        const ids = Array.from(tokenIds);
        const owners = await Promise.all(ids.map((i) => c.ownerOf.call(i)));
        return new Map(
          ids.map((i, j) => [i, owners[j].toLowerCase() === address.toLowerCase() ? 1n : 0n])
        );
      }
      // if we can fetch tokenIds: always do this
      const p = [];
      for (let i = 0; i < balance; i++)
        p.push(c.tokenOfOwnerByIndex.call({ owner: address, index: BigInt(i) }));
      tokenIds = new Set(await Promise.all(p));
      const ids = Array.from(tokenIds!);
      return new Map(ids.map((i) => [i, 1n]));
    } else if (token.abi === 'ERC1155') {
      // This is pretty bad standard, because it doesn't allow enumeration of tokenIds for owner
      if (!tokenIds)
        return { contract: token.contract, error: 'cannot fetch erc1155 without tokenIds' };
      const c = createContract(ERC1155, this, token.contract);
      const ids = Array.from(tokenIds);
      const balances = await c.balanceOfBatch.call({ accounts: ids.map((_) => address), ids });
      const res = new Map(ids.map((i, j) => [i, balances[j]]));
      return res;
    }
    throw new Error('unknown token type');
  }
  // Metadata URI for an NFT tokenId (ERC-721 tokenURI / ERC-1155 uri).
  async tokenURI(
    token: TokenInfo | TokenError | string,
    tokenId: bigint
  ): Promise<string | TokenError> {
    if (typeof token === 'string') token = await this.tokenInfo(token);
    if ('error' in token) return token;
    if (token.abi === 'ERC721') {
      const c = createContract(ERC721, this, token.contract);
      if (!token.metadata) return { contract: token.contract, error: 'erc721 without metadata' };
      return c.tokenURI.call(tokenId);
    } else if (token.abi === 'ERC1155') {
      const c = createContract(ERC1155, this, token.contract);
      return c.uri.call(tokenId);
    }
    return { contract: token.contract, error: 'not supported token type' };
  }
  // Balances for several tokens at once; token contract addresses are resolved
  // via tokenInfo (slow — cache TokenInfo upstream when possible).
  async tokenBalances(
    address: string,
    tokens: string[],
    tokenIds?: Record<string, Set<bigint>>
  ): Promise<TokenBalances> {
    // New API requires data from tokenInfo (which is slow and should be cached).
    // But for compat with old API, we do tokenInfo call if contract address (as string) presented
    const _tokens = await Promise.all(
      tokens.map((i) => (typeof i === 'string' ? this.tokenInfo(i) : i))
    );
    const balances = await Promise.all(
      _tokens.map((i) => this.tokenBalanceSingle(address, i, tokenIds && tokenIds[i.contract]))
    );
    return Object.fromEntries(_tokens.map((i, j) => [i.contract, balances[j]])) as any;
  }
  // Decodes a raw log into a TokenTransfer using the token's detected ABI;
  // returns undefined when the log doesn't match any known transfer event.
  private decodeTokenTransfer(token: TokenInfo, log: Log): TokenTransfer | undefined {
    if ('error' in token) return;
    if (token.abi === 'ERC20') {
      try {
        const decoded = ERC_TRANSFER.decode(log.topics, log.data);
        return {
          ...token,
          contract: log.address,
          to: decoded.to,
          from: decoded.from,
          tokens: new Map([[1n, decoded.value]]),
        };
      } catch (e) {}
      // Weth doesn't issue Transfer event on Deposit/Withdrawal
      // NOTE: we don't filter for WETH_CONTRACT here in case of other contracts with similar API or different networks
      try {
        const decoded = WETH_DEPOSIT.decode(log.topics, log.data);
        return {
          ...token,
          contract: log.address,
          from: log.address,
          to: decoded.dst,
          tokens: new Map([[1n, decoded.wad]]),
        };
      } catch (e) {}
      try {
        const decoded = WETH_WITHDRAW.decode(log.topics, log.data);
        return {
          ...token,
          contract: log.address,
          from: decoded.src,
          to: log.address,
          tokens: new Map([[1n, decoded.wad]]),
        };
      } catch (e) {}
    } else if (token.abi === 'ERC721') {
      try {
        const decoded = ERC721_TRANSFER.decode(log.topics, log.data);
        return {
          ...token,
          from: decoded.from,
          to: decoded.to,
          tokens: new Map([[decoded.tokenId, 1n]]),
        };
      } catch (e) {}
    } else if (token.abi === 'ERC1155') {
      try {
        const decoded = ERC1155_SINGLE.decode(log.topics, log.data);
        return {
          ...token,
          from: decoded.from,
          to: decoded.to,
          tokens: new Map([[decoded.id, decoded.value]]),
        };
      } catch (e) {}
      try {
        const decoded = ERC1155_BATCH.decode(log.topics, log.data);
        return {
          ...token,
          from: decoded.from,
          to: decoded.to,
          tokens: new Map(decoded.ids.map((i, j) => [i, decoded.values[j]])),
        };
      } catch (e) {}
    }
    return; // unknown token type
  }
  // We want to get all transactions related to address, that means:
  // - from or to equals address in tx
  // - any internal tx from or to equals address in tx
  // - any erc20 token transfer which hash address in src or dst
  // - erc721 is exactly same function signature as erc20 (need to detect after getting transactions)
  // - erc1155: from/to + single/batch
  // trace_filter (web3) returns information only for first two cases, most of explorers returns only first case.
  async transfers(address: string, opts: TraceOpts & LogOpts = {}): Promise<TxTransfers[]> {
    // Caches are filled via callbacks as traces/logs discover related entities.
    const txCache: Record<string, any> = {};
    const blockCache: Record<number, any> = {};
    const tokenCache: Record<string, any> = {};
    const _opts = {
      ...opts,
      txCallback: (txHash: string) => {
        if (txCache[txHash]) return;
        txCache[txHash] = this.txInfo(txHash, opts);
      },
      blockCallback: (blockNumber: number) => {
        if (blockCache[blockNumber]) return;
        blockCache[blockNumber] = this.blockInfo(blockNumber);
      },
      contractCallback: (address: string) => {
        if (tokenCache[address]) return;
        tokenCache[address] = this.tokenInfo(address);
      },
    };
    if (!_opts.fromBlock) _opts.fromBlock = 0;
    // This runs in parallel and executes callbacks
    // Note, we ignore logs and weth, but they will call callbacks and fetch related
    const [actions, _logs, _weth] = await Promise.all([
      this.internalTransactions(address, _opts),
      this.tokenTransfers(address, _opts),
      this.wethTransfers(address, _opts),
      this.erc1155Transfers(address, _opts),
    ]);
    const mapCache = async (cache: Record<any, any>) => {
      const keys = Object.keys(cache);
      const values = await Promise.all(Object.values(cache));
      return Object.fromEntries(values.map((v, i) => [keys[i], v]));
    };
    // it is ok to do this sequentially, since promises already started and probably resolved at this point
    const blocks = await mapCache(blockCache);
    const tx = await mapCache(txCache);
    const tokens = await mapCache(tokenCache);
    const actionPerTx = group(actions, 'transactionHash');
    // Sort transactions by [blockNumber, transactionIndex]
    const _txHashes = Object.entries(tx).map(
      ([k, v]) => [k, v.info.blockNumber, v.info.transactionIndex] as [string, number, number]
    );
    _txHashes.sort((a, b) => (a[1] !== b[1] ? a[1] - b[1] : a[2] - b[2]));
    const txHashes = _txHashes.map((i) => i[0]);
    return txHashes.map((txHash) => {
      const { info, receipt } = tx[txHash] as { info: TxInfo; receipt: TxReceipt };
      const actions = actionPerTx[txHash];
      const block = info.blockNumber !== null ? blocks[info.blockNumber] : undefined;
      const transfers: Transfer[] = [];
      if (actions) {
        for (const a of actions)
          transfers.push({ from: a.action.from, to: a.action.to, value: a.action.value });
      } else {
        // If we have action, it was call to contract and transfer from tx is already added
        transfers.push({ from: info.from, to: info.to, value: info.value });
      }
      // cumulativeGasUsed includes all transactions before that in block, so useless. gasUsed is correct even for internal transactions
      transfers.push({ from: info.from, value: receipt.gasUsed * receipt.effectiveGasPrice });
      // Tokens
      const tokenTransfers: TokenTransfer[] = [];
      for (const log of receipt.logs) {
        const tokenInfo = tokens[log.address];
        if (!tokenInfo) continue;
        const tt = this.decodeTokenTransfer(tokenInfo, log);
        if (tt) tokenTransfers.push(tt);
      }
      return {
        hash: txHash,
        // NOTE(review): block is undefined when info.blockNumber is null (pending
        // tx), which would throw here — presumably only mined txs reach this
        // point via trace/log results. Confirm.
        timestamp: block.timestamp,
        block: info.blockNumber !== null ? info.blockNumber : undefined,
        reverted: !receipt.status,
        transfers,
        tokenTransfers,
        info: { ...tx[txHash], block, actions },
      };
    }) as TxTransfers[];
  }
  // ERC-20 Approval events granted by `address`, as contract -> spender -> value.
  async allowances(address: string, opts: LogOpts = {}): Promise<TxAllowances> {
    const approval = events(ERC20).Approval;
    // ERC-721/ERC-1155: +ApprovalForAll
    // ERC-1761 Scoped Approval for partial with 1155/721?
    const topics = approval.topics({ owner: address, spender: null, value: null });
    const logs = await this.ethLogs(topics, opts);
    // res[tokenContract][spender] = value
    const res: TxAllowances = {};
    for (const l of logs) {
      const decoded = approval.decode(l.topics, l.data);
      if (decoded.owner.toLowerCase() !== address.toLowerCase()) continue;
      if (!res[l.address]) res[l.address] = {};
      res[l.address][decoded.spender] = decoded.value;
    }
    return res;
  }
}
/**
 * Calculates balances at specific point in time after tx.
 * Also, useful as a sanity check in case we've missed something.
 * Info from multiple addresses can be merged (sort everything first).
 * Mutates each element of `transfers` by attaching `balances`/`tokenBalances`
 * snapshots and returns the same array, re-typed.
 */
export function calcTransfersDiff(transfers: TxTransfers[]): (TxTransfers & Balances)[] {
  // address -> balance (running totals, mutated across iterations)
  const balances: Record<string, bigint> = {};
  // contract -> address -> tokenId -> balance
  const tokenBalances: Record<string, Record<string, Map<bigint, bigint>>> = {};
  const _0 = BigInt(0);
  for (const t of transfers) {
    for (const it of t.transfers) {
      if (it.from) {
        if (balances[it.from] === undefined) balances[it.from] = _0;
        balances[it.from] -= it.value;
      }
      if (it.to) {
        if (balances[it.to] === undefined) balances[it.to] = _0;
        balances[it.to] += it.value;
      }
    }
    for (const tt of t.tokenTransfers) {
      if (!tokenBalances[tt.contract]) tokenBalances[tt.contract] = {};
      const token = tokenBalances[tt.contract];
      for (const [tokenId, value] of tt.tokens) {
        if (token[tt.from] === undefined) token[tt.from] = new Map();
        if (token[tt.to] === undefined) token[tt.to] = new Map();
        const fromTokens = token[tt.from];
        const toTokens = token[tt.to];
        fromTokens.set(tokenId, (fromTokens.get(tokenId) || _0) - value);
        toTokens.set(tokenId, (toTokens.get(tokenId) || _0) + value);
      }
    }
    Object.assign(t, {
      balances: { ...balances },
      // Deep copy including the inner Maps. Previously only the outer records
      // were spread, so every snapshot shared the same Map instances which kept
      // being mutated by later transfers — all per-tx token balances ended up
      // equal to the final state.
      tokenBalances: Object.fromEntries(
        Object.entries(tokenBalances).map(([contract, perAddr]) => [
          contract,
          Object.fromEntries(Object.entries(perAddr).map(([a, m]) => [a, new Map(m)])),
        ])
      ),
    });
  }
  return transfers as (TxTransfers & Balances)[];
}

316
dev/env/node_modules/micro-eth-signer/src/net/chainlink.ts generated vendored Executable file
View File

@@ -0,0 +1,316 @@
import { createContract, tokenFromSymbol } from '../abi/index.ts';
import { type IWeb3Provider, createDecimal } from '../utils.ts';
// Minimal Chainlink aggregator ABI: only latestRoundData is needed to read a price.
const ABI = [
  {
    type: 'function',
    name: 'latestRoundData',
    outputs: [
      { name: 'roundId', type: 'uint80' },
      { name: 'answer', type: 'int256' }, // price, scaled by the feed's decimals
      { name: 'startedAt', type: 'uint256' },
      { name: 'updatedAt', type: 'uint256' },
      { name: 'answeredInRound', type: 'uint80' },
    ],
  },
] as const;
// Ethereum-mainnet Chainlink price feeds per token symbol:
// `contract` is the aggregator (feed) address, `tokenContract` the ERC-20 token
// itself, `decimals` the precision of the feed's `answer`.
export const TOKENS: Record<string, { decimals: number; contract: string; tokenContract: string }> =
  {
    '1INCH': {
      decimals: 8,
      contract: '0xc929ad75b72593967de83e7f7cda0493458261d9',
      tokenContract: '0x111111111117dc0aa78b770fa6a738034120c302',
    },
    AAPL: {
      decimals: 8,
      contract: '0x139c8512cde1778e9b9a8e721ce1aebd4dd43587',
      tokenContract: '0x7edc9e8a1196259b7c6aba632037a9443d4e14f7',
    },
    AAVE: {
      decimals: 8,
      contract: '0x547a514d5e3769680ce22b2361c10ea13619e8a9',
      tokenContract: '0x7fc66500c84a76ad7e9c93437bfc5ac33e2ddae9',
    },
    ADX: {
      decimals: 8,
      contract: '0x231e764b44b2c1b7ca171fa8021a24ed520cde10',
      tokenContract: '0x4470bb87d77b963a013db939be332f927f2b992e',
    },
    AKRO: {
      decimals: 8,
      contract: '0xb23d105df4958b4b81757e12f2151b5b5183520b',
      tokenContract: '0x8ab7404063ec4dbcfd4598215992dc3f8ec853d7',
    },
    AMP: {
      decimals: 8,
      contract: '0x8797abc4641de76342b8ace9c63e3301dc35e3d8',
      tokenContract: '0xff20817765cb7f73d4bde2e66e067e58d11095c2',
    },
    AMPL: {
      decimals: 18,
      contract: '0xe20ca8d7546932360e37e9d72c1a47334af57706',
      tokenContract: '0xd46ba6d942050d489dbd938a2c909a5d5039a161',
    },
    AMZN: {
      decimals: 8,
      contract: '0x8994115d287207144236c13be5e2bdbf6357d9fd',
      tokenContract: '0xd6a073d973f95b7ce2ecf2b19224fa12103cf460',
    },
    ANKR: {
      decimals: 8,
      contract: '0x7eed379bf00005cfed29fed4009669de9bcc21ce',
      tokenContract: '0x8290333cef9e6d528dd5618fb97a76f268f3edd4',
    },
    BADGER: {
      decimals: 8,
      contract: '0x66a47b7206130e6ff64854ef0e1edfa237e65339',
      tokenContract: '0x3472a5a71965499acd81997a54bba8d852c6e53d',
    },
    BAND: {
      decimals: 8,
      contract: '0x919c77acc7373d000b329c1276c76586ed2dd19f',
      tokenContract: '0xba11d00c5f74255f56a5e366f4f77f5a186d7f55',
    },
    BAT: {
      decimals: 8,
      contract: '0x9441d7556e7820b5ca42082cfa99487d56aca958',
      tokenContract: '0x0d8775f648430679a709e98d2b0cb6250d2887ef',
    },
    BNB: {
      decimals: 8,
      contract: '0x14e613ac84a31f709eadbdf89c6cc390fdc9540a',
      tokenContract: '0xb8c77482e45f1f44de1745f52c74426c631bdd52',
    },
    BNT: {
      decimals: 8,
      contract: '0x1e6cf0d433de4fe882a437abc654f58e1e78548c',
      tokenContract: '0x1f573d6fb3f13d689ff844b4ce37794d79a7ff1c',
    },
    BTM: {
      decimals: 8,
      contract: '0x9fccf42d21ab278e205e7bb310d8979f8f4b5751',
      tokenContract: '0xcb97e65f07da24d46bcdd078ebebd7c6e6e3d750',
    },
    BUSD: {
      decimals: 8,
      contract: '0x833d8eb16d306ed1fbb5d7a2e019e106b960965a',
      tokenContract: '0x4fabb145d64652a948d72533023f6e7a623c7c53',
    },
    COMP: {
      decimals: 8,
      contract: '0xdbd020caef83efd542f4de03e3cf0c28a4428bd5',
      tokenContract: '0xc00e94cb662c3520282e6f5717214004a7f26888',
    },
    COVER: {
      decimals: 8,
      contract: '0x0ad50393f11ffac4dd0fe5f1056448ecb75226cf',
      tokenContract: '0x4688a8b1f292fdab17e9a90c8bc379dc1dbd8713',
    },
    CRO: {
      decimals: 8,
      contract: '0x00cb80cf097d9aa9a3779ad8ee7cf98437eae050',
      tokenContract: '0xa0b73e1ff0b80914ab6fe0444e65848c4c34450b',
    },
    CRV: {
      decimals: 8,
      contract: '0xcd627aa160a6fa45eb793d19ef54f5062f20f33f',
      tokenContract: '0xd533a949740bb3306d119cc777fa900ba034cd52',
    },
    DAI: {
      decimals: 8,
      contract: '0xaed0c38402a5d19df6e4c03f4e2dced6e29c1ee9',
      tokenContract: '0x60d9564303c70d3f040ea9393d98d94f767d020c',
    },
    DPI: {
      decimals: 8,
      contract: '0xd2a593bf7594ace1fad597adb697b5645d5eddb2',
      tokenContract: '0x1494ca1f11d487c2bbe4543e90080aeba4ba3c2b',
    },
    EOS: {
      decimals: 8,
      contract: '0x10a43289895eaff840e8d45995bba89f9115ecee',
      tokenContract: '0x86fa049857e0209aa7d9e616f7eb3b3b78ecfdb0',
    },
    FXS: {
      decimals: 8,
      contract: '0x6ebc52c8c1089be9eb3945c4350b68b8e4c2233f',
      tokenContract: '0x3432b6a60d23ca0dfca7761b7ab56459d9c964d0',
    },
    HT: {
      decimals: 8,
      contract: '0xe1329b3f6513912caf589659777b66011aee5880',
      tokenContract: '0x6f259637dcd74c767781e37bc6133cd6a68aa161',
    },
    IOST: {
      decimals: 8,
      contract: '0xd0935838935349401c73a06fcde9d63f719e84e5',
      tokenContract: '0xfa1a856cfa3409cfa145fa4e20eb270df3eb21ab',
    },
    KNC: {
      decimals: 8,
      contract: '0xf8ff43e991a81e6ec886a3d281a2c6cc19ae70fc',
      tokenContract: '0xdd974d5c2e2928dea5f71b9825b8b646686bd200',
    },
    LINK: {
      decimals: 8,
      contract: '0x2c1d072e956affc0d435cb7ac38ef18d24d9127c',
      tokenContract: '0x514910771af9ca656af840dff83e8264ecf986ca',
    },
    LRC: {
      decimals: 8,
      contract: '0xfd33ec6abaa1bdc3d9c6c85f1d6299e5a1a5511f',
      tokenContract: '0xef68e7c694f40c8202821edf525de3782458639f',
    },
    MATIC: {
      decimals: 8,
      contract: '0x7bac85a8a13a4bcd8abb3eb7d6b4d632c5a57676',
      tokenContract: '0x7d1afa7b718fb893db30a3abc0cfc608aacfebb0',
    },
    MKR: {
      decimals: 8,
      contract: '0xec1d1b3b0443256cc3860e24a46f108e699484aa',
      tokenContract: '0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2',
    },
    MTA: {
      decimals: 8,
      contract: '0xc751e86208f0f8af2d5cd0e29716ca7ad98b5ef5',
      tokenContract: '0xa3bed4e1c75d00fa6f4e5e6922db7261b5e9acd2',
    },
    NFLX: {
      decimals: 8,
      contract: '0x67c2e69c5272b94af3c90683a9947c39dc605dde',
      tokenContract: '0x0a3dc37762f0102175fd43d3871d7fa855626146',
    },
    NMR: {
      decimals: 8,
      contract: '0xcc445b35b3636bc7cc7051f4769d8982ed0d449a',
      tokenContract: '0x1776e1f26f98b1a5df9cd347953a26dd3cb46671',
    },
    OCEAN: {
      decimals: 8,
      contract: '0x7ece4e4e206ed913d991a074a19c192142726797',
      tokenContract: '0x967da4048cd07ab37855c090aaf366e4ce1b9f48',
    },
    OKB: {
      decimals: 8,
      contract: '0x22134617ae0f6ca8d89451e5ae091c94f7d743dc',
      tokenContract: '0x75231f58b43240c9718dd58b4967c5114342a86c',
    },
    OMG: {
      decimals: 8,
      contract: '0x7d476f061f8212a8c9317d5784e72b4212436e93',
      tokenContract: '0xd26114cd6ee289accf82350c8d8487fedb8a0c07',
    },
    OXT: {
      decimals: 8,
      contract: '0xd75aaae4af0c398ca13e2667be57af2cca8b5de6',
      tokenContract: '0x4575f41308ec1483f3d399aa9a2826d74da13deb',
    },
    REN: {
      decimals: 8,
      contract: '0x0f59666ede214281e956cb3b2d0d69415aff4a01',
      tokenContract: '0x408e41876cccdc0f92210600ef50372656052a38',
    },
    SAND: {
      decimals: 8,
      contract: '0x35e3f7e558c04ce7eee1629258ecbba03b36ec56',
      tokenContract: '0x3845badade8e6dff049820680d1f14bd3903a5d0',
    },
    SNX: {
      decimals: 8,
      contract: '0xdc3ea94cd0ac27d9a86c180091e7f78c683d3699',
      tokenContract: '0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f',
    },
    SUSHI: {
      decimals: 8,
      contract: '0xcc70f09a6cc17553b2e31954cd36e4a2d89501f7',
      tokenContract: '0x6b3595068778dd592e39a122f4f5a5cf09c90fe2',
    },
    SXP: {
      decimals: 8,
      contract: '0xfb0cfd6c19e25db4a08d8a204a387cea48cc138f',
      tokenContract: '0x8ce9137d39326ad0cd6491fb5cc0cba0e089b6a9',
    },
    UNI: {
      decimals: 8,
      contract: '0x553303d460ee0afb37edff9be42922d8ff63220e',
      tokenContract: '0x1f9840a85d5af5bf1d1762f925bdaddc4201f984',
    },
    USDC: {
      decimals: 8,
      contract: '0x8fffffd4afb6115b954bd326cbe7b4ba576818f6',
      tokenContract: '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',
    },
    USDK: {
      decimals: 8,
      contract: '0xfac81ea9dd29d8e9b212acd6edbeb6de38cb43af',
      tokenContract: '0x1c48f86ae57291f7686349f12601910bd8d470bb',
    },
    USDT: {
      decimals: 8,
      contract: '0x3e7d1eab13ad0104d2750b8863b489d65364e32d',
      tokenContract: '0xdac17f958d2ee523a2206206994597c13d831ec7',
    },
    YFI: {
      decimals: 8,
      contract: '0xa027702dbb89fbd58938e4324ac03b58d812b0e1',
      tokenContract: '0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e',
    },
    ZRX: {
      decimals: 8,
      contract: '0x2885d15b8af22648b98b122b22fdf4d2a56c6023',
      tokenContract: '0xe41d2489571d322189246dafa5ebde1f4699f498',
    },
    SUSD: {
      decimals: 8,
      contract: '0xad35bd71b9afe6e4bdc266b345c198eadef9ad94',
      tokenContract: '0x57ab1e02fee23774580c119740129eac7081e9d3',
    },
    // Wrapped tokens uses price of original coin
    WBTC: {
      decimals: 8,
      contract: '0xf4030086522a5beea4988f8ca5b36dbc97bee88c',
      tokenContract: tokenFromSymbol('WBTC')!.contract,
    },
    WETH: {
      decimals: 8,
      contract: '0x5f4ec3df9cbd43714fe2740f5e3616155c5b8419',
      tokenContract: tokenFromSymbol('WETH')!.contract,
    },
  };
export default class Chainlink {
readonly net: IWeb3Provider;
constructor(net: IWeb3Provider) {
this.net = net;
}
async price(contract: string, decimals: number): Promise<number> {
const prices = createContract(ABI, this.net, contract);
let res = await prices.latestRoundData.call();
const num = Number.parseFloat(createDecimal(decimals).encode(res.answer));
if (Number.isNaN(num)) throw new Error('invalid data received');
return num;
}
async coinPrice(symbol: string): Promise<number> {
// Only common coins
const COINS: Record<string, { decimals: number; contract: string }> = {
BCH: { decimals: 8, contract: '0x9f0f69428f923d6c95b781f89e165c9b2df9789d' },
BTC: { decimals: 8, contract: '0xf4030086522a5beea4988f8ca5b36dbc97bee88c' },
DOGE: { decimals: 8, contract: '0x2465cefd3b488be410b941b1d4b2767088e2a028' },
ETH: { decimals: 8, contract: '0x5f4ec3df9cbd43714fe2740f5e3616155c5b8419' },
XMR: { decimals: 8, contract: '0xfa66458cce7dd15d8650015c4fce4d278271618f' },
ZEC: { decimals: 8, contract: '0xd54b033d48d0475f19c5fccf7484e8a981848501' },
};
const coin = COINS[symbol.toUpperCase()];
if (!coin) throw new Error(`micro-web3/chainlink: unknown coin: ${symbol}`);
return await this.price(coin.contract, coin.decimals);
}
async tokenPrice(symbol: string): Promise<number> {
const token = TOKENS[symbol.toUpperCase()];
if (!token) throw new Error(`micro-web3/chainlink: unknown token: ${symbol}`);
return await this.price(token.contract, token.decimals);
}
}

76
dev/env/node_modules/micro-eth-signer/src/net/ens.ts generated vendored Executable file
View File

@@ -0,0 +1,76 @@
import { keccak_256 } from '@noble/hashes/sha3';
import { concatBytes } from '@noble/hashes/utils';
import { createContract } from '../abi/decoder.ts';
import { type IWeb3Provider, strip0x } from '../utils.ts';
// No support for IDN names
// EIP-137 namehash: node = keccak(node || keccak(label)), folding labels
// right-to-left starting from 32 zero bytes. Empty name returns the zero node.
export function namehash(address: string): Uint8Array {
  const zeroNode = new Uint8Array(32);
  if (!address) return zeroNode;
  return address
    .split('.')
    .reduceRight((node, label) => keccak_256(concatBytes(node, keccak_256(label))), zeroNode);
}
// ENS resolution: registry lookup -> resolver contract -> addr/name records.
export default class ENS {
  static ADDRESS_ZERO = '0x0000000000000000000000000000000000000000';
  // Mainnet ENS registry address (same across networks where ENS is deployed).
  static REGISTRY = '0x00000000000C2E074eC69A0dFb2997BA6C7d2e1e';
  static REGISTRY_CONTRACT = [
    {
      name: 'resolver',
      type: 'function',
      inputs: [{ name: 'node', type: 'bytes32' }],
      outputs: [{ type: 'address' }],
    },
  ] as const;
  static RESOLVER_CONTRACT = [
    {
      name: 'addr',
      type: 'function',
      inputs: [{ name: 'node', type: 'bytes32' }],
      outputs: [{ type: 'address' }],
    },
    {
      name: 'name',
      type: 'function',
      inputs: [{ name: 'node', type: 'bytes32' }],
      outputs: [{ type: 'string' }],
    },
  ] as const;
  readonly net: IWeb3Provider;
  constructor(net: IWeb3Provider) {
    this.net = net;
  }
  // Resolver contract address for `name`, or undefined if none is registered.
  async getResolver(name: string): Promise<string | undefined> {
    const contract = createContract(ENS.REGISTRY_CONTRACT, this.net, ENS.REGISTRY);
    const res = await contract.resolver.call(namehash(name));
    if (res === ENS.ADDRESS_ZERO) return;
    return res;
  }
  // Forward resolution: ENS name -> address, or undefined when unset.
  async nameToAddress(name: string): Promise<string | undefined> {
    const resolver = await this.getResolver(name);
    if (!resolver) return;
    const contract = createContract(ENS.RESOLVER_CONTRACT, this.net, resolver);
    const addr = await contract.addr.call(namehash(name));
    if (addr === ENS.ADDRESS_ZERO) return;
    return addr;
  }
  // Reverse resolution: address -> primary ENS name, verified by forward lookup.
  async addressToName(address: string): Promise<string | undefined> {
    const addrDomain = `${strip0x(address).toLowerCase()}.addr.reverse`;
    const resolver = await this.getResolver(addrDomain);
    if (!resolver) return;
    const contract = createContract(ENS.RESOLVER_CONTRACT, this.net, resolver);
    const name = await contract.name.call(namehash(addrDomain));
    if (!name) return;
    // From spec: ENS does not enforce accuracy of reverse records -
    // anyone may claim that the name for their address is 'alice.eth'.
    // To be certain the claim is accurate, you must always perform a forward
    // resolution for the returned name and check whether it matches the original address.
    // NOTE(review): comparison is case-sensitive; a checksummed vs lowercase
    // mismatch between `realAddr` and the caller's `address` would reject a
    // valid name — confirm both sides use the same casing.
    const realAddr = await this.nameToAddress(name);
    if (realAddr !== address) return;
    return name;
  }
}

8
dev/env/node_modules/micro-eth-signer/src/net/index.ts generated vendored Executable file
View File

@@ -0,0 +1,8 @@
import { Web3Provider, calcTransfersDiff } from './archive.ts';
import Chainlink from './chainlink.ts';
import ENS from './ens.ts';
import UniswapV2 from './uniswap-v2.ts';
import UniswapV3 from './uniswap-v3.ts';
// There are many low level APIs inside which are not exported yet.
export { Chainlink, ENS, UniswapV2, UniswapV3, Web3Provider, calcTransfersDiff };

View File

@@ -0,0 +1,214 @@
import { tokenFromSymbol } from '../abi/index.ts';
import { addr } from '../index.ts';
import { type IWeb3Provider, createDecimal, ethHex, isBytes, weieth } from '../utils.ts';
// Swap options: allowed slippage in percent and tx time-to-live in seconds.
export type SwapOpt = { slippagePercent: number; ttl: number };
export const DEFAULT_SWAP_OPT: SwapOpt = { slippagePercent: 0.5, ttl: 30 * 60 };
// [res?.id, res?.payinAddress, res?.amountExpectedTo]
// Fully-prepared exchange transaction: where to send funds, how much, and
// an optional ERC-20 allowance the caller must grant first.
export type ExchangeTx = {
  address: string;
  amount: string;
  currency: string;
  expectedAmount: string;
  data?: string;
  allowance?: { token: string; contract: string; amount: string };
  txId?: string;
};
// A quoted swap: human-readable source, quoted output, and a builder that
// produces the concrete transaction for a given sender/recipient pair.
export type SwapElm = {
  name: string; // Human readable exchange name
  expectedAmount: string;
  tx: (fromAddress: string, toAddress: string) => Promise<ExchangeTx>;
};
/**
 * Adjusts a bigint by `_perc` percent (negative values decrease it).
 * The percent is converted to fixed-point with 4 fractional digits, truncated.
 */
export function addPercent(n: bigint, _perc: number): bigint {
  // `| 0` truncates the scaled percent toward zero, matching integer math below.
  const scaled = BigInt((_perc * 10000) | 0);
  // 100% expressed at the same fixed-point scale.
  const full = BigInt(1000000);
  return ((full + scaled) * n) / full;
}
/** Duck-typed thenable check: a truthy object/function exposing a callable `then`. */
export function isPromise(o: unknown): boolean {
  if (!o) return false;
  const t = typeof o;
  if (t !== 'object' && t !== 'function') return false;
  return typeof (o as { then?: unknown }).then === 'function';
}
// Promise.all(), but allows to wait for nested objects with promises and to ignore errors.
// It's hard to make ignore_errors argument optional in current TS.
export type UnPromise<T> = T extends Promise<infer U> ? U : T;
type NestedUnPromise<T> = { [K in keyof T]: NestedUnPromise<UnPromise<T[K]>> };
type UnPromiseIgnore<T> = T extends Promise<infer U> ? U | undefined : T;
type NestedUnPromiseIgnore<T> = { [K in keyof T]: NestedUnPromiseIgnore<UnPromiseIgnore<T[K]>> };
/**
 * Recursively awaits every promise found inside `o`. Arrays and plain objects
 * are traversed; Uint8Array values pass through untouched.
 * @param o - arbitrarily nested structure that may contain promises
 * @param ignore_errors - when true, rejected promises become `undefined`
 *   (Promise.allSettled) instead of rejecting the whole call
 * @returns the same structure with every promise replaced by its value
 */
export async function awaitDeep<T, E extends boolean | undefined>(
  o: T,
  ignore_errors: E
): Promise<E extends true ? NestedUnPromiseIgnore<T> : NestedUnPromise<T>> {
  let promises: Promise<any>[] = [];
  // Pass 1: replace each promise with a 1-based {awaitDeep: index} marker,
  // collecting the promises in encounter order.
  const traverse = (o: any): any => {
    // Fix: typeof null === 'object'; without this check null was turned into {}
    // (and would later crash trBack on `.awaitDeep` access).
    if (o === null) return o;
    if (Array.isArray(o)) return o.map((i) => traverse(i));
    if (isBytes(o)) return o;
    if (isPromise(o)) return { awaitDeep: promises.push(o) };
    if (typeof o === 'object') {
      let ret: Record<string, any> = {};
      for (let k in o) ret[k] = traverse(o[k]);
      return ret;
    }
    return o;
  };
  let out = traverse(o);
  let values: any[];
  if (!ignore_errors) values = await Promise.all(promises);
  else {
    values = (await Promise.allSettled(promises)).map((i) =>
      i.status === 'fulfilled' ? i.value : undefined
    );
  }
  // Pass 2: swap markers back for their settled values.
  const trBack = (o: any): any => {
    if (o === null) return o;
    if (Array.isArray(o)) return o.map((i) => trBack(i));
    if (isBytes(o)) return o;
    if (typeof o === 'object') {
      if (o.awaitDeep) return values[o.awaitDeep - 1];
      let ret: Record<string, any> = {};
      for (let k in o) ret[k] = trBack(o[k]);
      return ret;
    }
    return o;
  };
  return trBack(out);
}
// Token metadata for a well-known base token, keyed by its contract address.
export type CommonBase = {
  contract: string;
} & import('../abi/decoder.js').ContractInfo;
// Liquid intermediate tokens used as the middle hop when no direct pair exists.
export const COMMON_BASES: CommonBase[] = [
  'WETH',
  'DAI',
  'USDC',
  'USDT',
  'COMP',
  'MKR',
  'WBTC',
  'AMPL',
]
  .map((i) => tokenFromSymbol(i))
  .filter((i) => !!i);
// NOTE(review): the `!` below would already throw a TypeError if WETH were
// unknown, making the explicit guard mostly redundant — kept for the clearer message.
export const WETH: string = tokenFromSymbol('WETH')!.contract;
if (!WETH) throw new Error('WETH is undefined!');
/** Lowercases a contract address and maps the 'eth' pseudo-token to WETH. */
export function wrapContract(contract: string): string {
  const lower = contract.toLowerCase();
  return lower === 'eth' ? WETH : lower;
}
/** Wraps both tokens and returns them in ascending hex-string order; throws on identical tokens. */
export function sortTokens(a: string, b: string): [string, string] {
  const first = wrapContract(a);
  const second = wrapContract(b);
  if (first === second) throw new Error('uniswap.sortTokens: same token!');
  if (first < second) return [first, second];
  return [second, first];
}
/** True when `address` parses as a valid Ethereum address. */
export function isValidEthAddr(address: string): boolean {
  return addr.isValid(address);
}
/** Uniswap-style address: either the literal 'eth' pseudo-token or a valid Ethereum address. */
export function isValidUniAddr(address: string): boolean {
  if (address === 'eth') return true;
  return isValidEthAddr(address);
}
export type Token = { decimals: number; contract: string; symbol: string };
/** Normalizes the 'eth' pseudo-token (any casing) to a Token descriptor; real tokens pass through. */
function getToken(token: 'eth' | Token): Token {
  const isEthLiteral = typeof token === 'string' && token.toLowerCase() === 'eth';
  if (isEthLiteral) return { symbol: 'ETH', decimals: 18, contract: 'eth' };
  return token as Token;
}
/**
 * Shared exchange-API skeleton for Uniswap V2/V3 adapters. Subclasses supply
 * route discovery (bestPath) and calldata construction (txData); `swap`
 * converts human-readable amounts, quotes a route, and returns a lazy
 * transaction builder. Returns undefined (rather than throwing) on any failure.
 */
export abstract class UniswapAbstract {
  abstract name: string;
  abstract contract: string;
  abstract bestPath(fromCoin: string, toCoin: string, inputAmount: bigint): any;
  abstract txData(
    toAddress: string,
    fromCoin: string,
    toCoin: string,
    path: any,
    inputAmount?: bigint,
    outputAmount?: bigint,
    opt?: { slippagePercent: number }
  ): any;
  readonly net: IWeb3Provider;
  constructor(net: IWeb3Provider) {
    this.net = net;
  }
  // private async coinInfo(netName: string) {
  //   if (!validateAddr(netName)) return;
  //   if (netName === 'eth') return { symbol: 'ETH', decimals: 18 };
  //   //return await this.mgr.tokenInfo('eth', netName);
  // }
  /**
   * Quotes a swap of `amount` (decimal string in fromCoin units) and returns
   * {name, expectedAmount, tx}. `tx` builds the actual transaction on demand;
   * its `_fromAddress` argument is unused by this implementation.
   */
  async swap(
    fromCoin: 'eth' | Token,
    toCoin: 'eth' | Token,
    amount: string,
    opt: SwapOpt = DEFAULT_SWAP_OPT
  ): Promise<
    | {
        name: string;
        expectedAmount: string;
        tx: (
          _fromAddress: string,
          toAddress: string
        ) => Promise<{
          amount: string;
          address: any;
          expectedAmount: string;
          data: string;
          allowance: any;
        }>;
      }
    | undefined
  > {
    const fromInfo = getToken(fromCoin);
    const toInfo = getToken(toCoin);
    if (!fromInfo || !toInfo) return;
    const fromContract = fromInfo.contract.toLowerCase();
    const toContract = toInfo.contract.toLowerCase();
    if (!fromContract || !toContract) return;
    // Decimal coders translate between human strings and base-unit bigints.
    const fromDecimal = createDecimal(fromInfo.decimals);
    const toDecimal = createDecimal(toInfo.decimals);
    const inputAmount = fromDecimal.decode(amount);
    try {
      const path = await this.bestPath(fromContract, toContract, inputAmount);
      const expectedAmount = toDecimal.encode(path.amountOut as bigint);
      return {
        name: this.name,
        expectedAmount,
        tx: async (_fromAddress: string, toAddress: string) => {
          const txUni = this.txData(
            toAddress,
            fromContract,
            toContract,
            path,
            inputAmount,
            undefined,
            opt
          );
          return {
            amount: weieth.encode(txUni.value),
            address: txUni.to,
            expectedAmount,
            data: ethHex.encode(txUni.data),
            // ERC-20 swaps require approving the router before swapping.
            allowance: txUni.allowance && {
              token: txUni.allowance.token,
              contract: this.contract,
              amount: fromDecimal.encode(txUni.allowance.amount),
            },
          };
        },
      };
    } catch (e) {
      // Best-effort: any quoting/encoding failure yields undefined.
      // NOTE(review): debug console.log left in place to preserve behavior.
      // @ts-ignore
      console.log('E', e);
      return;
    }
  }
}

213
dev/env/node_modules/micro-eth-signer/src/net/uniswap-v2.ts generated vendored Executable file
View File

@@ -0,0 +1,213 @@
import { keccak_256 } from '@noble/hashes/sha3';
import { concatBytes, hexToBytes } from '@noble/hashes/utils';
import { type ContractInfo, createContract } from '../abi/decoder.ts';
import { default as UNISWAP_V2_ROUTER, UNISWAP_V2_ROUTER_CONTRACT } from '../abi/uniswap-v2.ts';
import { type IWeb3Provider, ethHex } from '../utils.ts';
import * as uni from './uniswap-common.ts';
// Mainnet Uniswap V2 factory address.
const FACTORY_ADDRESS = '0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f';
// keccak256 of the V2 pair creation bytecode; used for CREATE2 pair-address derivation.
const INIT_CODE_HASH = hexToBytes(
  '96e8ac4277198ff8b6f785478aa9a39f403cb768dd02cbee326c3e7da348845f'
);
// Minimal pair ABI: just getReserves() for price math.
const PAIR_CONTRACT = [
  {
    type: 'function',
    name: 'getReserves',
    outputs: [
      { name: 'reserve0', type: 'uint112' },
      { name: 'reserve1', type: 'uint112' },
      { name: 'blockTimestampLast', type: 'uint32' },
    ],
  },
] as const;
/** EIP-1014 CREATE2 address: last 20 bytes of keccak256(0xff ++ from ++ salt ++ initCodeHash). */
export function create2(from: Uint8Array, salt: Uint8Array, initCodeHash: Uint8Array): string {
  const preimage = concatBytes(Uint8Array.of(0xff), from, salt, initCodeHash);
  const hash = keccak_256(preimage);
  return ethHex.encode(hash.slice(12));
}
/** Derives the V2 pair contract address for two tokens via CREATE2 (no on-chain call). */
export function pairAddress(a: string, b: string, factory: string = FACTORY_ADDRESS): string {
  // This is completely broken: '0x11' '0x11' will return '0x1111'. But this is how it works in sdk.
  const data = concatBytes(...uni.sortTokens(a, b).map((i) => ethHex.decode(i)));
  return create2(ethHex.decode(factory), keccak_256(data), INIT_CODE_HASH);
}
/** Fetches pair reserves, returned in (a, b) argument order regardless of on-chain token order. */
async function reserves(net: IWeb3Provider, a: string, b: string): Promise<[bigint, bigint]> {
  a = uni.wrapContract(a);
  b = uni.wrapContract(b);
  const contract = createContract(PAIR_CONTRACT, net, pairAddress(a, b));
  const res = await contract.getReserves.call();
  // reserve0 belongs to the lexicographically smaller token address.
  return a < b ? [res.reserve0, res.reserve1] : [res.reserve1, res.reserve0];
}
// Constant-product quote with the 0.3% fee (997/1000):
// - amountIn set: returns amountOut, how many tokenB user gets for amountIn of tokenA
// - amountOut set: returns amountIn, how many tokenA user should send to get exact
//   amountOut of tokenB
export function amount(
  reserveIn: bigint,
  reserveOut: bigint,
  amountIn?: bigint,
  amountOut?: bigint
): bigint {
  const FEE_NUM = BigInt(997);
  const FEE_DEN = BigInt(1000);
  if (amountIn && amountOut) throw new Error('uniswap.amount: provide only one amount');
  if (!reserveIn || !reserveOut || (amountOut && amountOut >= reserveOut))
    throw new Error('Uniswap: Insufficient reserves');
  if (amountIn) {
    // Exact input: out = in*997*Rout / (Rin*1000 + in*997), floored.
    const inWithFee = amountIn * FEE_NUM;
    const out = (inWithFee * reserveOut) / (reserveIn * FEE_DEN + inWithFee);
    if (out === BigInt(0) || out >= reserveOut) throw new Error('Uniswap: Insufficient reserves');
    return out;
  }
  if (!amountOut) throw new Error('uniswap.amount: provide only one amount');
  // Exact output: in = Rin*out*1000 / ((Rout - out)*997) + 1 (round up).
  return (reserveIn * amountOut * FEE_DEN) / ((reserveOut - amountOut) * FEE_NUM) + BigInt(1);
}
// A quoted route: hop contracts plus the resolved input/output amounts.
export type Path = { path: string[]; amountIn: bigint; amountOut: bigint };
/**
 * Finds the best V2 route between two tokens: quotes the direct pair and every
 * two-hop route through COMMON_BASES in parallel, drops failures (missing pairs,
 * insufficient reserves), and picks the largest output (or smallest input).
 * Exactly one of amountIn/amountOut must be provided.
 */
async function bestPath(
  net: IWeb3Provider,
  tokenA: string,
  tokenB: string,
  amountIn?: bigint,
  amountOut?: bigint
): Promise<Path> {
  if ((amountIn && amountOut) || (!amountIn && !amountOut))
    throw new Error('uniswap.bestPath: provide only one amount');
  const wA = uni.wrapContract(tokenA);
  const wB = uni.wrapContract(tokenB);
  let resP: Promise<Path>[] = [];
  // Direct pair
  resP.push(
    (async () => {
      const pairAmount = amount(...(await reserves(net, tokenA, tokenB)), amountIn, amountOut);
      return {
        path: [wA, wB],
        amountIn: amountIn ? amountIn : pairAmount,
        amountOut: amountOut ? amountOut : pairAmount,
      };
    })()
  );
  // Two-hop candidates through each common base (excluding the endpoints themselves).
  const BASES: (ContractInfo & { contract: string })[] = uni.COMMON_BASES.filter(
    (c) => c && c.contract && c.contract !== wA && c.contract !== wB
  ) as (ContractInfo & { contract: string })[];
  for (let c of BASES) {
    resP.push(
      (async () => {
        const [rAC, rCB] = await Promise.all([
          reserves(net, wA, c.contract),
          reserves(net, c.contract, wB),
        ]);
        const path = [wA, c.contract, wB];
        if (amountIn)
          return { path, amountIn, amountOut: amount(...rCB, amount(...rAC, amountIn)) };
        else if (amountOut) {
          // Work backwards: required input of the second hop becomes the
          // desired output of the first hop.
          return {
            path,
            amountOut,
            amountIn: amount(...rAC, undefined, amount(...rCB, undefined, amountOut)),
          };
        } else throw new Error('Impossible invariant');
      })()
    );
  }
  // ignore_errors=true: unusable routes become undefined and are filtered out.
  let res: Path[] = ((await uni.awaitDeep(resP, true)) as any).filter((i: Path) => !!i);
  // biggest output or smallest input
  res.sort((a, b) => Number(amountIn ? b.amountOut - a.amountOut : a.amountIn - b.amountIn));
  if (!res.length) throw new Error('uniswap: cannot find path');
  return res[0];
}
// Offline router encoder (no provider): used purely for calldata construction.
const ROUTER_CONTRACT = createContract(UNISWAP_V2_ROUTER, undefined, UNISWAP_V2_ROUTER_CONTRACT);
const TX_DEFAULT_OPT = {
  ...uni.DEFAULT_SWAP_OPT,
  feeOnTransfer: false, // have no idea what it is
};
/**
 * Builds V2 router calldata for a quoted route.
 * Selects the swap method from (exact in/out, ETH in/out, feeOnTransfer),
 * applies slippage to the non-exact side, and returns {to, value, data, allowance}
 * where `allowance` is the ERC-20 approval the sender must grant (undefined for ETH input).
 * Exactly one of amountIn/amountOut must be provided.
 */
export function txData(
  to: string,
  input: string,
  output: string,
  path: Path,
  amountIn?: bigint,
  amountOut?: bigint,
  opt: {
    ttl: number;
    deadline?: number;
    slippagePercent: number;
    feeOnTransfer: boolean;
  } = TX_DEFAULT_OPT
): {
  to: string;
  value: bigint;
  data: any;
  allowance:
    | {
        token: string;
        amount: bigint;
      }
    | undefined;
} {
  opt = { ...TX_DEFAULT_OPT, ...opt };
  if (!uni.isValidUniAddr(input) || !uni.isValidUniAddr(output) || !uni.isValidEthAddr(to))
    throw new Error('Invalid address');
  if (input === 'eth' && output === 'eth') throw new Error('Both input and output is ETH!');
  // ETH legs must line up with the WETH endpoints of the quoted path.
  if (input === 'eth' && path.path[0] !== uni.WETH)
    throw new Error('Input is ETH but path starts with different contract');
  if (output === 'eth' && path.path[path.path.length - 1] !== uni.WETH)
    throw new Error('Output is ETH but path ends with different contract');
  if ((amountIn && amountOut) || (!amountIn && !amountOut))
    throw new Error('uniswap.txData: provide only one amount');
  if (amountOut && opt.feeOnTransfer) throw new Error('Exact output + feeOnTransfer is impossible');
  // e.g. swapExactETHForTokens / swapTokensForExactETH / ...SupportingFeeOnTransferTokens
  const method = ('swap' +
    (amountIn ? 'Exact' : '') +
    (input === 'eth' ? 'ETH' : 'Tokens') +
    'For' +
    (amountOut ? 'Exact' : '') +
    (output === 'eth' ? 'ETH' : 'Tokens') +
    (opt.feeOnTransfer ? 'SupportingFeeOnTransferTokens' : '')) as keyof typeof ROUTER_CONTRACT;
  if (!(method in ROUTER_CONTRACT)) throw new Error('Invalid method');
  // Absolute unix deadline; defaults to now + ttl seconds.
  const deadline = opt.deadline ? opt.deadline : Math.floor(Date.now() / 1000) + opt.ttl;
  // Slippage guards: pay at most amountInMax / receive at least amountOutMin.
  const amountInMax = uni.addPercent(path.amountIn, opt.slippagePercent);
  const amountOutMin = uni.addPercent(path.amountOut, -opt.slippagePercent);
  // TODO: remove any
  const data = (ROUTER_CONTRACT as any)[method].encodeInput({
    amountInMax,
    amountOutMin,
    amountIn,
    amountOut,
    to,
    deadline,
    path: path.path,
  });
  // ETH input is attached as tx value; token input needs a router allowance instead.
  const amount = amountIn ? amountIn : amountInMax;
  const value = input === 'eth' ? amount : BigInt(0);
  const allowance = input === 'eth' ? undefined : { token: input, amount };
  return { to: UNISWAP_V2_ROUTER_CONTRACT, value, data, allowance };
}
// Here goes Exchange API. Everything above is SDK. Supports almost everything from official sdk except liquidity stuff.
/** Exchange adapter for the Uniswap V2 router; delegates to the SDK helpers above. */
export default class UniswapV2 extends uni.UniswapAbstract {
  name = 'Uniswap V2';
  contract: string = UNISWAP_V2_ROUTER_CONTRACT;
  /** Best exact-input route (direct pair or via a common base). */
  bestPath(fromCoin: string, toCoin: string, inputAmount: bigint): Promise<Path> {
    return bestPath(this.net, fromCoin, toCoin, inputAmount);
  }
  /** Router calldata for a chosen route; caller options are layered over TX_DEFAULT_OPT. */
  txData(
    toAddress: string,
    fromCoin: string,
    toCoin: string,
    path: any,
    inputAmount?: bigint,
    outputAmount?: bigint,
    opt: uni.SwapOpt = uni.DEFAULT_SWAP_OPT
  ): any {
    const merged = { ...TX_DEFAULT_OPT, ...opt };
    return txData(toAddress, fromCoin, toCoin, path, inputAmount, outputAmount, merged);
  }
}

229
dev/env/node_modules/micro-eth-signer/src/net/uniswap-v3.ts generated vendored Executable file
View File

@@ -0,0 +1,229 @@
import { concatBytes } from '@noble/hashes/utils';
import { type ContractInfo, createContract } from '../abi/decoder.ts';
import { default as UNISWAP_V3_ROUTER, UNISWAP_V3_ROUTER_CONTRACT } from '../abi/uniswap-v3.ts';
import { type IWeb3Provider, ethHex } from '../utils.ts';
import * as uni from './uniswap-common.ts';
const ADDRESS_ZERO = '0x0000000000000000000000000000000000000000';
// Mainnet Uniswap V3 Quoter contract (read-only price quotes).
const QUOTER_ADDRESS = '0xb27308f9F90D607463bb33eA1BeBb41C27CE5AB6';
// Quoter ABI: single-hop (…Single, explicit tokens+fee) and multi-hop
// (packed byte path) variants for both exact-input and exact-output quotes.
const QUOTER_ABI = [
  {
    type: 'function',
    name: 'quoteExactInput',
    inputs: [
      { name: 'path', type: 'bytes' },
      { name: 'amountIn', type: 'uint256' },
    ],
    outputs: [{ name: 'amountOut', type: 'uint256' }],
  },
  {
    type: 'function',
    name: 'quoteExactInputSingle',
    inputs: [
      { name: 'tokenIn', type: 'address' },
      { name: 'tokenOut', type: 'address' },
      { name: 'fee', type: 'uint24' },
      { name: 'amountIn', type: 'uint256' },
      { name: 'sqrtPriceLimitX96', type: 'uint160' },
    ],
    outputs: [{ name: 'amountOut', type: 'uint256' }],
  },
  {
    type: 'function',
    name: 'quoteExactOutput',
    inputs: [
      { name: 'path', type: 'bytes' },
      { name: 'amountOut', type: 'uint256' },
    ],
    outputs: [{ name: 'amountIn', type: 'uint256' }],
  },
  {
    type: 'function',
    name: 'quoteExactOutputSingle',
    inputs: [
      { name: 'tokenIn', type: 'address' },
      { name: 'tokenOut', type: 'address' },
      { name: 'fee', type: 'uint24' },
      { name: 'amountOut', type: 'uint256' },
      { name: 'sqrtPriceLimitX96', type: 'uint160' },
    ],
    outputs: [{ name: 'amountIn', type: 'uint256' }],
  },
] as const;
// V3 fee tiers in hundredths of a bip (500 = 0.05%, 3000 = 0.3%, 10000 = 1%).
export const Fee: Record<string, number> = {
  LOW: 500,
  MEDIUM: 3000,
  HIGH: 10000,
};
// A candidate route: either a single pool (fee + token pair `p`) or a packed
// multi-hop `path`; amountIn/amountOut are filled in later by the quoter.
type Route = { path?: Uint8Array; fee?: number; amountIn?: bigint; amountOut?: bigint; p?: any };
/**
 * Enumerates candidate routes between tokens a and b: one single-hop route per
 * fee tier plus every two-hop route through a COMMON_BASES token (all fee
 * combinations). Multi-hop paths use the packed token/fee/token byte encoding
 * the quoter expects; exact-output quotes need the path reversed.
 */
function basePaths(a: string, b: string, exactOutput: boolean = false) {
  const routes: Route[] = [];
  for (const tier of Object.keys(Fee)) routes.push({ fee: Fee[tier], p: [a, b] });
  const wA = uni.wrapContract(a);
  const wB = uni.wrapContract(b);
  const BASES: (ContractInfo & { contract: string })[] = uni.COMMON_BASES.filter(
    (c) => c && c.contract && c.contract !== wA && c.contract !== wB
  ) as (ContractInfo & { contract: string })[];
  // Fee tier as 3-byte big-endian hex (uint24), per the packed-path encoding.
  const packFee = (n: string) => Fee[n].toString(16).padStart(6, '0');
  for (const base of BASES) {
    for (const fee1 of Object.keys(Fee)) {
      for (const fee2 of Object.keys(Fee)) {
        const segments = [wA, packFee(fee1), base.contract, packFee(fee2), wB].map((i) =>
          ethHex.decode(i)
        );
        const ordered = exactOutput ? segments.reverse() : segments;
        routes.push({ path: concatBytes(...ordered) });
      }
    }
  }
  return routes;
}
/**
 * Quotes every candidate route from basePaths() against the on-chain Quoter
 * in parallel (failed quotes ignored) and returns the best one: largest
 * output for exact-input, smallest input for exact-output.
 * Exactly one of amountIn/amountOut must be provided.
 */
async function bestPath(
  net: IWeb3Provider,
  a: string,
  b: string,
  amountIn?: bigint,
  amountOut?: bigint
) {
  if ((amountIn && amountOut) || (!amountIn && !amountOut))
    throw new Error('uniswapV3.bestPath: provide only one amount');
  const quoter = createContract(QUOTER_ABI, net, QUOTER_ADDRESS);
  let paths = basePaths(a, b, !!amountOut);
  for (let i of paths) {
    if (!i.path && !i.fee) continue;
    const opt = { ...i, tokenIn: a, tokenOut: b, amountIn, amountOut, sqrtPriceLimitX96: 0 };
    // Multi-hop routes use the packed-path quoter methods; single pools use …Single.
    const method = 'quoteExact' + (amountIn ? 'Input' : 'Output') + (i.path ? '' : 'Single');
    // TODO: remove any
    // Stores the pending promise; awaitDeep below resolves them all at once.
    i[amountIn ? 'amountOut' : 'amountIn'] = (quoter as any)[method].call(opt);
  }
  paths = (await uni.awaitDeep(paths, true)) as any;
  paths = paths.filter((i) => i.amountIn || i.amountOut);
  paths.sort((a: any, b: any) =>
    Number(amountIn ? b.amountOut - a.amountOut : a.amountIn - b.amountIn)
  );
  if (!paths.length) throw new Error('uniswap: cannot find path');
  return paths[0];
}
// Offline V3 router encoder (no provider): used purely for calldata construction.
const ROUTER_CONTRACT = createContract(UNISWAP_V3_ROUTER, undefined, UNISWAP_V3_ROUTER_CONTRACT);
// V3 tx options: slippage/ttl plus optional price limit, explicit deadline,
// and an optional protocol fee taken from the output.
export type TxOpt = {
  slippagePercent: number;
  ttl: number;
  sqrtPriceLimitX96?: bigint;
  deadline?: number;
  fee?: { fee: number; to: string };
};
/**
 * Builds V3 router calldata for a quoted route (exactInput/exactOutput,
 * Single for one-pool routes), wrapping in a multicall when the router must
 * hold funds (ETH output or protocol fee). Returns {to, value, data, allowance}
 * where `allowance` is the ERC-20 approval required for token input.
 * Exactly one of amountIn/amountOut must be provided.
 */
export function txData(
  to: string,
  input: string,
  output: string,
  route: Route,
  amountIn?: bigint,
  amountOut?: bigint,
  opt: TxOpt = uni.DEFAULT_SWAP_OPT
): {
  to: string;
  value: bigint;
  data: Uint8Array;
  allowance:
    | {
        token: string;
        amount: bigint;
      }
    | undefined;
} {
  opt = { ...uni.DEFAULT_SWAP_OPT, ...opt };
  const err = 'Uniswap v3: ';
  if (!uni.isValidUniAddr(input)) throw new Error(err + 'invalid input address');
  if (!uni.isValidUniAddr(output)) throw new Error(err + 'invalid output address');
  if (!uni.isValidEthAddr(to)) throw new Error(err + 'invalid to address');
  if (opt.fee && !uni.isValidUniAddr(opt.fee.to))
    throw new Error(err + 'invalid fee recepient addresss');
  if (input === 'eth' && output === 'eth')
    throw new Error(err + 'both input and output cannot be eth');
  if ((amountIn && amountOut) || (!amountIn && !amountOut))
    throw new Error(err + 'specify either amountIn or amountOut, but not both');
  if (
    (amountIn && !route.amountOut) ||
    (amountOut && !route.amountIn) ||
    (!route.fee && !route.path)
  )
    throw new Error(err + 'invalid route');
  if (route.path && opt.sqrtPriceLimitX96)
    throw new Error(err + 'sqrtPriceLimitX96 on multi-hop trade');
  // Absolute unix deadline; defaults to now + ttl seconds.
  // Fix: previously ttl was never added, so the default deadline was "now"
  // and the swap would be expired on arrival (compare the V2 txData).
  const deadline = opt.deadline || Math.floor(Date.now() / 1000) + opt.ttl;
  // flags for whether funds should be send first to the router
  const routerMustCustody = output === 'eth' || !!opt.fee;
  // TODO: remove "as bigint"
  let args = {
    ...route,
    tokenIn: uni.wrapContract(input),
    tokenOut: uni.wrapContract(output),
    recipient: routerMustCustody ? ADDRESS_ZERO : to,
    deadline,
    amountIn: (amountIn || route.amountIn) as bigint,
    amountOut: (amountOut || route.amountOut) as bigint,
    sqrtPriceLimitX96: opt.sqrtPriceLimitX96 || BigInt(0),
    amountInMaximum: undefined as bigint | undefined,
    amountOutMinimum: undefined as bigint | undefined,
  };
  // Slippage guards on the non-exact side.
  args.amountInMaximum = uni.addPercent(args.amountIn, opt.slippagePercent);
  args.amountOutMinimum = uni.addPercent(args.amountOut, -opt.slippagePercent);
  const method = ('exact' + (amountIn ? 'Input' : 'Output') + (!args.path ? 'Single' : '')) as
    | 'exactInput'
    | 'exactOutput'
    | 'exactInputSingle'
    | 'exactOutputSingle';
  // TODO: remove unknown
  const calldatas = [(ROUTER_CONTRACT[method].encodeInput as (v: unknown) => Uint8Array)(args)];
  // Exact-output ETH swaps may not spend all attached value; refund the remainder.
  if (input === 'eth' && amountOut) calldatas.push(ROUTER_CONTRACT['refundETH'].encodeInput());
  // unwrap
  if (routerMustCustody) {
    calldatas.push(
      (ROUTER_CONTRACT as any)[
        (output === 'eth' ? 'unwrapWETH9' : 'sweepToken') + (opt.fee ? 'WithFee' : '')
      ].encodeInput({
        token: uni.wrapContract(output),
        amountMinimum: args.amountOutMinimum,
        recipient: to,
        // NOTE(review): assumes opt.fee.fee is a fraction scaled by 10000 to bips — confirm.
        feeBips: opt.fee && opt.fee.fee * 10000,
        feeRecipient: opt.fee && opt.fee.to,
      })
    );
  }
  const data =
    calldatas.length === 1 ? calldatas[0] : ROUTER_CONTRACT['multicall'].encodeInput(calldatas);
  // ETH input is attached as tx value; token input needs a router allowance instead.
  const value = input === 'eth' ? (amountIn ? amountIn : args.amountInMaximum) : BigInt(0);
  const allowance =
    input !== 'eth'
      ? { token: input, amount: amountIn ? amountIn : args.amountInMaximum }
      : undefined;
  return { to: UNISWAP_V3_ROUTER_CONTRACT, value, data, allowance };
}
// Here goes Exchange API. Everything above is SDK.
/** Exchange adapter for the Uniswap V3 router; delegates to the SDK helpers above. */
export default class UniswapV3 extends uni.UniswapAbstract {
  name = 'Uniswap V3';
  contract: string = UNISWAP_V3_ROUTER_CONTRACT;
  /** Best exact-input route across fee tiers and common-base hops. */
  bestPath(fromCoin: string, toCoin: string, inputAmount: bigint): Promise<Route> {
    return bestPath(this.net, fromCoin, toCoin, inputAmount);
  }
  /** Router calldata for a chosen route; caller options are layered over the defaults. */
  txData(
    toAddress: string,
    fromCoin: string,
    toCoin: string,
    path: any,
    inputAmount?: bigint,
    outputAmount?: bigint,
    opt: uni.SwapOpt = uni.DEFAULT_SWAP_OPT
  ): any {
    const merged = { ...uni.DEFAULT_SWAP_OPT, ...opt };
    return txData(toAddress, fromCoin, toCoin, path, inputAmount, outputAmount, merged);
  }
}

3
dev/env/node_modules/micro-eth-signer/src/package.json generated vendored Executable file
View File

@@ -0,0 +1,3 @@
{
"type": "module"
}

104
dev/env/node_modules/micro-eth-signer/src/rlp.ts generated vendored Executable file
View File

@@ -0,0 +1,104 @@
import { numberToVarBytesBE } from '@noble/curves/abstract/utils';
import * as P from 'micro-packed';
import { isBytes } from './utils.ts';
// Spec-compliant RLP in 100 lines of code.
export type RLPInput = string | number | Uint8Array | bigint | RLPInput[] | null;
// length: first 3 bit !== 111 ? 6 bit length : 3bit lenlen
// Lengths < 56 fit directly in the low 6 bits of the tag byte; longer lengths
// set the 0b111 marker, then 3 bits encode (byteCount - 1), then the length
// itself as big-endian bytes with no leading zeros (canonical form enforced).
const RLPLength = P.wrap({
  encodeStream(w: P.Writer, value: number) {
    if (value < 56) return w.bits(value, 6);
    w.bits(0b111, 3);
    const length = P.U32BE.encode(value);
    // Strip leading zero bytes from the 4-byte big-endian encoding.
    let pos = 0;
    for (; pos < length.length; pos++) if (length[pos] !== 0) break;
    w.bits(4 - pos - 1, 3);
    w.bytes(length.slice(pos));
  },
  decodeStream(r: P.Reader): number {
    const start = r.bits(3);
    if (start !== 0b111) return (start << 3) | r.bits(3);
    const len = r.bytes(r.bits(3) + 1);
    // Reject non-canonical encodings: loop runs at most once — either the
    // first byte is non-zero (break) or we throw on the leading zero.
    for (let i = 0; i < len.length; i++) {
      if (len[i]) break;
      throw new Error('Wrong length encoding with leading zeros');
    }
    const res = P.int(len.length).decode(len);
    if (res <= 55) throw new Error('RLPLength: less than 55, but used multi-byte flag');
    return res;
  },
});
// Recursive struct definition
// Tagged-union view of the RLP wire format: a single byte < 0x80, or a
// "complex" item which is either a byte string or a list of nested items.
export type InternalRLP =
  | { TAG: 'byte'; data: number }
  | {
      TAG: 'complex';
      data: { TAG: 'string'; data: Uint8Array } | { TAG: 'list'; data: InternalRLP[] };
    };
// First bit selects byte vs complex; second bit selects string vs list.
// Lists are length-prefixed byte spans containing recursively encoded items.
const rlpInner = P.tag(P.map(P.bits(1), { byte: 0, complex: 1 }), {
  byte: P.bits(7),
  complex: P.tag(P.map(P.bits(1), { string: 0, list: 1 }), {
    string: P.bytes(RLPLength),
    list: P.prefix(
      RLPLength,
      P.array(
        null,
        // P.lazy breaks the definition cycle for the recursive item type.
        P.lazy((): P.CoderType<InternalRLP> => rlpInner)
      )
    ),
  }),
});
// Helpers for string inputs: '0x…' strings decode as hex, everything else as UTF-8.
const phex = P.hex(null);
const pstr = P.string(null);
const empty = Uint8Array.from([]);
/**
 * RLP parser.
 * Real type of rlp is `Item = Uint8Array | Item[]`.
 * Strings/number encoded to Uint8Array, but not decoded back: type information is lost.
 */
export const RLP: P.CoderType<RLPInput> = P.apply(rlpInner, {
  // Wire representation -> user value (always Uint8Array / nested arrays).
  encode(from: InternalRLP): RLPInput {
    if (from.TAG === 'byte') return new Uint8Array([from.data]);
    if (from.TAG !== 'complex') throw new Error('RLP.encode: unexpected type');
    const complex = from.data;
    if (complex.TAG === 'string') {
      // Canonical form: a single byte < 0x80 must use the byte encoding, not a string.
      if (complex.data.length === 1 && complex.data[0] < 128)
        throw new Error('RLP.encode: wrong string length encoding, should use single byte mode');
      return complex.data;
    }
    if (complex.TAG === 'list') return complex.data.map((i) => this.encode(i));
    throw new Error('RLP.encode: unknown TAG');
  },
  // User value -> wire representation; numbers/bigints/strings are normalized
  // to byte strings (null and 0 become the empty string).
  decode(data: RLPInput): InternalRLP {
    if (data == null) return this.decode(empty);
    switch (typeof data) {
      case 'object':
        if (isBytes(data)) {
          if (data.length === 1) {
            const head = data[0];
            if (head < 128) return { TAG: 'byte', data: head };
          }
          return { TAG: 'complex', data: { TAG: 'string', data: data } };
        }
        if (Array.isArray(data))
          return { TAG: 'complex', data: { TAG: 'list', data: data.map((i) => this.decode(i)) } };
        throw new Error('RLP.encode: unknown type');
      case 'number':
        if (data < 0) throw new Error('RLP.encode: invalid integer as argument, must be unsigned');
        if (data === 0) return this.decode(empty);
        return this.decode(numberToVarBytesBE(data));
      case 'bigint':
        if (data < BigInt(0))
          throw new Error('RLP.encode: invalid integer as argument, must be unsigned');
        return this.decode(numberToVarBytesBE(data));
      case 'string':
        return this.decode(data.startsWith('0x') ? phex.encode(data) : pstr.encode(data));
      default:
        throw new Error('RLP.encode: unknown type');
    }
  },
});

1786
dev/env/node_modules/micro-eth-signer/src/ssz.ts generated vendored Executable file

File diff suppressed because it is too large Load Diff

602
dev/env/node_modules/micro-eth-signer/src/tx.ts generated vendored Executable file
View File

@@ -0,0 +1,602 @@
import * as P from 'micro-packed';
import { addr } from './address.ts';
import { RLP } from './rlp.ts';
import { amounts, ethHex, isBytes, isObject } from './utils.ts';
// Transaction parsers
const _0n = BigInt(0);
export type AnyCoder = Record<string, P.Coder<any, any>>;
export type AnyCoderStream = Record<string, P.CoderType<any>>;
// EIP-2718 (very ambigious)
// new tx: [0, 0x7f]
// legacy: [0xc0, 0xfe]
// reserved: 0xff
// Union of {type, data} pairs, one per registered tx version coder.
type VersionType<V extends AnyCoderStream> = {
  [K in keyof V]: { type: K; data: P.UnwrapCoder<V[K]> };
}[keyof V];
// Decoded payload type for a given tx type. NOTE(review): TxType/TxVersions
// are declared elsewhere in this file (outside this excerpt).
export type TxCoder<T extends TxType> = P.UnwrapCoder<(typeof TxVersions)[T]>;
/**
 * Builds an EIP-2718 envelope coder from a map of per-type tx coders.
 * Typed transactions are prefixed with their version byte; 'legacy' is
 * emitted bare (its RLP payload starts at 0xc0+, outside the typed range).
 */
const createTxMap = <T extends AnyCoderStream>(versions: T): P.CoderType<VersionType<T>> => {
  const ent = Object.entries(versions);
  // 'legacy' => {type, ver, coder}
  const typeMap = Object.fromEntries(ent.map(([type, coder], ver) => [type, { type, ver, coder }]));
  // '0' => {type, ver, coder}
  const verMap = Object.fromEntries(ent.map(([type, coder], ver) => [ver, { type, ver, coder }]));
  return P.wrap({
    encodeStream(w: P.Writer, value: VersionType<T>) {
      const t = value.type as string;
      if (!typeMap.hasOwnProperty(t)) throw new Error(`txVersion: wrong type=${t}`);
      const curr = typeMap[t];
      if (t !== 'legacy') w.byte(curr.ver); // version byte only for typed txs
      curr.coder.encodeStream(w, value.data);
    },
    decodeStream(r: P.Reader) {
      const v = r.byte(true); // peek without consuming
      if (v === 0xff) throw new Error('reserved version 0xff');
      // TODO: version=0 is legacy, but it is never wrapped in test vectors
      if (v === 0x00) throw new Error('version=0 unsupported');
      if (0 <= v && v <= 0x7f) {
        if (!verMap.hasOwnProperty(v.toString())) throw new Error(`wrong version=${v}`);
        const curr = verMap[v];
        r.byte(false); // skip first byte
        const d = curr.coder.decodeStream(r);
        return { type: curr.type, data: d };
      }
      // Bytes >= 0x80 start an RLP item: fall through to the legacy coder.
      return { type: 'legacy', data: typeMap.legacy.coder.decodeStream(r) };
    },
  });
};
/**
 * Static struct could have been extracted into micro-packed, but we need a specific behavior:
 * - optional fields maybe either all present or all absent, enforced by type
 * - optional fields change the length of underlying array
 */
// Value is either absent or a bigint.
const isOptBig = (a: unknown) => a === undefined || typeof a === 'bigint';
// Value is either absent or bigint zero (despite the name, checks undefined, not null).
const isNullOr0 = (a: unknown) => a === undefined || a === BigInt(0);
/** Throws unless yParity is 0, 1, or undefined (treated as 0 — TODO from original: confirm that default). */
function assertYParityValid(elm: number) {
  const parity = elm === undefined ? 0 : elm;
  if (parity !== 0 && parity !== 1)
    throw new Error(`yParity wrong value=${parity} (${typeof parity})`);
}
// We don't know chainId when specific field coded yet.
const addrCoder = ethHex;
// Bytes32: VersionedHash, AccessListKey
// Validates that a value is exactly 32 bytes before hex conversion.
function ensure32(b: Uint8Array): Uint8Array {
  if (!isBytes(b) || b.length !== 32) throw new Error('expected 32 bytes');
  return b;
}
// 32-byte value <-> 0x-prefixed hex string, length-checked both ways.
const Bytes32: P.Coder<Uint8Array, string> = {
  encode: (from) => ethHex.encode(ensure32(from)),
  decode: (to) => ensure32(ethHex.decode(to)),
};
type VRS = Partial<{ v: bigint; r: bigint; s: bigint }>;
type YRS = Partial<{ chainId: bigint; yParity: number; r: bigint; s: bigint }>;
// Process v as (chainId, yParity) pair. Ethers.js-inspired logic:
// - v=27/28 -> no chainId (pre eip155)
// - r & s == 0 -> v = chainId
// Non-standard, but there is no other way to save chainId for unsignedTx.
// Case: unsigned tx for cold wallet for different chains, like mainnet & testnet.
// - otherwise v = yParity + 2*chainId + 35
// - allows to keep legacy logic here, instead of copying to Transaction
export const legacySig = {
encode: (data: VRS) => {
const { v, r, s } = data;
if (v === undefined) return { chainId: undefined };
// TODO: handle (invalid?) negative v
if (typeof v !== 'bigint') throw new Error(`invalid v type=${typeof v}`);
if ((r === undefined && s === undefined) || (r === _0n && s === _0n)) return { chainId: v };
if (v === BigInt(27)) return { yParity: 0, chainId: undefined, r, s };
if (v === BigInt(28)) return { yParity: 1, chainId: undefined, r, s };
if (v < BigInt(35)) throw new Error(`wrong v=${v}`);
const v2 = v - BigInt(35);
return { chainId: v2 >> BigInt(1), yParity: Number(v2 & BigInt(1)), r, s };
},
decode: (data: YRS) => {
aobj(data);
const { chainId, yParity, r, s } = data;
if (!isOptBig(chainId)) throw new Error(`wrong chainId type=${typeof chainId}`);
if (!isOptBig(r)) throw new Error(`wrong r type=${typeof r}`);
if (!isOptBig(s)) throw new Error(`wrong s type=${typeof s}`);
if (yParity !== undefined && typeof yParity !== 'number')
throw new Error(`wrong yParity type=${typeof chainId}`);
if (yParity === undefined) {
if (chainId !== undefined) {
if ((r !== undefined && r !== _0n) || (s !== undefined && s !== _0n))
throw new Error(`wrong unsigned legacy r=${r} s=${s}`);
return { v: chainId, r: _0n, s: _0n };
}
// no parity, chainId, but r, s exists
if ((r !== undefined && r !== _0n) || (s !== undefined && s !== _0n))
throw new Error(`wrong unsigned legacy r=${r} s=${s}`);
return {};
}
// parity exists, which means r & s should exist too!
if (isNullOr0(r) || isNullOr0(s)) throw new Error(`wrong unsigned legacy r=${r} s=${s}`);
assertYParityValid(yParity);
const v =
chainId !== undefined
? BigInt(yParity) + (chainId * BigInt(2) + BigInt(35))
: BigInt(yParity) + BigInt(27);
return { v, r, s };
},
} as P.Coder<VRS, YRS>;
// Big-endian unsigned integers (reverse of micro-packed's little-endian bigint).
const U64BE = P.coders.reverse(P.bigint(8, false, false, false));
const U256BE = P.coders.reverse(P.bigint(32, false, false, false));
// Small coder utils
// TODO: seems generic enought for packed? or RLP (seems useful for structured encoding/decoding of RLP stuff)
// Basic array coder
/** Lifts an element coder to an array coder (element-wise encode/decode). */
const array = <F, T>(coder: P.Coder<F, T>): P.Coder<F[], T[]> => ({
  encode(from: F[]) {
    if (!Array.isArray(from)) throw new Error('expected array');
    const out: T[] = [];
    for (const item of from) out.push(coder.encode(item));
    return out;
  },
  decode(to: T[]) {
    if (!Array.isArray(to)) throw new Error('expected array');
    const out: F[] = [];
    for (const item of to) out.push(coder.decode(item));
    return out;
  },
});
// tuple -> struct
/** Adapts a positional tuple coder into a named-field object coder (field order = key order). */
const struct = <
  Fields extends Record<string, P.Coder<any, any>>,
  FromTuple extends {
    [K in keyof Fields]: Fields[K] extends P.Coder<infer F, any> ? F : never;
  }[keyof Fields][],
  ToObject extends { [K in keyof Fields]: Fields[K] extends P.Coder<any, infer T> ? T : never },
>(
  fields: Fields
): P.Coder<FromTuple, ToObject> => ({
  encode(from: FromTuple) {
    if (!Array.isArray(from)) throw new Error('expected array');
    const keys = Object.keys(fields);
    if (keys.length !== from.length) throw new Error('wrong array length');
    const out: Record<string, any> = {};
    keys.forEach((key, idx) => {
      out[key] = fields[key].encode(from[idx]);
    });
    return out as ToObject;
  },
  decode(to: ToObject): FromTuple {
    if (!isObject(to)) throw new Error('wrong struct object');
    return Object.keys(fields).map((key) => fields[key].decode(to[key])) as FromTuple;
  },
});
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
// yParity serialized as a single byte; assertYParityValid restricts it to 0 or 1.
const yParityCoder = P.coders.reverse(
  P.validate(P.int(1, false, false, false), (elm) => {
    assertYParityValid(elm);
    return elm;
  })
);
// Extracts the decoded-side type of a packed coder.
type CoderOutput<F> = F extends P.Coder<any, infer T> ? T : never;
// Access list entry (used by eip2930+ txs below): [address, [storageKey, ...]] <-> { address, storageKeys }.
const accessListItem: P.Coder<
  (Uint8Array | Uint8Array[])[],
  {
    address: string;
    storageKeys: string[];
  }
> = struct({ address: addrCoder, storageKeys: array(Bytes32) });
export type AccessList = CoderOutput<typeof accessListItem>[];
// Unsigned authorization request: [chain_id, address, nonce] (used by eip7702 flows).
export const authorizationRequest: P.Coder<
  Uint8Array[],
  {
    chainId: bigint;
    address: string;
    nonce: bigint;
  }
> = struct({
  chainId: U256BE,
  address: addrCoder,
  nonce: U64BE,
});
// [chain_id, address, nonce, y_parity, r, s]
// Signed authorization tuple, as carried in the eip7702 tx authorizationList field.
const authorizationItem: P.Coder<
  Uint8Array[],
  {
    chainId: bigint;
    address: string;
    nonce: bigint;
    yParity: number;
    r: bigint;
    s: bigint;
  }
> = struct({
  chainId: U256BE,
  address: addrCoder,
  nonce: U64BE,
  yParity: yParityCoder,
  r: U256BE,
  s: U256BE,
});
export type AuthorizationItem = CoderOutput<typeof authorizationItem>;
export type AuthorizationRequest = CoderOutput<typeof authorizationRequest>;
/**
 * Field types, matching geth. Either u64 or u256.
 * Single source of truth: txStruct below looks field coders up here by name.
 */
const coders = {
  chainId: U256BE, // Can fit into u64 (curr max is 0x57a238f93bf), but geth uses bigint
  nonce: U64BE,
  gasPrice: U256BE,
  maxPriorityFeePerGas: U256BE,
  maxFeePerGas: U256BE,
  gasLimit: U64BE,
  to: addrCoder,
  value: U256BE, // "Decimal" coder can be used, but it's harder to work with
  data: ethHex,
  accessList: array(accessListItem),
  maxFeePerBlobGas: U256BE,
  blobVersionedHashes: array(Bytes32),
  yParity: yParityCoder,
  v: U256BE,
  r: U256BE,
  s: U256BE,
};
type Coders = typeof coders;
type CoderName = keyof Coders;
// Fields that only exist on signed transactions (see removeSig / validateFields).
const signatureFields = new Set(['v', 'yParity', 'r', 's'] as const);
type FieldType<T> = T extends P.Coder<any, infer U> ? U : T;
// Could be 'T | (T & O)', to make sure all partial fields either present or absent together
// But it would make accesing them impossible, because of typescript stuff:
type OptFields<T, O> = T & Partial<O>;
// Packed coder augmented with field metadata; produced by txStruct below.
type FieldCoder<C> = P.CoderType<C> & {
  fields: CoderName[];
  optionalFields: CoderName[];
  setOfAllFields: Set<CoderName | 'type'>;
};
/**
 * Deletes signature-related fields (v, yParity, r, s) from a raw tx object.
 * NOTE: mutates `raw` in place and returns the same object — copy it in
 * advance if the original must be preserved.
 */
export function removeSig(raw: TxCoder<any>): TxCoder<any> {
  for (const field of signatureFields) delete raw[field];
  return raw;
}
/**
 * Defines RLP transaction with fields taken from `coders`.
 * Returns a stream coder plus metadata (fields / optionalFields / setOfAllFields)
 * that validateFields and the legacy wrapper rely on.
 * @param reqf - required field names, in RLP serialization order
 * @param optf - optional trailing (signature) field names; all-or-nothing on decode
 * @example
 * const tx = txStruct(['nonce', 'gasPrice', 'value'] as const, ['v', 'r', 's'] as const)
 * tx.nonce.decode(...);
 */
const txStruct = <T extends readonly CoderName[], ST extends readonly CoderName[]>(
  reqf: T,
  optf: ST
): FieldCoder<
  OptFields<{ [K in T[number]]: FieldType<Coders[K]> }, { [K in ST[number]]: FieldType<Coders[K]> }>
> => {
  const allFields = reqf.concat(optf);
  // Check that all fields have known coders
  allFields.forEach((f) => {
    if (!coders.hasOwnProperty(f)) throw new Error(`coder for field ${f} is not defined`);
  });
  // Two tuple<->object coders: required-only (unsigned tx) and required+optional (signed tx).
  const reqS = struct(Object.fromEntries(reqf.map((i) => [i, coders[i]])));
  const allS = struct(Object.fromEntries(allFields.map((i) => [i, coders[i]])));
  // e.g. eip1559 txs have valid lengths of 9 or 12 (unsigned / signed)
  const reql = reqf.length;
  const optl = reql + optf.length;
  const optFieldAt = (i: number) => reql + i;
  const isEmpty = (item: any & { length: number }) => item.length === 0;
  // TX is a bunch of fields in specific order. Field like nonce must always be at the same index.
  // We walk through all indexes in proper order.
  const fcoder: any = P.wrap({
    encodeStream(w, raw: Record<string, any>) {
      // If at least one optional key is present, we add whole optional block
      const hasOptional = optf.some((f) => raw.hasOwnProperty(f));
      const sCoder = hasOptional ? allS : reqS;
      RLP.encodeStream(w, sCoder.decode(raw));
    },
    decodeStream(r): Record<string, any> {
      const decoded = RLP.decodeStream(r);
      if (!Array.isArray(decoded)) throw new Error('txStruct: expected array from inner coder');
      const length = decoded.length;
      // Only exactly-unsigned or exactly-signed field counts are acceptable.
      if (length !== reql && length !== optl)
        throw new Error(`txStruct: wrong inner length=${length}`);
      const sCoder = length === optl ? allS : reqS;
      // Reject a "signed" layout in which every signature slot is empty.
      if (length === optl && optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
        throw new Error('all optional fields empty');
      // @ts-ignore TODO: fix type (there can be null in RLP)
      return sCoder.encode(decoded);
    },
  });
  fcoder.fields = reqf;
  fcoder.optionalFields = optf;
  fcoder.setOfAllFields = new Set(allFields.concat(['type'] as any));
  return fcoder;
};
// Legacy (pre-EIP-2718) tx: 6 required fields plus optional trailing signature (v, r, s).
// prettier-ignore
const legacyInternal: FieldCoder<OptFields<{
  nonce: bigint;
  gasPrice: bigint;
  gasLimit: bigint;
  to: string;
  value: bigint;
  data: string;
}, {
  r: bigint;
  s: bigint;
  v: bigint;
}>> = txStruct([
  'nonce', 'gasPrice', 'gasLimit', 'to', 'value', 'data'] as const,
  ['v', 'r', 's'] as const);
type LegacyInternal = P.UnwrapCoder<typeof legacyInternal>;
// Public legacy shape: packed `v` replaced by explicit chainId/yParity (EIP-155 view).
type Legacy = Omit<LegacyInternal, 'v'> & { chainId?: bigint; yParity?: number };
// Wraps legacyInternal so users see (chainId, yParity, r, s) instead of the packed
// EIP-155 `v`. The v <-> (chainId, yParity) math lives in legacySig above.
const legacy = (() => {
  const res = P.apply(legacyInternal, {
    decode: (data: Legacy) => Object.assign({}, data, legacySig.decode(data)),
    encode: (data: LegacyInternal) => {
      // Work on a shallow copy — removeSig mutates its argument.
      const res = Object.assign({}, data);
      (res as any).chainId = undefined;
      if (data.v) {
        const newV = legacySig.encode(data);
        removeSig(res);
        Object.assign(res, newV);
      }
      return res as Legacy;
    },
  }) as FieldCoder<Legacy>;
  // Metadata must be re-attached: P.apply returns a fresh coder without txStruct's fields.
  res.fields = legacyInternal.fields.concat(['chainId'] as const);
  // v, r, s -> yParity, r, s
  // TODO: what about chainId?
  res.optionalFields = ['yParity', 'r', 's'];
  res.setOfAllFields = new Set(res.fields.concat(res.optionalFields, ['type'] as any));
  return res;
})();
// Typed (EIP-2718) transaction envelopes; field lists follow each EIP's RLP layout.
// prettier-ignore
const eip2930 = txStruct([
  'chainId', 'nonce', 'gasPrice', 'gasLimit', 'to', 'value', 'data', 'accessList'] as const,
  ['yParity', 'r', 's'] as const);
// prettier-ignore
const eip1559 = txStruct([
  'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList'] as const,
  ['yParity', 'r', 's'] as const);
// prettier-ignore
const eip4844 = txStruct([
  'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList',
  'maxFeePerBlobGas', 'blobVersionedHashes'] as const,
  ['yParity', 'r', 's'] as const);
// prettier-ignore
const eip7702 = txStruct([
  'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList',
  'authorizationList'] as const,
  ['yParity', 'r', 's'] as const);
// Supported tx versions. NOTE(review): the numeric type bytes in the comments are
// presumably assigned by createTxMap from this mapping — confirm there.
export const TxVersions = {
  legacy, // 0x00 (kinda)
  eip2930, // 0x01
  eip1559, // 0x02
  eip4844, // 0x03
  eip7702, // 0x04
};
// Versioned transaction coder over all TxVersions; decoded addresses get checksums applied.
export const RawTx = P.apply(createTxMap(TxVersions), {
  // NOTE: we apply checksum to addresses here, since chainId is not available inside coders
  // By construction 'to' field is decoded before anything about chainId is known
  encode: (data) => {
    data.data.to = addr.addChecksum(data.data.to, true);
    // Legacy txs have no accessList; all typed versions may carry one.
    if (data.type !== 'legacy' && data.data.accessList) {
      for (const item of data.data.accessList) {
        item.address = addr.addChecksum(item.address);
      }
    }
    // Only eip7702 carries an authorizationList.
    if (data.type === 'eip7702' && data.data.authorizationList) {
      for (const item of data.data.authorizationList) {
        item.address = addr.addChecksum(item.address);
      }
    }
    return data;
  },
  // Nothing to check here, is validated in validator
  decode: (data) => data,
});
/**
 * Unchecked TX for debugging. Returns raw Uint8Array-s.
 * Handles versions - plain RLP will crash on it.
 */
export const RlpTx: P.CoderType<{
  type: string;
  data: import('./rlp.js').RLPInput;
}> = createTxMap(Object.fromEntries(Object.keys(TxVersions).map((k) => [k, RLP])));
// Field-related utils
// Union of supported tx version names: 'legacy' | 'eip2930' | 'eip1559' | 'eip4844' | 'eip7702'.
export type TxType = keyof typeof TxVersions;
// prettier-ignore
// Basically all numbers. Can be useful if we decide to do converter from hex here
// const knownFieldsNoLeading0 = [
//   'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'value', 'yParity', 'r', 's'
// ] as const;
/** Asserts that `val` is a bigint; throws 'value must be bigint' otherwise. */
function abig(val: bigint) {
  if (typeof val === 'bigint') return;
  throw new Error('value must be bigint');
}
/** Asserts that `val` is a non-null object; throws 'object expected' otherwise. */
function aobj(val: Record<string, any>) {
  const ok = typeof val === 'object' && val != null;
  if (!ok) throw new Error('object expected');
}
/** Range check: throws unless min <= val <= max; NaN always fails. `err` overrides the range text. */
function minmax(val: bigint, min: bigint, max: bigint, err?: string): void;
function minmax(val: number, min: number, max: number, err?: string): void;
function minmax(
  val: number | bigint,
  min: number | bigint,
  max: number | bigint,
  err?: string
): void {
  const expected = err ? err : `>= ${min} and <= ${max}`;
  const outOfRange = Number.isNaN(val) || val < min || val > max;
  if (outOfRange) throw new Error(`must be ${expected}, not ${val}`);
}
// strict=true validates if human-entered value in UI is "sort of" valid
// for some new TX. For example, it's unlikely that the nonce would be 14 million.
// strict=false validates if machine-entered value, or something historical is valid.
type ValidationOpts = { strict: boolean; type: TxType; data: Record<string, any> };
// NOTE: non-strict validators can be removed (RawTx will handle that), but errors will be less user-friendly.
// On other hand, we twice per sig because tx is immutable
// data passed for composite checks (gasLimit * maxFeePerGas overflow and stuff) [not implemented yet]
// Per-field validators; each throws with a human-readable message on invalid input.
// Must be invoked as validators[field](...) — authorizationList relies on `this.nonce`.
const validators: Record<string, (num: any, { strict, type, data }: ValidationOpts) => void> = {
  nonce(num: bigint, { strict }: ValidationOpts) {
    abig(num);
    if (strict) minmax(num, _0n, amounts.maxNonce);
    else minmax(BigInt(num), _0n, BigInt(Number.MAX_SAFE_INTEGER)); // amounts.maxUint64
  },
  maxFeePerGas(num: bigint, { strict }: ValidationOpts) {
    abig(num);
    if (strict) minmax(num, BigInt(1), amounts.maxGasPrice, '>= 1 wei and < 10000 gwei');
    else minmax(num, _0n, amounts.maxUint64);
  },
  maxPriorityFeePerGas(num: bigint, { strict, data }: ValidationOpts) {
    abig(num);
    if (strict) minmax(num, _0n, amounts.maxGasPrice, '>= 1 wei and < 10000 gwei');
    else minmax(num, _0n, amounts.maxUint64, '>= 1 wei and < 10000 gwei');
    // The priority fee (tip) must not exceed the total fee cap.
    if (strict && data && typeof data.maxFeePerGas === 'bigint' && data.maxFeePerGas < num) {
      throw new Error(`cannot be bigger than maxFeePerGas=${data.maxFeePerGas}`);
    }
  },
  gasLimit(num: bigint, { strict }: ValidationOpts) {
    abig(num);
    if (strict) minmax(num, amounts.minGasLimit, amounts.maxGasLimit);
    else minmax(num, _0n, amounts.maxUint64);
  },
  to(address: string, { strict, data }: ValidationOpts) {
    if (!addr.isValid(address, true)) throw new Error('address checksum does not match');
    // Empty 'to' ('0x') means contract deployment, which requires init code in 'data'.
    if (strict && address === '0x' && !data.data)
      throw new Error('Empty address (0x) without contract deployment code');
  },
  value(num: bigint) {
    abig(num);
    minmax(num, _0n, amounts.maxAmount, '>= 0 and < 100M eth');
  },
  data(val: string, { strict, data }: ValidationOpts) {
    if (typeof val !== 'string') throw new Error('data must be string');
    if (strict) {
      // NOTE(review): compares hex-string length against maxDataSize — confirm whether the
      // limit is meant in bytes (which would need /2) or in hex characters.
      if (val.length > amounts.maxDataSize) throw new Error('data is too big: ' + val.length);
    }
    // NOTE: data is hex here
    if (data.to === '0x' && val.length > 2 * amounts.maxInitDataSize)
      throw new Error(`initcode is too big: ${val.length}`);
  },
  chainId(num: bigint, { strict, type }: ValidationOpts) {
    // chainId is optional for legacy transactions
    if (type === 'legacy' && num === undefined) return;
    abig(num);
    if (strict) minmax(num, BigInt(1), amounts.maxChainId, '>= 1 and <= 2**32-1');
  },
  accessList(list: AccessList) {
    // NOTE: we cannot handle this validation in coder, since it requires chainId to calculate correct checksum
    for (const { address } of list) {
      if (!addr.isValid(address)) throw new Error('address checksum does not match');
    }
  },
  authorizationList(list: AuthorizationItem[], opts: ValidationOpts) {
    for (const { address, nonce, chainId } of list) {
      if (!addr.isValid(address)) throw new Error('address checksum does not match');
      // chainId in authorization list can be zero (==allow any chain)
      abig(chainId);
      if (opts.strict) minmax(chainId, _0n, amounts.maxChainId, '>= 0 and <= 2**32-1');
      // Reuses the nonce validator above via `this` (method-call binding).
      this.nonce(nonce, opts);
    }
  },
};
// Validation
type ErrObj = { field: string; error: string };
/**
 * Error that carries a list of per-field validation failures, so callers can
 * report every invalid field at once instead of only the first one found.
 */
export class AggregatedError extends Error {
  message: string;
  errors: ErrObj[];
  constructor(msg: string, errorList: ErrObj[]) {
    super();
    this.message = msg;
    this.errors = errorList;
  }
}
/**
 * Validates `data` fields for tx `type`; collects every failure and throws a
 * single AggregatedError so the caller can report all invalid fields at once.
 * @param type - tx version key from TxVersions
 * @param data - candidate field map (values as already-decoded JS types)
 * @param strict - apply UI-oriented limits (see ValidationOpts note above)
 * @param allowSignatureFields - when false, presence of v/yParity/r/s is itself an error
 */
export function validateFields(
  type: TxType,
  data: Record<string, any>,
  strict = true,
  allowSignatureFields = true
): void {
  aobj(data);
  if (!TxVersions.hasOwnProperty(type)) throw new Error(`unknown tx type=${type}`);
  const txType = TxVersions[type];
  const dataFields = new Set(Object.keys(data));
  const dataHas = (field: string) => dataFields.has(field);
  // Returns an ErrObj on failure, undefined when the field is valid.
  function checkField(field: CoderName) {
    if (!dataHas(field))
      return { field, error: `field "${field}" must be present for tx type=${type}` };
    const val = data[field];
    try {
      if (validators.hasOwnProperty(field)) validators[field](val, { data, strict, type });
      if (field === 'chainId') return; // chainId is validated, but can't be decoded
      coders[field].decode(val as never); // decoding may throw an error
    } catch (error) {
      // No early-return: when multiple fields have error, we should show them all.
      return { field, error: (error as Error).message };
    }
    return undefined;
  }
  // All fields are required.
  const reqErrs = txType.fields.map(checkField);
  // Signature fields should be all present or all missing
  const optErrs = txType.optionalFields.some(dataHas) ? txType.optionalFields.map(checkField) : [];
  // Check if user data has unexpected fields
  const unexpErrs = Object.keys(data).map((field) => {
    if (!txType.setOfAllFields.has(field as any))
      return { field, error: `unknown field "${field}" for tx type=${type}` };
    if (!allowSignatureFields && signatureFields.has(field as any))
      return {
        field,
        error: `field "${field}" is sig-related and must not be user-specified`,
      };
    return;
  });
  const errors = (reqErrs as (ErrObj | undefined)[])
    .concat(optErrs, unexpErrs)
    .filter((val) => val !== undefined) as ErrObj[];
  if (errors.length > 0) throw new AggregatedError('fields had validation errors', errors);
}
// Canonical key ordering used by sortRawData below.
// prettier-ignore
const sortedFieldOrder = [
  'to', 'value', 'nonce',
  'maxFeePerGas', 'maxFeePerBlobGas', 'maxPriorityFeePerGas', 'gasPrice', 'gasLimit',
  'accessList', 'authorizationList', 'blobVersionedHashes', 'chainId', 'data', 'type',
  'r', 's', 'yParity', 'v'
] as const;
// TODO: remove any
/**
 * Returns a shallow copy of `raw` with keys arranged in the canonical order
 * defined by sortedFieldOrder; fields absent from `raw` are skipped.
 */
export function sortRawData(raw: TxCoder<any>): any {
  const out: Record<string, any> = {};
  for (const field of sortedFieldOrder) {
    if (raw.hasOwnProperty(field)) out[field] = raw[field];
  }
  return out;
}
/** Derives the legacy `v` value (EIP-155 style when chainId is present) from a decoded tx's signature fields. */
export function decodeLegacyV(raw: TxCoder<any>): bigint | undefined {
  return legacySig.decode(raw).v;
}
// NOTE: for tests only, don't use
export const __tests: any = { legacySig, TxVersions };

386
dev/env/node_modules/micro-eth-signer/src/typed-data.ts generated vendored Executable file
View File

@@ -0,0 +1,386 @@
import { keccak_256 } from '@noble/hashes/sha3';
import { concatBytes, hexToBytes, utf8ToBytes } from '@noble/hashes/utils';
import type { GetType as AbiGetType } from './abi/decoder.ts';
import { mapComponent } from './abi/decoder.ts';
import { addr } from './address.ts';
import { add0x, astr, ethHex, initSig, isObject, sign, strip0x, verify } from './utils.ts';
// EIP-191 signed data (https://eips.ethereum.org/EIPS/eip-191)
export type Hex = string | Uint8Array;
/** Signer/verifier pair bound to one EIP-191 version byte (see getSigner). */
export interface TypedSigner<T> {
  /** Hex-encoded keccak256 digest that would be signed for `message`. */
  _getHash: (message: T) => string;
  /** Signs message; returns 65-byte hex signature: r(32) || s(32) || recovery byte (0x1b/0x1c). */
  sign(message: T, privateKey: Hex, extraEntropy?: boolean | Uint8Array): string;
  /** Recovers the signer's address from a signature; throws on malformed/invalid signatures. */
  recoverPublicKey(signature: string, message: T): string;
  /** True when `signature` over `message` was produced by `address`. */
  verify(signature: string, message: T, address: string): boolean;
}
// 0x19 <1 byte version> <version specific data> <data to sign>.
// VERSIONS:
// - 0x19 <0x00> <intended validator address> <data to sign>
// - 0x19 <0x01> domainSeparator hashStruct(message)
// - 0x19 <0x45 (E)> <thereum Signed Message:\n" + len(message)> <data to sign>
/**
 * Builds a TypedSigner for one EIP-191 version byte.
 * The signed digest is keccak256(0x19 || version || msgFn(message)).
 * @param version - EIP-191 version byte (0..255)
 * @param msgFn - produces the version-specific payload following the two prefix bytes
 */
function getSigner<T>(version: number, msgFn: (message: T) => Uint8Array): TypedSigner<T> {
  if (version < 0 || version >= 256 || !Number.isSafeInteger(version))
    throw new Error('Wrong version byte');
  // bytes32 hash = keccak256(abi.encodePacked(byte(0x19), byte(0), address(this), msg.value, nonce, payload));
  const getHash = (message: T) =>
    keccak_256(concatBytes(new Uint8Array([0x19, version]), msgFn(message)));
  // TODO: 'v' can contain non-undefined chainId, but not sure if it is used. If used, we need to check it with EIP-712 domain
  return {
    _getHash: (message: T) => ethHex.encode(getHash(message)),
    sign(message: T, privateKey: Hex, extraEntropy: boolean | Uint8Array = true) {
      const hash = getHash(message);
      if (typeof privateKey === 'string') privateKey = ethHex.decode(privateKey);
      const sig = sign(hash, privateKey, extraEntropy);
      // Ethereum convention: trailing byte 0x1b/0x1c encodes 27 + recovery bit.
      const end = sig.recovery === 0 ? '1b' : '1c';
      return add0x(sig.toCompactHex() + end);
    },
    recoverPublicKey(signature: string, message: T) {
      astr(signature);
      const hash = getHash(message);
      signature = strip0x(signature);
      // Expect 65 bytes as hex: r(32) || s(32) || recovery(1).
      if (signature.length !== 65 * 2) throw new Error('invalid signature length');
      const sigh = signature.slice(0, -2);
      const end = signature.slice(-2);
      if (!['1b', '1c'].includes(end)) throw new Error('invalid recovery bit');
      const sig = initSig(hexToBytes(sigh), end === '1b' ? 0 : 1);
      const pub = sig.recoverPublicKey(hash).toRawBytes(false);
      // Defensive re-verification of the recovered key before deriving the address.
      if (!verify(sig, hash, pub)) throw new Error('invalid signature');
      return addr.fromPublicKey(pub);
    },
    verify(signature: string, message: T, address: string): boolean {
      const recAddr = this.recoverPublicKey(signature, message);
      // Accept non-checksummed addresses only when uniformly lower- or upper-cased;
      // anything mixed-case must match the checksummed form exactly.
      const low = recAddr.toLowerCase();
      const upp = recAddr.toUpperCase();
      if (address === low || address === upp) return true; // non-checksummed
      return recAddr === address; // checksummed
    },
  };
}
// EIP-191/EIP-7749: 0x19 <0x00> <intended validator address> <data to sign>
// export const intendedValidator = getSigner(
//   0x00,
//   ({ message, validator }: { message: Uint8Array; validator: string }) => {
//     const { data } = addr.parse(validator);
//     return concatBytes(hexToBytes(data), message);
//   }
// );
// EIP-191: 0x19 <0x45 (E)> <thereum Signed Message:\n" + len(message)> <data to sign>
// NOTE: version byte 0x45 is ASCII 'E', so combined with the payload below the
// full prefix reads "\x19Ethereum Signed Message:\n<len>" — this is intentional.
export const personal: TypedSigner<string | Uint8Array> = getSigner(
  0x45,
  (msg: string | Uint8Array) => {
    if (typeof msg === 'string') msg = utf8ToBytes(msg);
    return concatBytes(utf8ToBytes(`thereum Signed Message:\n${msg.length}`), msg);
  }
);
// eip712 typed signed data on top of signed data (https://eips.ethereum.org/EIPS/eip-712)
// - V1: no domain, {name: string, type: string, value: any}[] - NOT IMPLEMENTED
// - V3: basic (no arrays and recursive stuff)
// - V4: V3 + support of arrays and recursive stuff
// TODO:
// https://eips.ethereum.org/EIPS/eip-4361: Off-chain authentication for Ethereum accounts to establish sessions
// There is two API for different usage-cases:
// - encodeData/signTyped, verifyTyped -> wallet like application, when we sign already constructed stuff ('web3.eth.personal.signTypedData')
// - encoder(type).encodeData/sign/verify -> if we construct data and want re-use types for different requests + type safety for static types.
// TODO: type is ABI type, but restricted
/** Single field declaration inside an EIP-712 struct type. */
export type EIP712Component = { name: string; type: string };
/** Map of struct-type name -> ordered field declarations. */
export type EIP712Types = Record<string, readonly EIP712Component[]>;
// This makes 'bytes' -> Uint8Array, 'uint' -> bigint. However, we support 'string' for them (JSON in wallets),
// but for static types it is actually better to use strict types, since otherwise everything is 'string'. Address is string,
// but sending it in uint field can be mistake. Please open issue if you have use case where this behavior causes problems.
// prettier-ignore
type ProcessType<T extends string, Types extends EIP712Types> =
  T extends `${infer Base}[]${infer Rest}` ? ProcessType<`${Base}${Rest}`, Types>[] : // 'string[]' -> 'string'[]
  T extends `${infer Base}[${number}]${infer Rest}` ? ProcessType<`${Base}${Rest}`, Types>[] : // 'string[3]' -> 'string'[]
  T extends keyof Types ? GetType<Types, T> | undefined : // recursive
  AbiGetType<T>;
// Static TS object type for struct `K` declared in `Types`.
export type GetType<Types extends EIP712Types, K extends keyof Types & string> = {
  [C in Types[K][number] as C['name']]: ProcessType<C['type'], Types>;
};
type Key<T extends EIP712Types> = keyof T & string;
// TODO: merge with abi somehow?
/**
 * Parses an EIP-712/ABI-like type string (e.g. 'uint256', 'bytes32[4]', 'Mail[]').
 * @returns base - type name without the last array suffix's brackets' content context
 *          item - the element type (input with the trailing [..] stripped)
 *          type - 'dynamic' (string/bytes) | 'atomic' (bool/address/intN/bytesN) | 'struct'
 *          arrayLen / isArray - fixed length (if any) and whether an array suffix exists
 */
function parseType(s: string): {
  base: string;
  item: string;
  type: string;
  arrayLen: number | undefined;
  isArray: boolean;
} {
  // Split "base[...optional suffix...]" — group 2 is the (possibly empty) last bracket content.
  const parsed = /^([^\[]+)(?:.*\[(.*?)\])?$/.exec(s);
  if (parsed === null) throw new Error(`parseType: wrong type: ${s}`);
  const base = parsed[1];
  const lenStr = parsed[2];
  const isArray = lenStr !== undefined;
  let arrayLen: number | undefined = undefined;
  if (isArray && lenStr !== '') {
    arrayLen = Number(lenStr);
    // Reject non-integers and non-canonical forms like '03' or '1e2'.
    if (!Number.isSafeInteger(arrayLen) || String(arrayLen) !== lenStr)
      throw new Error(`parseType: wrong array length: ${s}`);
  }
  let kind = 'struct'; // anything unrecognized is treated as a user-defined struct
  let num: RegExpExecArray | null;
  if (base === 'string' || base === 'bytes') kind = 'dynamic';
  else if (base === 'bool' || base === 'address') kind = 'atomic';
  else if ((num = /^(u?)int([0-9]+)?$/.exec(base)) !== null) {
    const bits = num[2] ? +num[2] : 256; // bare int/uint means 256 bits
    if (!Number.isSafeInteger(bits) || bits <= 0 || bits % 8 !== 0 || bits > 256)
      throw new Error('parseType: invalid numeric type');
    kind = 'atomic';
  } else if ((num = /^bytes([0-9]{1,2})$/.exec(base)) !== null) {
    const bytes = +num[1];
    if (!bytes || bytes > 32) throw new Error(`parseType: wrong bytes<N=${bytes}> type`);
    kind = 'atomic';
  }
  const item = s.replace(/\[[^\]]*\]$/, '');
  return { base, item, type: kind, arrayLen, isArray };
}
// traverse dependency graph, find all transitive dependencies. Also, basic sanity check
// Returns, for each struct type, the set of OTHER struct types it (transitively) references.
function getDependencies(types: EIP712Types): Record<string, Set<string>> {
  if (typeof types !== 'object' || types === null) throw new Error('wrong types object');
  // Collect non-basic dependencies & sanity
  const res: Record<string, Set<string>> = {};
  for (const [name, fields] of Object.entries(types)) {
    const cur: Set<string> = new Set(); // type may appear multiple times in struct
    for (const { type } of fields) {
      const p = parseType(type);
      if (p.type !== 'struct') continue; // skip basic fields
      if (p.base === name) continue; // self reference
      if (!types[p.base]) throw new Error(`getDependencies: wrong struct type name=${type}`);
      cur.add(p.base);
    }
    res[name] = cur;
  }
  // This should be more efficient with toposort + cycle detection, but I've already spent too much time here
  // and for most cases there won't be a lot of types here anyway.
  // Fixed-point iteration: merge each type's dependencies' dependencies until no set grows.
  for (let changed = true; changed; ) {
    changed = false;
    for (const [name, curDeps] of Object.entries(res)) {
      // Map here, because curDeps will change
      const trDeps = Array.from(curDeps).map((i) => res[i]);
      for (const d of trDeps) {
        for (const td of d) {
          if (td === name || curDeps.has(td)) continue;
          curDeps.add(td);
          changed = true;
        }
      }
    }
  }
  return res;
}
// Precomputes per struct type: the encoded type string, the full encodeType string
// (with transitive deps appended in sorted order, per EIP-712), the keccak typeHash,
// and the set of declared field names (duplicates rejected).
function getTypes(types: EIP712Types) {
  const deps = getDependencies(types);
  const names: Record<string, string> = {};
  // Build names
  for (const type in types)
    names[type] = `${type}(${types[type].map(({ name, type }) => `${type} ${name}`).join(',')})`;
  const fullNames: Record<string, string> = {};
  for (const [name, curDeps] of Object.entries(deps)) {
    // EIP-712 encodeType: referenced struct types follow the primary type, alphabetically.
    const n = [name].concat(Array.from(curDeps).sort());
    fullNames[name] = n.map((i) => names[i]).join('');
  }
  const hashes = Object.fromEntries(Object.entries(fullNames).map(([k, v]) => [k, keccak_256(v)]));
  // fields
  const fields: Record<string, Set<string>> = {};
  for (const type in types) {
    const res: Set<string> = new Set();
    for (const { name } of types[type]) {
      if (res.has(name)) throw new Error(`field ${name} included multiple times in type ${type}`);
      res.add(name);
    }
    fields[type] = res;
  }
  return { names, fullNames, hashes, fields };
}
// This re-uses domain per multiple requests, which is based on assumption that domain is static for different requests with
// different types. Please raise issue if you have different use case.
/**
 * Creates an EIP-712 encoder/signer bound to `types` and a fixed `domain`.
 * Exposes encodeData/structHash plus the TypedSigner-style sign/verify/recover API.
 */
export function encoder<T extends EIP712Types>(types: T, domain: GetType<T, 'EIP712Domain'>) {
  if (!isObject(domain)) throw Error(`wrong domain=${domain}`);
  if (!isObject(types)) throw Error(`wrong types=${types}`);
  const info = getTypes(types);
  // Encodes one field value per EIP-712 rules; withHash=false returns the raw
  // concatenation (typeHash || fields) instead of its keccak (used by encodeData).
  const encodeField = (type: string, data: any, withHash = true): Uint8Array => {
    const p = parseType(type);
    if (p.isArray) {
      if (!Array.isArray(data)) throw new Error(`expected array, got: ${data}`);
      if (p.arrayLen !== undefined && data.length !== p.arrayLen)
        throw new Error(`wrong array length: expected ${p.arrayLen}, got ${data}`);
      // Arrays hash to keccak of the concatenated element encodings.
      return keccak_256(concatBytes(...data.map((i) => encodeField(p.item, i))));
    }
    if (p.type === 'struct') {
      const def = types[type];
      if (!def) throw new Error(`wrong type: ${type}`);
      const fieldNames = info.fields[type];
      if (!isObject(data)) throw new Error(`encoding non-object as custom type ${type}`);
      for (const k in data)
        if (!fieldNames.has(k)) throw new Error(`unexpected field ${k} in ${type}`);
      // TODO: use correct concatBytes (need to export from P?). This will easily crash with stackoverflow if too much fields.
      const fields = [];
      for (const { name, type } of def) {
        // This is not mentioned in spec, but used in eth-sig-util
        // Since there is no 'optional' fields inside eip712, it makes impossible to encode circular structure without arrays,
        // but seems like other project use this.
        // NOTE: this is V4 only stuff. If you need V3 behavior, please open issue.
        if (types[type] && data[name] === undefined) {
          fields.push(new Uint8Array(32));
          continue;
        }
        fields.push(encodeField(type, data[name]));
      }
      const res = concatBytes(info.hashes[p.base], ...fields);
      return withHash ? keccak_256(res) : res;
    }
    if (type === 'string' || type === 'bytes') {
      if (type === 'bytes' && typeof data === 'string') data = ethHex.decode(data);
      return keccak_256(data); // hashed as is!
    }
    // Type conversion is neccessary here, because we can get data from JSON (no Uint8Arrays/bigints).
    if (type.startsWith('bytes') && typeof data === 'string') data = ethHex.decode(data);
    if ((type.startsWith('int') || type.startsWith('uint')) && typeof data === 'string')
      data = BigInt(data);
    return mapComponent({ type }).encode(data);
  };
  const encodeData = <K extends Key<T>>(type: K, data: GetType<T, K>) => {
    astr(type);
    if (!types[type]) throw new Error(`Unknown type: ${type}`);
    if (!isObject(data)) throw new Error('wrong data object');
    return encodeField(type, data, false);
  };
  // EIP-712 hashStruct = keccak256(typeHash || encoded fields).
  const structHash = (type: Key<T>, data: any) => keccak_256(encodeData(type, data));
  // Domain separator is computed once and reused for every message.
  const domainHash = structHash('EIP712Domain', domain);
  // NOTE: we cannot use Msg here, since its already parametrized and everything will break.
  // EIP-191 version 0x01: digest = keccak256(0x19 0x01 || domainHash || structHash(message)).
  const signer = getSigner(0x01, (msg: { primaryType: string; message: any }) => {
    if (typeof msg.primaryType !== 'string') throw Error(`wrong primaryType=${msg.primaryType}`);
    if (!isObject(msg.message)) throw Error(`wrong message=${msg.message}`);
    if (msg.primaryType === 'EIP712Domain') return domainHash;
    return concatBytes(domainHash, structHash(msg.primaryType, msg.message));
  });
  return {
    encodeData: <K extends Key<T>>(type: K, message: GetType<T, K>): string =>
      ethHex.encode(encodeData(type, message)),
    structHash: <K extends Key<T>>(type: K, message: GetType<T, K>): string =>
      ethHex.encode(structHash(type, message)),
    // Signer
    _getHash: <K extends Key<T>>(primaryType: K, message: GetType<T, K>): string =>
      signer._getHash({ primaryType, message }),
    sign: <K extends Key<T>>(
      primaryType: K,
      message: GetType<T, K>,
      privateKey: Hex,
      extraEntropy?: boolean | Uint8Array
    ): string => signer.sign({ primaryType, message }, privateKey, extraEntropy),
    verify: <K extends Key<T>>(
      primaryType: K,
      signature: string,
      message: GetType<T, K>,
      address: string
    ): boolean => signer.verify(signature, { primaryType, message }, address),
    recoverPublicKey: <K extends Key<T>>(
      primaryType: K,
      signature: string,
      message: GetType<T, K>
    ): string => signer.recoverPublicKey(signature, { primaryType, message }),
  };
}
/** Standard EIP-712 domain fields; concrete domains use a subset (see getDomainType). */
export const EIP712Domain = [
  { name: 'name', type: 'string' }, // the user readable name of signing domain, i.e. the name of the DApp or the protocol.
  { name: 'version', type: 'string' }, // the current major version of the signing domain. Signatures from different versions are not compatible.
  { name: 'chainId', type: 'uint256' }, // the EIP-155 chain id. The user-agent should refuse signing if it does not match the currently active chain.
  { name: 'verifyingContract', type: 'address' }, // the address of the contract that will verify the signature. The user-agent may do contract specific phishing prevention.
  { name: 'salt', type: 'bytes32' }, // a disambiguating salt for the protocol. This can be used as a domain separator of last resort.
] as const;
export type DomainParams = typeof EIP712Domain;
const domainTypes = { EIP712Domain: EIP712Domain as DomainParams };
export type EIP712Domain = GetType<typeof domainTypes, 'EIP712Domain'>;
// Filter unused domain fields from type
/** Narrows the standard EIP712Domain field list to only those fields present in `domain`. */
export function getDomainType(domain: EIP712Domain) {
  return EIP712Domain.filter((field) => domain[field.name] !== undefined);
}
// Additional API without type safety for wallet-like applications
/** Full typed-data container, as accepted by eth_signTypedData_v4-style wallet APIs. */
export type TypedData<T extends EIP712Types, K extends Key<T>> = {
  types: T;
  primaryType: K;
  domain: GetType<T, 'EIP712Domain'>;
  message: GetType<T, K>;
};
// Ensures an EIP712Domain type (derived from the concrete domain object) is always present.
const getTypedTypes = <T extends EIP712Types, K extends Key<T>>(typed: TypedData<T, K>) => ({
  EIP712Domain: getDomainType(typed.domain as any),
  ...typed.types,
});
/** Sanity-checks the shape of a TypedData container before encoding/signing. */
function validateTyped<T extends EIP712Types, K extends Key<T>>(t: TypedData<T, K>) {
  if (!isObject(t.message)) throw new Error('wrong message');
  if (!isObject(t.domain)) throw new Error('wrong domain');
  if (!isObject(t.types)) throw new Error('wrong types');
  const primary = t.primaryType;
  // primaryType must be a string naming one of the declared struct types.
  if (typeof primary !== 'string' || !t.types[primary]) throw new Error('wrong primaryType');
}
/**
 * Encodes `typed.message` per EIP-712 encodeData rules, returned as a hex string.
 * The EIP712Domain type is derived from the fields actually present in `typed.domain`.
 */
export function encodeData<T extends EIP712Types, K extends Key<T>>(
  typed: TypedData<T, K>
): string {
  validateTyped(typed);
  const enc = encoder(getTypedTypes(typed) as T, typed.domain);
  return enc.encodeData(typed.primaryType, typed.message);
}
/** Computes the final EIP-712 signing digest (keccak256 of 0x19 0x01 ‖ domainHash ‖ structHash), hex-encoded. */
export function sigHash<T extends EIP712Types, K extends Key<T>>(typed: TypedData<T, K>): string {
  validateTyped(typed);
  const enc = encoder(getTypedTypes(typed) as T, typed.domain);
  return enc._getHash(typed.primaryType, typed.message);
}
/**
 * Signs EIP-712 typed data with `privateKey`; returns the 65-byte hex signature.
 * `extraEntropy` is forwarded to the underlying signer (defaults to true there).
 */
export function signTyped<T extends EIP712Types, K extends Key<T>>(
  typed: TypedData<T, K>,
  privateKey: Hex,
  extraEntropy?: boolean | Uint8Array
): string {
  validateTyped(typed);
  const enc = encoder(getTypedTypes(typed) as T, typed.domain);
  return enc.sign(typed.primaryType, typed.message, privateKey, extraEntropy);
}
/** Verifies an EIP-712 typed-data signature against `address` (checksummed, or uniformly lower/upper-cased). */
export function verifyTyped<T extends EIP712Types, K extends Key<T>>(
  signature: string,
  typed: TypedData<T, K>,
  address: string
): boolean {
  validateTyped(typed);
  const enc = encoder(getTypedTypes(typed) as T, typed.domain);
  return enc.verify(typed.primaryType, signature, typed.message, address);
}
export function recoverPublicKeyTyped<T extends EIP712Types, K extends Key<T>>(
signature: string,
typed: TypedData<T, K>
): string {
return encoder(getTypedTypes(typed) as T, typed.domain).recoverPublicKey(
typed.primaryType,
signature,
typed.message
);
}
// Internal methods for test purposes only
export const _TEST: any = /* @__PURE__ */ { parseType, getDependencies, getTypes, encoder };

237
dev/env/node_modules/micro-eth-signer/src/utils.ts generated vendored Executable file
View File

@@ -0,0 +1,237 @@
import { secp256k1 } from '@noble/curves/secp256k1';
import { hexToBytes as _hexToBytes, isBytes as _isBytes, bytesToHex } from '@noble/hashes/utils';
import { type Coder, coders } from 'micro-packed';
// Re-export of noble-hashes isBytes for library consumers.
export const isBytes: typeof _isBytes = _isBytes;
// There is no network code in the library.
// The types are used to check external network provider interfaces.
// JSON-RPC eth_call-style arguments. NOTE(review): values are presumably
// hex-encoded quantities/data per JSON-RPC convention — confirm with the provider used.
export type Web3CallArgs = Partial<{
  to: string;
  from: string;
  data: string;
  nonce: string;
  value: string;
  gas: string;
  gasPrice: string;
  tag: number | 'latest' | 'earliest' | 'pending';
}>;
// Minimal provider surface this library expects from an external web3 transport.
export type IWeb3Provider = {
  ethCall: (args: Web3CallArgs) => Promise<string>;
  estimateGas: (args: Web3CallArgs) => Promise<bigint>;
  call: (method: string, ...args: any[]) => Promise<any>;
};
// Decimal places: 1 ether = 10^18 wei, 1 gwei = 10^9 wei.
const ETH_PRECISION = 18;
const GWEI_PRECISION = 9;
const GWEI = BigInt(10) ** BigInt(GWEI_PRECISION);
const ETHER = BigInt(10) ** BigInt(ETH_PRECISION);
// Sanity limits used by transaction validation elsewhere in the library.
export const amounts: {
  GWEI_PRECISION: number;
  ETH_PRECISION: number;
  GWEI: bigint;
  ETHER: bigint;
  maxAmount: bigint;
  minGasLimit: bigint;
  maxGasLimit: bigint;
  maxGasPrice: bigint;
  maxNonce: bigint;
  maxDataSize: number;
  maxInitDataSize: number;
  maxChainId: bigint;
  maxUint64: bigint;
  maxUint256: bigint;
} = /* @__PURE__ */ (() => ({
  GWEI_PRECISION,
  ETH_PRECISION,
  GWEI,
  ETHER,
  // Disabled with "strict=false"
  maxAmount: BigInt(1_000_000) * ETHER, // 1M ether for testnets
  minGasLimit: BigInt(21_000), // 21K wei is used at minimum. Possibly smaller gas limit in 4844 txs?
  maxGasLimit: BigInt(30_000_000), // 30M wei. A block limit in 2024 is 30M
  maxGasPrice: BigInt(10_000) * GWEI, // 10K gwei. Arbitrage HFT bots can use more
  maxNonce: BigInt(131_072), // 2**17, but in spec it's actually 2**64-1
  maxDataSize: 1_000_000, // Size of .data field. TODO: research
  maxInitDataSize: 49_152, // EIP-3860
  maxChainId: BigInt(2 ** 32 - 1),
  maxUint64: BigInt(2) ** BigInt(64) - BigInt(1),
  maxUint256: BigInt(2) ** BigInt(256) - BigInt(1),
}))();
// For usage with other packed utils via apply
// This format is pretty much arbitrary:
// - '0x' vs '0x0' for empty
// - strip leading zero/don't
// - geth (https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-eth):
// 0x0,
// - etherscan (https://docs.etherscan.io/api-endpoints/logs):
// even 'data' can be '0x'
//
// 0x data = Uint8Array([])
// 0x num = BigInt(0)
const leadingZerosRe = /^0+/;
/**
 * Builds a Uint8Array <-> 0x-hex-string coder.
 * decode: accepts odd-length hex (left-pads a single zero nibble).
 * encode: optionally strips leading zero characters (keepLeadingZero=false).
 */
const genEthHex = (keepLeadingZero = true): Coder<Uint8Array, string> => ({
  decode: (data: string): Uint8Array => {
    if (typeof data !== 'string') throw new Error('hex data must be a string');
    const stripped = strip0x(data);
    const even = stripped.length % 2 === 1 ? `0${stripped}` : stripped;
    return _hexToBytes(even);
  },
  encode: (data: Uint8Array): string => {
    const raw = bytesToHex(data);
    const body = keepLeadingZero ? raw : raw.replace(leadingZerosRe, '');
    return add0x(body);
  },
});
// Standard coder: preserves leading zeros (round-trips byte length).
export const ethHex: Coder<Uint8Array, string> = /* @__PURE__ */ genEthHex(true);
// Variant used where RPC expects minimal hex (no leading zero chars).
export const ethHexNoLeadingZero: Coder<Uint8Array, string> = /* @__PURE__ */ genEthHex(false);
const ethHexStartRe = /^0[xX]/;
/** Prepends '0x' unless the string already starts with '0x'/'0X'. */
export function add0x(hex: string): string {
  if (ethHexStartRe.test(hex)) return hex;
  return `0x${hex}`;
}
/** Removes one leading '0x'/'0X' prefix, if present. */
export function strip0x(hex: string): string {
  return hex.replace(ethHexStartRe, '');
}
/** Converts an integer to 0x-prefixed hex, left-padded to an even digit count. */
export function numberTo0xHex(num: number | bigint): string {
  const raw = num.toString(16);
  const padded = raw.length % 2 === 1 ? `0${raw}` : raw;
  return add0x(padded);
}
/** Parses hex (with or without 0x prefix) into a bigint; empty string -> 0n. */
export function hexToNumber(hex: string): bigint {
  if (typeof hex !== 'string') throw new TypeError('expected hex string, got ' + typeof hex);
  if (!hex) return BigInt(0);
  return BigInt(add0x(hex));
}
/** True for any non-null value of typeof 'object' (arrays included). */
export function isObject(item: unknown): item is Record<string, any> {
  return typeof item === 'object' && item !== null;
}
/** Throws unless the argument is a string. */
export function astr(str: unknown): void {
  if (typeof str === 'string') return;
  throw new Error('string expected');
}
/**
 * Signs a hash with secp256k1. Recovery bits 2 and 3 are rejected:
 * the yellow paper (page 26) bans them.
 * https://ethereum.github.io/yellowpaper/paper.pdf
 */
export function sign(
  hash: Uint8Array,
  privKey: Uint8Array,
  extraEntropy: boolean | Uint8Array = true
) {
  const sig = secp256k1.sign(hash, privKey, { extraEntropy });
  if (sig.recovery === 2 || sig.recovery === 3)
    throw new Error('invalid signature rec=2 or 3');
  return sig;
}
// A signature is either compact bytes or an { r, s } bigint pair.
export type RawSig = { r: bigint; s: bigint };
export type Sig = RawSig | Uint8Array;
// Throws unless the value is one of the two accepted signature shapes.
function validateRaw(obj: Sig) {
  if (isBytes(obj)) return true;
  const isPair =
    typeof obj === 'object' &&
    obj !== null &&
    typeof obj.r === 'bigint' &&
    typeof obj.s === 'bigint';
  if (isPair) return true;
  throw new Error('expected valid signature');
}
/** Verifies a signature (bytes or {r,s}) over hash with publicKey. */
export function verify(sig: Sig, hash: Uint8Array, publicKey: Uint8Array) {
  validateRaw(sig);
  return secp256k1.verify(sig, hash, publicKey);
}
/** Normalizes a signature into a Signature object carrying the given recovery bit. */
export function initSig(sig: Sig, bit: number) {
  validateRaw(sig);
  const parsed = isBytes(sig)
    ? secp256k1.Signature.fromCompact(sig)
    : new secp256k1.Signature(sig.r, sig.s);
  return parsed.addRecoveryBit(bit);
}
/**
 * Recursively deep-clones a value: Uint8Array, Array, and plain objects are
 * copied; bigint is re-wrapped; all other primitives are returned as-is.
 *
 * Fix: the original fell into the object branch for `null`
 * (typeof null === 'object'), so cloneDeep(null) returned `{}`.
 * null is now returned unchanged.
 */
export function cloneDeep<T>(obj: T): T {
  if (obj === null) return obj;
  if (isBytes(obj)) {
    return Uint8Array.from(obj) as T;
  } else if (Array.isArray(obj)) {
    return obj.map(cloneDeep) as unknown as T;
  } else if (typeof obj === 'bigint') {
    return BigInt(obj) as unknown as T;
  } else if (typeof obj === 'object') {
    // should be last, so it won't catch other types
    let res: any = {};
    // TODO: hasOwnProperty?
    for (let key in obj) res[key] = cloneDeep(obj[key]);
    return res;
  } else return obj;
}
/** Shallow copy of `obj` with the listed keys removed. */
export function omit<T extends object, K extends Extract<keyof T, string>>(
  obj: T,
  ...keys: K[]
): Omit<T, K> {
  const res: any = { ...obj };
  for (const key of keys) delete res[key];
  return res;
}
/** Pairs elements of two arrays; the shorter side yields undefined entries. */
export function zip<A, B>(a: A[], b: B[]): [A, B][] {
  const len = Math.max(a.length, b.length);
  const res: [A, B][] = [];
  for (let i = 0; i < len; i++) res.push([a[i], b[i]]);
  return res;
}
// bigint <-> decimal-string coders (fixed-point with the given number of places).
export const createDecimal: (precision: number, round?: boolean) => Coder<bigint, string> =
  coders.decimal;
// wei <-> ether string (18 places) and wei <-> gwei string (9 places).
export const weieth: Coder<bigint, string> = createDecimal(ETH_PRECISION);
export const weigwei: Coder<bigint, string> = createDecimal(GWEI_PRECISION);
// legacy. TODO: remove
export const ethDecimal = weieth satisfies typeof weieth as typeof weieth;
export const gweiDecimal = weigwei satisfies typeof weigwei as typeof weigwei;
// Human-readable formatting helpers for wei amounts. All math is bigint-based
// to avoid float precision loss.
export const formatters = {
  // returns decimal that costs exactly $0.01 in given precision (using price)
  // formatDecimal(perCentDecimal(prec, price), prec) * price == '0.01'
  perCentDecimal(precision: number, price: number): bigint {
    // weieth (18 places) serves as a high-precision fixed-point for fiat math.
    const fiatPrec = weieth;
    //x * price = 0.01
    //x = 0.01/price = 1/100 / price = 1/(100*price)
    // float does not have enough precision
    const totalPrice = fiatPrec.decode('' + price);
    const centPrice = fiatPrec.decode('0.01') * BigInt(10) ** BigInt(precision);
    return centPrice / totalPrice;
  },
  // TODO: what difference between decimal and this?!
  // Used by 'fromWei' only
  // Formats amount/base with up to `precision` fraction digits.
  // A '~' prefix marks that truncation dropped non-zero digits.
  formatBigint(amount: bigint, base: bigint, precision: number, fixed = false): string {
    const baseLength = base.toString().length;
    const whole = (amount / base).toString();
    let fraction = (amount % base).toString();
    // Left-pad the fraction so it has (digits-of-base - 1) places.
    const zeros = '0'.repeat(Math.max(0, baseLength - fraction.length - 1));
    fraction = `${zeros}${fraction}`;
    const fractionWithoutTrailingZeros = fraction.replace(/0+$/, '');
    const fractionAfterPrecision = (fixed ? fraction : fractionWithoutTrailingZeros).slice(
      0,
      precision
    );
    if (!fixed && (fractionAfterPrecision === '' || parseInt(fractionAfterPrecision, 10) === 0)) {
      return whole;
    }
    // is same fraction?
    const fr = (str: string) => str.replace(/0+$/, '');
    const prefix =
      BigInt(`1${fr(fractionAfterPrecision)}`) === BigInt(`1${fr(fraction)}`) ? '' : '~';
    return `${prefix}${whole}.${fractionAfterPrecision}`;
  },
  // Pretty-prints a wei amount picking a unit by magnitude.
  // NOTE(review): the middle branch divides by GWEI (10^9 wei) but labels the
  // result 'μeth' (10^12 wei) — verify the intended unit against callers/tests.
  fromWei(wei: string | number | bigint): string {
    const GWEI = 10 ** 9;
    const ETHER = BigInt(10) ** BigInt(ETH_PRECISION);
    wei = BigInt(wei);
    if (wei < BigInt(GWEI) / BigInt(10)) return wei + 'wei';
    if (wei >= BigInt(GWEI) && wei < ETHER / BigInt(1000))
      return formatters.formatBigint(wei, BigInt(GWEI), 9, false) + 'μeth';
    return formatters.formatBigint(wei, ETHER, ETH_PRECISION, false) + 'eth';
  },
};

669
dev/env/node_modules/micro-eth-signer/src/verkle.ts generated vendored Executable file
View File

@@ -0,0 +1,669 @@
import { precomputeMSMUnsafe } from '@noble/curves/abstract/curve';
import { type ExtPointType, twistedEdwards } from '@noble/curves/abstract/edwards';
import { Field, FpLegendre } from '@noble/curves/abstract/modular';
import { bytesToNumberBE, bytesToNumberLE, numberToBytesBE } from '@noble/curves/abstract/utils';
import { sha256 } from '@noble/hashes/sha256';
import { bytesToHex, concatBytes, hexToBytes, randomBytes, utf8ToBytes } from '@noble/hashes/utils';
import * as P from 'micro-packed';
import { ethHex } from './utils.ts';
// Verkle tree branching factor / size of the polynomial evaluation domain.
const DOMAIN_SIZE = 256;
const DOMAIN_SIZE_LOG2 = Math.log2(DOMAIN_SIZE);
// 256 uses a lot of memory
// 5 - default? 500 ops -> 9k ops for getTreeKey
// 256 - uses a lot of memory (169mb), but 40 -> 6.8k ops for commitToScalars
const MSM_PRECOMPUTE_SMALL = 5;
const MSM_PRECOMPUTE_WINDOW = 8;
const MSM_PRECOMPUTE_2_SIZE = 8;
const TWO_POW_128 = BigInt(2) ** BigInt(128);
// Fp: coordinate field of the bandersnatch curve (lazy reduction enabled).
const Fp = Field(
  BigInt('52435875175126190479447740508185965837690552500527637822603658699938581184513'),
  undefined,
  true
);
// Fr: scalar field used for polynomial/commitment arithmetic.
const Fr = Field(
  BigInt('13108968793781547619861935127046491459309155893440570251786403306729687672801'),
  undefined,
  true
);
// Bandersnatch twisted-Edwards curve parameters.
const bandersnatch = twistedEdwards({
  Fp: Fp,
  a: BigInt(-5),
  d: BigInt('45022363124591815672509500913686876175488063829319466900776701791074614335719'),
  n: BigInt('52435875175126190479447740508185965837690552500527637822603658699938581184513'),
  h: BigInt(4),
  Gx: BigInt('18886178867200960497001835917649091219057080094937609519140440539760939937304'),
  Gy: BigInt('19188667384257783945677642223292697773471335439753913231509108946878080696678'),
  hash: sha256,
  randomBytes,
});
const { ExtendedPoint: Point } = bandersnatch;
type Point = typeof Point.BASE;
// A polynomial in evaluation (Lagrange) form over the 256-point domain.
type Poly = bigint[];
const PolyZero = () => new Array(DOMAIN_SIZE).fill(Fr.ZERO);
// Throws unless idx is an integer in [0, DOMAIN_SIZE).
function validateIndex(idx: number) {
  if (!Number.isSafeInteger(idx) || idx < 0 || idx >= DOMAIN_SIZE)
    throw new Error(`wrong index=${idx}`);
}
// Creates 32 byte scalar from smaller u8a
const extendScalar = (b: Uint8Array) => {
  if (b.length > 32) throw new Error('scalar bytes bigger than 32 bytes');
  const res = new Uint8Array(32);
  res.set(b);
  return res;
};
// == Commitment
// 64-byte uncompressed point codec: (x, y) as two little-endian u256 values.
const uncompressed = P.apply(P.struct({ x: P.U256LE, y: P.U256LE }), {
  encode: Point.fromAffine,
  decode: (x) => x.toAffine(),
});
// "Positive" convention: n is positive when it exceeds its field negation.
const isPositive = (n: bigint) => n > Fp.neg(n);
// == serializedCommitment
// 32-byte compressed codec: stores x (sign-normalized via y). The `encode`
// branch reconstructs the point from bytes and performs a subgroup check.
const compressed = P.apply(P.U256BE, {
  encode: (b) => {
    const x = Fp.create(b);
    const x2 = Fp.sqr(x);
    const dx2 = Fp.sub(Fp.mul(bandersnatch.CURVE.d, x2), Fp.ONE); // dx^2-1
    const ax2 = Fp.sub(Fp.mul(bandersnatch.CURVE.a, x2), Fp.ONE); // ax^2-1
    const y = Fp.sqrt(Fp.div(ax2, dx2)); // sqrt((ax^2-1)/(dx^2-1))
    // Pick the canonical ("positive") root for y.
    const yRes = isPositive(y) ? y : Fp.neg(y);
    const p = Point.fromAffine({ x, y: yRes });
    p.assertValidity();
    const t = Fp.sub(Fp.ONE, Fp.mul(bandersnatch.CURVE.a, Fp.sqr(x)));
    const l = FpLegendre(Fp.ORDER)(Fp, t);
    // Check if 1 - ax^2 is a quadratic residue
    if (!Fp.eql(l, Fp.ONE)) throw new Error('subgroup check failed');
    return p;
  },
  decode: (p) => {
    const affine = p.toAffine();
    // Serialize x, negated when y is not canonical, so sign round-trips.
    return isPositive(affine.y) ? affine.x : Fp.neg(affine.x);
  },
});
/** Splits a list at its midpoint; for odd lengths the left half is shorter. */
function splitHalf<T>(lst: T[]): [T[], T[]] {
  const mid = Math.floor(lst.length / 2);
  const left = lst.slice(0, mid);
  const right = lst.slice(mid);
  return [left, right];
}
// Wire format of a multipoint (IPA) proof: commitment D, log2(256)=8 pairs of
// left/right fold commitments, and the final folded scalar `a`.
const multipointProof = P.struct({
  D: compressed,
  cl: P.array(DOMAIN_SIZE_LOG2, compressed),
  cr: P.array(DOMAIN_SIZE_LOG2, compressed),
  a: P.validate(P.U256LE, Fr.create),
});
type MultiProof = P.UnwrapCoder<typeof multipointProof>;
// Hash-to-curve style CRS generation: hash seed||counter to candidate x
// coordinates and keep the ones that decode to valid curve points.
function generateCRSPoints(seed: string, points: number) {
  const res = [];
  const h = sha256.create().update(seed);
  for (let i = 0; res.length < points; i++) {
    const hash = h.clone().update(numberToBytesBE(i, 8)).digest();
    const x = Fp.create(bytesToNumberBE(hash));
    const xBytes = Fp.toBytes(x);
    xBytes.reverse();
    try {
      res.push(compressed.decode(xBytes));
    } catch (e) {} // candidate not on curve / fails subgroup check: skip
  }
  return res;
}
// This is pedersen like hashes
const CRS_Q = Point.BASE;
// Lazily-initialized CRS state (precomputation is expensive; see notes above).
let CRS_G: ExtPointType[];
let precomputed = false;
let CRS_G_PREC: any;
let CRS_G0_TREEKEY: any;
// Builds the CRS and MSM precomputation tables exactly once, on first use.
function precomputeOnFirstRun() {
  if (precomputed) return;
  CRS_G = generateCRSPoints('eth_verkle_oct_2021', DOMAIN_SIZE);
  for (let i = 0; i < MSM_PRECOMPUTE_SMALL; i++)
    bandersnatch.utils.precompute(MSM_PRECOMPUTE_WINDOW, CRS_G[i]);
  CRS_G_PREC = precomputeMSMUnsafe(Point, Fr, CRS_G, MSM_PRECOMPUTE_2_SIZE);
  // Cached CRS_G[0]*16386 used by getTreeKeyHash.
  CRS_G0_TREEKEY = CRS_G[0].multiplyUnsafe(BigInt(16386));
  precomputed = true;
}
// Multi-scalar multiplication against the CRS basis (precomputed).
const crsMSM = (scalars: bigint[]) => {
  precomputeOnFirstRun();
  return CRS_G_PREC(scalars);
};
// Transcript
// Fiat-Shamir transcript: absorbs labeled scalars/points, squeezes challenge
// scalars via sha256 over the accumulated state.
class Transcript {
  state: Uint8Array[] = [];
  constructor(label: string) {
    this.domainSeparator(label);
  }
  // Pushes a label into the state to separate protocol phases.
  domainSeparator(label: string): void {
    this.state.push(utf8ToBytes(label));
  }
  private appendMessage(message: Uint8Array, label: string) {
    this.domainSeparator(label);
    this.state.push(message);
  }
  appendScalar(label: string, scalar: bigint): void {
    this.appendMessage(Fr.toBytes(Fr.create(scalar)), label);
  }
  appendPoint(label: string, point: Point): void {
    this.appendMessage(compressed.encode(point), label);
  }
  // Hashes the state into a challenge, resets state, and re-absorbs the
  // challenge so subsequent challenges are chained.
  challengeScalar(label: string): bigint {
    this.domainSeparator(label);
    const scalar = Fr.create(Fr.fromBytes(sha256(concatBytes(...this.state))));
    this.state = [];
    this.appendScalar(label, scalar);
    return scalar;
  }
}
// /Transcript
// Maps a curve point into Fr via x/y of its affine form.
function mapToField(p: Point) {
  const { x, y } = p.toAffine();
  return Fr.create(Fp.div(x, y));
}
// Barycentric weights w_i = prod_{j!=i} (i - j) over the integer domain.
function getBarycentricWeights(domainSize: number) {
  const res = [];
  for (let i = 0; i < domainSize; i++) {
    const elm = Fr.create(BigInt(i));
    let weight = Fr.ONE;
    for (let j = 0; j < domainSize; j++) {
      if (j === i) continue; // Skip the current domain element
      weight = Fr.mul(weight, Fr.sub(elm, Fr.create(BigInt(j))));
    }
    res.push(weight);
  }
  return res;
}
// Inverses of 1..domainSize-1, indexed as [1/(i+1)].
function getInvertedWeights(domainSize: number) {
  const res = [];
  for (let i = 1; i < domainSize; i++) res.push(Fr.inv(Fr.create(BigInt(i))));
  return res;
}
const WEIGTHS_BARYCENTRIC = getBarycentricWeights(DOMAIN_SIZE);
const WEIGTHS_BARYCENTRIC_INV = Fr.invertBatch(WEIGTHS_BARYCENTRIC);
const WEIGHTS_INVERTED = getInvertedWeights(DOMAIN_SIZE);
const WEIGHTS_INVERTED_NEG = WEIGHTS_INVERTED.map(Fr.neg);
// Computes q(X) = (p(X) - p(idx)) / (X - idx) in evaluation form, using
// precomputed barycentric/inverted weights to avoid per-call inversions.
function divideByLinearVanishing(poly: Poly, idx: number): bigint[] {
  const q: bigint[] = new Array(poly.length).fill(Fr.ZERO);
  const y = poly[idx];
  for (let i = 0; i < poly.length; i++) {
    if (i === idx) continue;
    const den = i - idx;
    const isNegative = den < 0;
    // Pick the sign-matching table of precomputed 1/|den| values.
    const weights = isNegative ? WEIGHTS_INVERTED_NEG : WEIGHTS_INVERTED;
    const denInv = weights[Math.abs(den) - 1];
    const qi = Fr.mul(Fr.sub(poly[i], y), denInv);
    q[i] = qi;
    // Accumulate the q(idx) term via the barycentric weight ratio.
    const weightRatio = Fr.mul(WEIGTHS_BARYCENTRIC[idx], WEIGTHS_BARYCENTRIC_INV[i]);
    q[idx] = Fr.sub(q[idx], Fr.mul(weightRatio, qi));
  }
  return q;
}
// Lagrange basis evaluations L_i(point) for a point outside the domain.
function evaluateLagrangeCoefficients(point: bigint): bigint[] {
  const res = [];
  for (let i = 0; i < DOMAIN_SIZE; i++)
    res.push(Fr.mul(WEIGTHS_BARYCENTRIC[i], Fr.sub(point, Fr.create(BigInt(i)))));
  // az = prod_i (point - i), the vanishing polynomial at `point`.
  let az = Fr.ONE;
  for (let i = 0; i < DOMAIN_SIZE; i++) az = Fr.mul(az, Fr.sub(point, Fr.create(BigInt(i))));
  return Fr.invertBatch(res).map((i) => Fr.mul(i, az));
}
// Inner product <a, b> over Fr.
function innerProduct(a: bigint[], b: bigint[]): bigint {
  let res = Fr.ZERO;
  for (let i = 0; i < a.length; i++) res = Fr.add(res, Fr.mul(a[i], b[i]));
  return res;
}
// A single opening claim: commitment C opens to `result` at domain index `point`.
type VerifierQuery = {
  commitment: Point;
  point: number;
  result: bigint;
};
// Prover additionally carries the full polynomial in evaluation form.
type ProverQuery = VerifierQuery & { poly: Poly };
// Absorbs all queries and returns powers [1, r, r^2, ...] of the challenge r,
// used to randomly combine queries (shared by prover and verifier).
function multiproofR(transcript: Transcript, queries: (ProverQuery | VerifierQuery)[]) {
  transcript.domainSeparator('multiproof');
  for (const q of queries) {
    transcript.appendPoint('C', q.commitment);
    transcript.appendScalar('z', Fr.create(BigInt(q.point)));
    transcript.appendScalar('y', q.result);
  }
  const r = transcript.challengeScalar('r');
  const powers = [Fr.ONE];
  for (let i = 1; i < queries.length; i++) powers.push(Fr.mul(powers[i - 1], r));
  return powers;
}
// IPA phase challenge w (binds commitment C and the evaluation claim).
function ipaW(transcript: Transcript, C: Point, input: bigint, output: bigint) {
  transcript.domainSeparator('ipa');
  transcript.appendPoint('C', C);
  transcript.appendScalar('input point', input);
  transcript.appendScalar('output point', output);
  return transcript.challengeScalar('w');
}
// Per-round IPA folding challenge x from the round's L/R commitments.
function ipaX(transcript: Transcript, L: Point, R: Point) {
  transcript.appendPoint('L', L);
  transcript.appendPoint('R', R);
  return transcript.challengeScalar('x');
}
// Verifies a multipoint IPA proof against a list of opening claims.
// Mirrors the Fiat-Shamir sequence produced by createProof.
function multiproofCheck(proof: MultiProof, queries: VerifierQuery[], transcript: Transcript) {
  const powers = multiproofR(transcript, queries);
  transcript.appendPoint('D', proof.D);
  const t = transcript.challengeScalar('t');
  // 1/(t - z_i) for each query, batched into one inversion.
  const g2den = Fr.invertBatch(queries.map((q) => Fr.sub(t, Fr.create(BigInt(q.point)))));
  const helperScalars = [];
  for (let i = 0; i < powers.length; i++) helperScalars.push(Fr.mul(powers[i], g2den[i]));
  // g2(t) = sum_i r^i * y_i / (t - z_i)
  let g2t = Fr.ZERO;
  for (let i = 0; i < helperScalars.length; i++)
    g2t = Fr.add(g2t, Fr.mul(helperScalars[i], queries[i].result));
  const E = Point.msm(
    queries.map((q) => q.commitment),
    helperScalars
  );
  transcript.appendPoint('E', E);
  const C = E.subtract(proof.D);
  const b = evaluateLagrangeCoefficients(t);
  // IPA
  if (proof.cl.length !== DOMAIN_SIZE_LOG2 || proof.cr.length !== DOMAIN_SIZE_LOG2)
    throw new Error('wrong cl/cr');
  const w = ipaW(transcript, C, t, g2t);
  // Replay all fold challenges in order.
  const challenges = [];
  for (let i = 0; i < DOMAIN_SIZE_LOG2; i++)
    challenges.push(ipaX(transcript, proof.cl[i], proof.cr[i]));
  const challengesInv = Fr.invertBatch(challenges);
  // Expand folded scalars back to the full domain basis.
  const gi = [];
  const bi = [];
  for (let i = 0; i < DOMAIN_SIZE; i++) {
    let b = Fr.neg(Fr.ONE);
    for (let j = 0; j < DOMAIN_SIZE_LOG2; j++) {
      if ((i >> (DOMAIN_SIZE_LOG2 - j - 1)) & 1) b = Fr.mul(b, challengesInv[j]);
    }
    bi.push(b);
    gi.push(Fr.mul(proof.a, b));
  }
  const b0 = innerProduct(b, bi);
  const qi = Fr.mul(w, Fr.add(g2t, Fr.mul(proof.a, b0)));
  // TODO: this is fast only if we have precomputes, otherwise concat is better?
  const tmp = crsMSM(gi);
  // Final check: the combined MSM must collapse to the identity point.
  const points = proof.cl.concat(proof.cr).concat([C, CRS_Q]);
  const scalars = challenges.concat(challengesInv).concat([Fr.ONE, qi]);
  return Point.msm(points, scalars).add(tmp).equals(Point.ZERO);
}
// Multiplies CRS basis point `index` by the scalar in `bytes` (test helper).
const scalarMulIndex = (bytes: Uint8Array, index: number) => {
  precomputeOnFirstRun();
  return uncompressed.encode(CRS_G[index].multiplyUnsafe(Fr.fromBytes(bytes)));
};
// EXPORT
// Public byte-level aliases for API clarity.
export type Scalar = Uint8Array;
export type Commitment = Uint8Array;
// Prover input: an uncompressed commitment, its 256-slot vector, and the
// indices to open.
export type ProverInput = {
  serializedCommitment: Uint8Array;
  vector: Uint8Array[];
  indices: number[];
};
// Verifier input: a compressed commitment and claimed (index, value) openings.
export type VerifierInput = {
  serializedCommitment: Uint8Array;
  indexValuePairs: { index: number; value: Uint8Array }[];
};
// Hashes an uncompressed commitment into a 32-byte Fr scalar.
export const hashCommitment = (commitment: Uint8Array): Uint8Array =>
  Fr.toBytes(mapToField(uncompressed.decode(commitment)));
// Pedersen-style commitment: MSM of the scalars against the CRS basis.
export const commitToScalars = (vector: Uint8Array[]): Uint8Array => {
  if (vector.length > DOMAIN_SIZE) throw new Error('vector length greater than DOMAIN_SIZE');
  const scalars = vector.map(Fr.fromBytes);
  return uncompressed.encode(crsMSM(scalars));
};
// TODO: implement optimization (batch inv inside mapToField)
export const hashCommitments = (commitments: Uint8Array[]): Uint8Array[] =>
  commitments.map(hashCommitment);
// EIP-6800-style tree key hash: pedersen hash of (marker, address, treeIndex)
// split into 128-bit little-endian limbs.
export const getTreeKeyHash = (address: Uint8Array, treeIndexLE: Uint8Array): Uint8Array => {
  if (address.length !== 32) throw new Error('Address must be 32 bytes');
  if (treeIndexLE.length !== 32) throw new Error('Tree index must be 32 bytes');
  precomputeOnFirstRun();
  const P0 = CRS_G[1].multiplyUnsafe(bytesToNumberLE(address.subarray(0, 16)));
  const P1 = CRS_G[2].multiplyUnsafe(bytesToNumberLE(address.subarray(16, 32)));
  const P2 = CRS_G[3].multiplyUnsafe(bytesToNumberLE(treeIndexLE.subarray(0, 16)));
  const P3 = CRS_G[4].multiplyUnsafe(bytesToNumberLE(treeIndexLE.subarray(16, 32)));
  const acc = CRS_G0_TREEKEY.add(P0).add(P1).add(P2).add(P3);
  return Fr.toBytes(mapToField(acc));
};
// Full tree key: key hash with the last byte replaced by subIndex.
export const getTreeKey = (
  address: Uint8Array,
  treeIndex: Uint8Array,
  subIndex: number
): Uint8Array => {
  const keyHash = getTreeKeyHash(address, treeIndex);
  keyHash[keyHash.length - 1] = subIndex;
  return keyHash;
};
// Incrementally updates a commitment: C' = C + (new - old) * G_index.
export const updateCommitment = (
  commitment: Uint8Array,
  commitmentIndex: number,
  oldScalarValue: Uint8Array,
  newScalarValue: Uint8Array
): Commitment => {
  const oldCommitment = uncompressed.decode(commitment);
  const delta = Fr.sub(Fr.fromBytes(newScalarValue), Fr.fromBytes(oldScalarValue));
  precomputeOnFirstRun();
  const deltaCommitment = CRS_G[commitmentIndex].multiplyUnsafe(delta);
  return uncompressed.encode(oldCommitment.add(deltaCommitment));
};
// Uncompressed encoding of the identity point (x=0, y=1, little-endian).
export const zeroCommitment: Uint8Array = hexToBytes(
  '00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000'
);
// Converts a 64-byte uncompressed commitment into its 32-byte compressed form.
export const serializeCommitment = (commitment: Uint8Array): Uint8Array =>
  compressed.encode(uncompressed.decode(commitment));
// Builds a multipoint IPA proof for the given opening claims.
// The Fiat-Shamir sequence here must mirror multiproofCheck exactly.
export const createProof = (proverInputs: ProverInput[]): Uint8Array => {
  const proverQueries: ProverQuery[] = [];
  for (const q of proverInputs) {
    const commitment = compressed.decode(q.serializedCommitment);
    const vector = q.vector.map((i) => {
      const res = Fr.fromBytes(i);
      if (!Fr.isValid(res)) throw new Error('invalid poly item');
      return res;
    });
    // One query per requested opening index, sharing the same polynomial.
    for (const idx of q.indices) {
      validateIndex(idx);
      proverQueries.push({ commitment, poly: vector, point: idx, result: vector[idx] });
    }
  }
  const transcript = new Transcript('verkle');
  const powers = multiproofR(transcript, proverQueries);
  // Aggregate queries
  // Random-linear-combine all polynomials opened at the same point.
  const aggQueries: Record<number, Poly> = {};
  for (let i = 0; i < proverQueries.length; i++) {
    const query = proverQueries[i];
    const point = query.point;
    if (!aggQueries[point]) aggQueries[point] = PolyZero();
    const res = aggQueries[point];
    for (let j = 0; j < DOMAIN_SIZE; j++) res[j] = Fr.add(res[j], Fr.mul(query.poly[j], powers[i]));
  }
  const aggPoints = Object.keys(aggQueries).map((i) => Fr.create(BigInt(i)));
  // g(X) = sum over points of (agg(X) - agg(z)) / (X - z).
  const gx = PolyZero();
  for (const [point, agg] of Object.entries(aggQueries)) {
    const t = divideByLinearVanishing(agg, Number(point));
    for (let i = 0; i < DOMAIN_SIZE; i++) gx[i] = Fr.add(gx[i], t[i]);
  }
  const D = crsMSM(gx);
  transcript.appendPoint('D', D);
  const t = transcript.challengeScalar('t');
  // g1(X) = sum over points of agg(X) / (t - z).
  const g1den = Fr.invertBatch(aggPoints.map((i) => Fr.sub(t, i)));
  const g1x = PolyZero();
  const aggPolys = Object.values(aggQueries);
  for (let i = 0; i < aggPolys.length; i++) {
    for (let j = 0; j < DOMAIN_SIZE; j++) g1x[j] = Fr.add(g1x[j], Fr.mul(g1den[i], aggPolys[i][j]));
  }
  const E = crsMSM(g1x);
  transcript.appendPoint('E', E);
  const C = E.subtract(D);
  let b = evaluateLagrangeCoefficients(t);
  // g3 = g1 - g: the polynomial whose commitment is C.
  const g3x = g1x.map((i, j) => Fr.sub(i, gx[j]));
  let a = g3x;
  //
  let G = CRS_G;
  if (a.length !== DOMAIN_SIZE || b.length !== DOMAIN_SIZE)
    throw new Error('Wrong polynominals length');
  // IPA
  const w = ipaW(transcript, C, t, innerProduct(a, b));
  const Q = CRS_Q.multiply(w);
  const cl = [];
  const cr = [];
  // log2(256) = 8 folding rounds; each halves a, b and the basis G.
  for (let _k = 0; _k < DOMAIN_SIZE_LOG2; _k++) {
    const [aL, aR] = splitHalf(a);
    const [bL, bR] = splitHalf(b);
    const [GL, GR] = splitHalf(G);
    const zL = innerProduct(aR, bL);
    const zR = innerProduct(aL, bR);
    const L = Point.msm(GL.concat(Q), aR.concat(zL));
    const R = Point.msm(GR.concat(Q), aL.concat(zR));
    cl.push(L);
    cr.push(R);
    const x = ipaX(transcript, L, R);
    // TODO: batch this?
    const xInv = Fr.inv(x);
    // Fold halves in place: a' = aL + x*aR, b' = bL + x^-1*bR, G' = GL + x^-1*GR.
    for (let i = 0; i < aL.length; i++) {
      aL[i] = Fr.add(aL[i], Fr.mul(x, aR[i]));
      bL[i] = Fr.add(bL[i], Fr.mul(xInv, bR[i]));
      GL[i] = GL[i].add(GR[i].multiply(xInv));
    }
    a = aL;
    b = bL;
    G = GL;
  }
  // After all rounds a has length 1: the final folded scalar.
  return multipointProof.encode({ D, cl, cr, a: a[0] });
};
// Verifies a serialized multipoint proof against claimed (index, value) openings.
export const verifyProof = (proofBytes: Uint8Array, verifierInputs: VerifierInput[]): boolean => {
  const verifierQueries: VerifierQuery[] = [];
  for (const i of verifierInputs) {
    const commitment = compressed.decode(i.serializedCommitment);
    for (const { index, value } of i.indexValuePairs)
      verifierQueries.push({ commitment, point: index, result: Fr.fromBytes(value) });
  }
  return multiproofCheck(
    multipointProof.decode(proofBytes),
    verifierQueries,
    new Transcript('verkle')
  );
};
// Extension-presence status encoded in the witness' depthExtensionPresent bytes
// (low 2 bits; the depth occupies the high bits).
const EXTPresent = {
  None: 0,
  DifferentStem: 1,
  Present: 2,
} as const;
// Verifies the pre-state of an execution witness (JSON, as produced by
// verkle-enabled clients) against a state root. Returns false on any
// malformed root or mismatching claim; throws on internally inconsistent
// witness data.
export function verifyExecutionWitnessPreState(
  rootHex: string,
  executionWitnessJson: string
): boolean {
  let root: Point;
  try {
    root = compressed.decode(ethHex.decode(rootHex));
  } catch (e) {
    return false; // invalid root encoding -> not verifiable
  }
  // NOTE(review): JSON.parse result is used without schema validation —
  // assumes a well-formed executionWitness object; confirm against callers.
  const executionWitness = JSON.parse(executionWitnessJson);
  const stateDiffs = executionWitness.stateDiff.map((i: any) => ({
    stem: ethHex.decode(i.stem),
    suffixDiffs: i.suffixDiffs.map((i: any) => ({
      suffix: i.suffix,
      currentValue: i.currentValue ? ethHex.decode(i.currentValue) : undefined,
      newValue: i.newValue ? ethHex.decode(i.newValue) : undefined,
    })),
  }));
  const otherStems = executionWitness.verkleProof.otherStems.map(ethHex.decode);
  const proof = {
    d: ethHex.decode(executionWitness.verkleProof.d),
    cl: executionWitness.verkleProof.ipaProof.cl.map(ethHex.decode),
    cr: executionWitness.verkleProof.ipaProof.cr.map(ethHex.decode),
    finalEvaluation: ethHex.decode(executionWitness.verkleProof.ipaProof.finalEvaluation),
  };
  // Each byte packs extension status (low 2 bits) and depth (bits 3+).
  const depthExtensionPresent = ethHex.decode(executionWitness.verkleProof.depthExtensionPresent);
  const depths = [];
  const extensionPresent = [];
  for (const byte of depthExtensionPresent) {
    extensionPresent.push(byte & 3);
    depths.push(byte >> 3);
  }
  // Appends a single index byte z to a path/stem.
  const addZ = (key: Uint8Array, z: number) => concatBytes(key, new Uint8Array([z]));
  // Flatten state diffs into full 32-byte keys plus claimed current values.
  const keys = [];
  const currentValues = [];
  for (const sd of stateDiffs) {
    const stem = sd.stem;
    for (const diff of sd.suffixDiffs) {
      keys.push(addZ(stem, diff.suffix));
      currentValues.push(diff.currentValue);
    }
  }
  // Deduplicate 31-byte stems, preserving first-seen order.
  const stemsSet = new Map<string, Uint8Array>();
  for (const key of keys) {
    const stem = key.slice(0, 31);
    const stemHex = bytesToHex(stem);
    if (!stemsSet.has(stemHex)) stemsSet.set(stemHex, stem);
  }
  const stems = Array.from(stemsSet.values());
  // Pair each stem (in order) with its [extension status, depth] from the proof.
  const depthsAndExtByStem = new Map<string, [number, number]>();
  const stemsWithExtension = new Map<string, Uint8Array>();
  for (let i = 0; i < stems.length; i++) {
    const stem = stems[i];
    const extPres = extensionPresent[i];
    const stemHex = bytesToHex(stem);
    depthsAndExtByStem.set(stemHex, [extPres, depths[i]]);
    if (extPres === EXTPresent.Present) stemsWithExtension.set(stemHex, stem);
  }
  // Opening bookkeeping: which (path, z) pairs need checking, and the leaf
  // values expected at them.
  const allPathsSet = new Set<string>();
  const allPathsAndZsSet = new Map<string, [Uint8Array, number]>();
  const leafValuesByPathAndZ = new Map<string, bigint>();
  function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {
    if (a.length !== b.length) return false;
    for (let i = 0; i < a.length; ++i) if (a[i] !== b[i]) return false;
    return true;
  }
  const addLeafValuesByPathAndZ = (path: Uint8Array, z: number, value: bigint) => {
    leafValuesByPathAndZ.set(bytesToHex(addZ(path, z)), value);
  };
  const addAllPathsAndZsSet = (path: Uint8Array, z: number, value?: bigint) => {
    allPathsSet.add(bytesToHex(path));
    allPathsAndZsSet.set(bytesToHex(addZ(path, z)), [path, z]);
    if (value !== undefined) leafValuesByPathAndZ.set(bytesToHex(addZ(path, z)), value);
  };
  // Walk every key and register the openings its tree path implies.
  for (let i = 0; i < keys.length; i++) {
    const key = keys[i];
    const value = currentValues[i];
    const stem = key.slice(0, 31);
    const stemHex = bytesToHex(stem);
    const [extPres, depth] = depthsAndExtByStem.get(stemHex) || [undefined, undefined];
    if (extPres === undefined || depth === undefined)
      throw new Error(`Stem not found in depths and extensions map`);
    // Inner-node openings along the path down to `depth`.
    for (let j = 0; j < depth; j++) addAllPathsAndZsSet(stem.subarray(0, j), stem[j]);
    if (extPres === EXTPresent.DifferentStem || extPres === EXTPresent.Present) {
      const path = stem.subarray(0, depth);
      // Extension node: slot 0 holds the marker (1), slot 1 the stem.
      addAllPathsAndZsSet(path, 0, Fr.ONE);
      addAllPathsAndZsSet(path, 1);
      if (extPres === EXTPresent.Present) {
        const suffix = key[31];
        // Suffix-tree halves: slots 2 (suffix < 128) and 3 (suffix >= 128).
        const openingIndex = suffix < 128 ? 2 : 3;
        addAllPathsAndZsSet(path, openingIndex);
        addLeafValuesByPathAndZ(path, 1, Fr.fromBytes(extendScalar(stem)));
        const suffixPath = addZ(path, openingIndex);
        // Each 32-byte value occupies two slots: low half (with 2^128 marker) + high half.
        const lowIdx = 2 * (suffix % 128);
        addAllPathsAndZsSet(
          suffixPath,
          lowIdx,
          value ? Fr.add(Fr.fromBytes(extendScalar(value.subarray(0, 16))), TWO_POW_128) : Fr.ZERO
        );
        addAllPathsAndZsSet(
          suffixPath,
          lowIdx + 1,
          value ? Fr.fromBytes(extendScalar(value.subarray(16, 32))) : Fr.ZERO
        );
      } else if (extPres === EXTPresent.DifferentStem) {
        // A different stem occupies this slot: the claimed value must be absent.
        if (value !== undefined) return false;
        let otherStem = undefined;
        const found = [];
        for (const [_, stemValue] of stemsWithExtension) {
          if (arraysEqual(stemValue.slice(0, depth), stem.slice(0, depth))) found.push(stemValue);
        }
        if (found.length > 1) {
          throw new Error(
            `Found more than one instance of stems with extension at depth ${depth}: ${found}`
          );
        } else if (found.length === 1) {
          otherStem = found[0];
        } else {
          // Fall back to the witness-provided otherStems list.
          for (const diffStem of otherStems) {
            if (arraysEqual(diffStem.slice(0, depth), stem.slice(0, depth))) {
              otherStem = diffStem;
              break;
            }
          }
          if (!otherStem)
            throw new Error(`ExtPresent::DifferentStem flag but cannot find the encountered stem`);
          addLeafValuesByPathAndZ(path, 1, Fr.fromBytes(extendScalar(otherStem)));
        }
      }
    } else if (extPres === EXTPresent.None) {
      // Absent key: the child slot at the divergence point must be zero.
      if (value !== undefined) return false;
      addLeafValuesByPathAndZ(
        depth === 1 ? new Uint8Array() : stem.slice(0, depth),
        stem[depth - 1],
        Fr.ZERO
      );
    }
  }
  // TODO: this seems broken?, we need to sort arrays
  // Pair commitments (root first) with paths by insertion order.
  const commitmentsByPath2 = new Map<string, Point>();
  const allPathsArray = Array.from(allPathsSet);
  const commitmentsSortedByPath: Point[] = [
    root,
    ...executionWitness.verkleProof.commitmentsByPath.map(ethHex.decode).map(compressed.decode),
  ];
  if (commitmentsSortedByPath.length !== allPathsArray.length)
    throw new Error('Mismatch between commitments and paths length');
  for (let i = 0; i < allPathsArray.length; i++)
    commitmentsByPath2.set(allPathsArray[i], commitmentsSortedByPath[i]);
  // Build verifier queries: expected value is either a recorded leaf value or
  // the field-mapped child commitment.
  const queries = [];
  for (const [key, [path, z]] of allPathsAndZsSet) {
    const commitment = commitmentsByPath2.get(bytesToHex(path)); //without z
    if (!commitment) throw new Error('Commitment not found for the given path and z');
    let y = leafValuesByPathAndZ.get(key);
    if (y === undefined) {
      const commitment = commitmentsByPath2.get(key); // with z
      y = commitment ? mapToField(commitment) : Fr.ZERO;
    }
    queries.push({
      path,
      commitment,
      point: z,
      result: y,
    });
  }
  // Canonical ordering: lexicographic by path bytes, then length, then z.
  queries.sort((a, b) => {
    const minLength = Math.min(a.path.length, b.path.length);
    for (let i = 0; i < minLength; i++) if (a.path[i] !== b.path[i]) return a.path[i] - b.path[i];
    if (a.path.length !== b.path.length) return a.path.length - b.path.length;
    return Number(a.point - b.point);
  });
  const multiproof = {
    cl: proof.cl.map(compressed.decode),
    cr: proof.cr.map(compressed.decode),
    a: bytesToNumberBE(proof.finalEvaluation),
    D: compressed.decode(proof.d),
  };
  // Transcript label 'vt' matches client-side execution-witness proofs.
  return multiproofCheck(multiproof, queries, new Transcript('vt'));
}
// NOTE: for tests only, don't use
// Exposes internals (weights, curve, transcript, polynomial ops) for the test suite.
export const __tests: any = {
  scalarMulIndex,
  WEIGHTS_INVERTED,
  WEIGTHS_BARYCENTRIC,
  WEIGTHS_BARYCENTRIC_INV,
  WEIGHTS_INVERTED_NEG,
  bandersnatch,
  evaluateLagrangeCoefficients,
  divideByLinearVanishing,
  Transcript,
};