refactor: move brother_node development artifact to dev/test-nodes subdirectory

Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
2026-03-30 17:09:06 +02:00
parent bf730dcb4a
commit 816e258d4c
11734 changed files with 2001707 additions and 0 deletions

21
dev/env/node_modules/@streamparser/json/src/index.ts generated vendored Executable file
View File

@@ -0,0 +1,21 @@
// Public entry point of @streamparser/json.
// Re-exports the two pipeline stages (Tokenizer: bytes -> tokens,
// TokenParser: tokens -> values), the combined JSONParser facade, their
// option and error types, and the utility type modules.
// NOTE: the export order also fixes module evaluation order; keep as-is.
export { default as JSONParser, type JSONParserOptions } from "./jsonparser.js";
export {
  default as Tokenizer,
  type TokenizerOptions,
  TokenizerError,
} from "./tokenizer.js";
export {
  default as TokenParser,
  type TokenParserOptions,
  TokenParserError,
} from "./tokenparser.js";
export * as utf8 from "./utils/utf-8.js";
export * as JsonTypes from "./utils/types/jsonTypes.js";
export * as ParsedTokenInfo from "./utils/types/parsedTokenInfo.js";
export * as ParsedElementInfo from "./utils/types/parsedElementInfo.js";
export {
  TokenParserMode,
  type StackElement,
} from "./utils/types/stackElement.js";
export { default as TokenType } from "./utils/types/tokenType.js";

62
dev/env/node_modules/@streamparser/json/src/jsonparser.ts generated vendored Executable file
View File

@@ -0,0 +1,62 @@
import Tokenizer, { type TokenizerOptions } from "./tokenizer.js";
import TokenParser, { type TokenParserOptions } from "./tokenparser.js";
import type { ParsedElementInfo } from "./utils/types/parsedElementInfo.js";
import type { ParsedTokenInfo } from "./utils/types/parsedTokenInfo.js";
/**
 * Combined options accepted by JSONParser: the union of the byte-level
 * TokenizerOptions and the value-level TokenParserOptions. The same object
 * is handed to both pipeline stages.
 */
export interface JSONParserOptions
  extends TokenizerOptions,
    TokenParserOptions {}
/**
 * High-level streaming JSON parser. Composes a Tokenizer (bytes -> tokens)
 * with a TokenParser (tokens -> values) and keeps the two ends of the
 * pipeline in sync, forwarding end-of-input and errors between the stages.
 */
export default class JSONParser {
  private tokenizer: Tokenizer;
  private tokenParser: TokenParser;

  constructor(opts: JSONParserOptions = {}) {
    this.tokenizer = new Tokenizer(opts);
    this.tokenParser = new TokenParser(opts);
    // Pipe tokens from the tokenizer straight into the token parser.
    this.tokenizer.onToken = (parsedToken) => this.tokenParser.write(parsedToken);
    // When either stage finishes, make sure the other one is ended too.
    this.tokenizer.onEnd = () => {
      if (!this.tokenParser.isEnded) this.tokenParser.end();
    };
    // Token-parser failures are routed back so the tokenizer enters its
    // error state before the error is surfaced.
    this.tokenParser.onError = (err) => this.tokenizer.error(err);
    this.tokenParser.onEnd = () => {
      if (!this.tokenizer.isEnded) this.tokenizer.end();
    };
  }

  /** True once both pipeline stages have ended. */
  public get isEnded(): boolean {
    return this.tokenParser.isEnded && this.tokenizer.isEnded;
  }

  /** Feed a chunk of input (bytes or text) into the pipeline. */
  public write(input: Iterable<number> | string): void {
    this.tokenizer.write(input);
  }

  /** Signal end of input; flushes any pending token/value. */
  public end(): void {
    this.tokenizer.end();
  }

  /**
   * Subscribe to raw tokens. The subscriber runs before each token is
   * handed on to the token parser.
   */
  public set onToken(cb: (parsedTokenInfo: ParsedTokenInfo) => void) {
    this.tokenizer.onToken = (parsedToken) => {
      cb(parsedToken);
      this.tokenParser.write(parsedToken);
    };
  }

  /** Subscribe to parsed values (and partial values when enabled). */
  public set onValue(cb: (parsedElementInfo: ParsedElementInfo) => void) {
    this.tokenParser.onValue = cb;
  }

  /** Subscribe to errors raised by either pipeline stage. */
  public set onError(cb: (err: Error) => void) {
    this.tokenizer.onError = cb;
  }

  /** Subscribe to end-of-parse; also makes sure the tokenizer is ended. */
  public set onEnd(cb: () => void) {
    this.tokenParser.onEnd = () => {
      if (!this.tokenizer.isEnded) this.tokenizer.end();
      cb.call(this.tokenParser);
    };
  }
}

851
dev/env/node_modules/@streamparser/json/src/tokenizer.ts generated vendored Executable file
View File

@@ -0,0 +1,851 @@
import { charset, escapedSequences } from "./utils/utf-8.js";
import {
type StringBuilder,
NonBufferedString,
BufferedString,
} from "./utils/bufferedString.js";
import TokenType from "./utils/types/tokenType.js";
import type { ParsedTokenInfo } from "./utils/types/parsedTokenInfo.js";
// Tokenizer States
// One state per position in the byte-level JSON grammar. The literal states
// (TRUE1..TRUE3, FALSE1..FALSE4, NULL1..NULL3) count how many characters of
// the keyword have been matched so far; the STRING_* states track escape and
// multi-byte UTF-8 progress; the NUMBER_* states follow the numeric grammar.
const enum TokenizerStates {
  START,
  ENDED,
  ERROR,
  TRUE1,
  TRUE2,
  TRUE3,
  FALSE1,
  FALSE2,
  FALSE3,
  FALSE4,
  NULL1,
  NULL2,
  NULL3,
  STRING_DEFAULT,
  STRING_AFTER_BACKSLASH,
  STRING_UNICODE_DIGIT_1,
  STRING_UNICODE_DIGIT_2,
  STRING_UNICODE_DIGIT_3,
  STRING_UNICODE_DIGIT_4,
  STRING_INCOMPLETE_CHAR,
  NUMBER_AFTER_INITIAL_MINUS,
  NUMBER_AFTER_INITIAL_ZERO,
  NUMBER_AFTER_INITIAL_NON_ZERO,
  NUMBER_AFTER_FULL_STOP,
  NUMBER_AFTER_DECIMAL,
  NUMBER_AFTER_E,
  NUMBER_AFTER_E_AND_SIGN,
  NUMBER_AFTER_E_AND_DIGIT,
  SEPARATOR,
  BOM_OR_START,
  BOM,
}
/**
 * Maps a TokenizerStates value to its name, for use in error messages.
 * NOTE(review): this array must stay in the exact order of the
 * TokenizerStates enum — const enums are erased at compile time, so the
 * names cannot be derived from the enum itself at runtime.
 */
function TokenizerStateToString(tokenizerState: TokenizerStates): string {
  return [
    "START",
    "ENDED",
    "ERROR",
    "TRUE1",
    "TRUE2",
    "TRUE3",
    "FALSE1",
    "FALSE2",
    "FALSE3",
    "FALSE4",
    "NULL1",
    "NULL2",
    "NULL3",
    "STRING_DEFAULT",
    "STRING_AFTER_BACKSLASH",
    "STRING_UNICODE_DIGIT_1",
    "STRING_UNICODE_DIGIT_2",
    "STRING_UNICODE_DIGIT_3",
    "STRING_UNICODE_DIGIT_4",
    "STRING_INCOMPLETE_CHAR",
    "NUMBER_AFTER_INITIAL_MINUS",
    "NUMBER_AFTER_INITIAL_ZERO",
    "NUMBER_AFTER_INITIAL_NON_ZERO",
    "NUMBER_AFTER_FULL_STOP",
    "NUMBER_AFTER_DECIMAL",
    "NUMBER_AFTER_E",
    "NUMBER_AFTER_E_AND_SIGN",
    "NUMBER_AFTER_E_AND_DIGIT",
    "SEPARATOR",
    "BOM_OR_START",
    "BOM",
  ][tokenizerState];
}
/** Configuration for the byte-level Tokenizer. */
export interface TokenizerOptions {
  /** Size in bytes of the reusable string buffer; values <= 4 fall back to unbuffered strings. */
  stringBufferSize?: number;
  /** Size in bytes of the reusable number buffer; 0 falls back to unbuffered numbers. */
  numberBufferSize?: number;
  /** Separator expected between top-level JSON values (e.g. "\n" for NDJSON). */
  separator?: string;
  /** When true, emit best-effort partial tokens at the end of each incomplete chunk. */
  emitPartialTokens?: boolean;
}
// Defaults: unbuffered strings and numbers, single JSON document (no
// separator), and no partial-token emission.
const defaultOpts: TokenizerOptions = {
  stringBufferSize: 0,
  numberBufferSize: 0,
  separator: undefined,
  emitPartialTokens: false,
};
/** Error raised for malformed input or misuse of the Tokenizer. */
export class TokenizerError extends Error {
  constructor(message: string) {
    super(message);
    // Typescript is broken. This is a workaround
    // (restores the prototype chain that is lost when extending built-ins
    // such as Error with a downlevel compile target).
    Object.setPrototypeOf(this, TokenizerError.prototype);
  }
}
/**
 * Streaming JSON tokenizer. Consumes raw input chunk by chunk (strings,
 * byte arrays or TypedArrays) and emits JSON tokens (braces, brackets,
 * colon, comma, strings, numbers, true/false/null, separators) through the
 * `onToken` callback. Implemented as a byte-driven state machine, so input
 * may be split at arbitrary byte boundaries — including in the middle of a
 * multi-byte UTF-8 character or an escape sequence.
 */
export default class Tokenizer {
  private state = TokenizerStates.BOM_OR_START;
  // Byte-order-mark detection: expected BOM byte sequence and match position.
  private bom?: number[];
  private bomIndex = 0;
  private emitPartialTokens: boolean;
  // Optional separator emitted between top-level JSON values.
  private separator?: string;
  private separatorBytes?: Uint8Array;
  private separatorIndex = 0;
  // Extra input bytes consumed by escape sequences; used to keep `offset`
  // aligned with positions in the original byte stream.
  private escapedCharsByteLength = 0;
  private bufferedString: StringBuilder;
  private bufferedNumber: StringBuilder;
  private unicode?: string; // unicode escapes
  private highSurrogate?: number;
  private bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
  private bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
  private char_split_buffer = new Uint8Array(4); // for rebuilding chars split before boundary is reached
  private encoder = new TextEncoder();
  private offset = -1; // byte offset reported with emitted tokens
  /**
   * @param opts Buffer sizes, separator and partial-token behaviour; any
   *             missing field falls back to `defaultOpts`.
   */
  constructor(opts?: TokenizerOptions) {
    opts = { ...defaultOpts, ...opts };
    this.emitPartialTokens = opts.emitPartialTokens === true;
    // Buffered storage is only used above 4 bytes (the longest UTF-8
    // sequence); otherwise the simpler non-buffered builder is used.
    this.bufferedString =
      opts.stringBufferSize && opts.stringBufferSize > 4
        ? new BufferedString(opts.stringBufferSize)
        : new NonBufferedString();
    this.bufferedNumber =
      opts.numberBufferSize && opts.numberBufferSize > 0
        ? new BufferedString(opts.numberBufferSize)
        : new NonBufferedString();
    this.separator = opts.separator;
    this.separatorBytes = opts.separator
      ? this.encoder.encode(opts.separator)
      : undefined;
  }
  /** True once `end()` has completed successfully. */
  public get isEnded(): boolean {
    return this.state === TokenizerStates.ENDED;
  }
  /**
   * Feed a chunk of input to the tokenizer. Accepts strings, plain arrays
   * of byte values and TypedArrays. Malformed input is reported through
   * `error`/`onError` rather than thrown to the caller.
   */
  public write(input: Iterable<number> | string): void {
    try {
      let buffer: Uint8Array;
      if (input instanceof Uint8Array) {
        buffer = input;
      } else if (typeof input === "string") {
        buffer = this.encoder.encode(input);
      } else if (Array.isArray(input)) {
        buffer = Uint8Array.from(input);
      } else if (ArrayBuffer.isView(input)) {
        // Reinterpret any other TypedArray view as raw bytes.
        buffer = new Uint8Array(
          input.buffer,
          input.byteOffset,
          input.byteLength,
        );
      } else {
        throw new TypeError(
          "Unexpected type. The `write` function only accepts Arrays, TypedArrays and Strings.",
        );
      }
      for (let i = 0; i < buffer.length; i += 1) {
        const n = buffer[i]; // get current byte from buffer
        switch (this.state) {
          // @ts-expect-error fall through case
          case TokenizerStates.BOM_OR_START:
            // BOM detection depends on the ORIGINAL input type, not the
            // byte view; falls through to START when no BOM is possible.
            if (input instanceof Uint8Array && n === 0xef) {
              this.bom = [0xef, 0xbb, 0xbf];
              this.bomIndex += 1;
              this.state = TokenizerStates.BOM;
              continue;
            }
            if (input instanceof Uint16Array) {
              if (n === 0xfe) {
                this.bom = [0xfe, 0xff];
                this.bomIndex += 1;
                this.state = TokenizerStates.BOM;
                continue;
              }
              if (n === 0xff) {
                this.bom = [0xff, 0xfe];
                this.bomIndex += 1;
                this.state = TokenizerStates.BOM;
                continue;
              }
            }
            if (input instanceof Uint32Array) {
              if (n === 0x00) {
                this.bom = [0x00, 0x00, 0xfe, 0xff];
                this.bomIndex += 1;
                this.state = TokenizerStates.BOM;
                continue;
              }
              if (n === 0xff) {
                this.bom = [0xff, 0xfe, 0x00, 0x00];
                this.bomIndex += 1;
                this.state = TokenizerStates.BOM;
                continue;
              }
            }
          // eslint-disable-next-line no-fallthrough
          case TokenizerStates.START:
            this.offset += 1;
            if (this.separatorBytes && n === this.separatorBytes[0]) {
              if (this.separatorBytes.length === 1) {
                this.state = TokenizerStates.START;
                this.onToken({
                  token: TokenType.SEPARATOR,
                  value: this.separator as string,
                  offset: this.offset + this.separatorBytes.length - 1,
                });
                continue;
              }
              this.state = TokenizerStates.SEPARATOR;
              continue;
            }
            if (
              n === charset.SPACE ||
              n === charset.NEWLINE ||
              n === charset.CARRIAGE_RETURN ||
              n === charset.TAB
            ) {
              // whitespace
              continue;
            }
            if (n === charset.LEFT_CURLY_BRACKET) {
              this.onToken({
                token: TokenType.LEFT_BRACE,
                value: "{",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.RIGHT_CURLY_BRACKET) {
              this.onToken({
                token: TokenType.RIGHT_BRACE,
                value: "}",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.LEFT_SQUARE_BRACKET) {
              this.onToken({
                token: TokenType.LEFT_BRACKET,
                value: "[",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.RIGHT_SQUARE_BRACKET) {
              this.onToken({
                token: TokenType.RIGHT_BRACKET,
                value: "]",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.COLON) {
              this.onToken({
                token: TokenType.COLON,
                value: ":",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.COMMA) {
              this.onToken({
                token: TokenType.COMMA,
                value: ",",
                offset: this.offset,
              });
              continue;
            }
            if (n === charset.LATIN_SMALL_LETTER_T) {
              this.state = TokenizerStates.TRUE1;
              continue;
            }
            if (n === charset.LATIN_SMALL_LETTER_F) {
              this.state = TokenizerStates.FALSE1;
              continue;
            }
            if (n === charset.LATIN_SMALL_LETTER_N) {
              this.state = TokenizerStates.NULL1;
              continue;
            }
            if (n === charset.QUOTATION_MARK) {
              this.bufferedString.reset();
              this.escapedCharsByteLength = 0;
              this.state = TokenizerStates.STRING_DEFAULT;
              continue;
            }
            if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.reset();
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
              continue;
            }
            if (n === charset.DIGIT_ZERO) {
              this.bufferedNumber.reset();
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
              continue;
            }
            if (n === charset.HYPHEN_MINUS) {
              this.bufferedNumber.reset();
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_INITIAL_MINUS;
              continue;
            }
            break;
          // STRING
          case TokenizerStates.STRING_DEFAULT:
            if (n === charset.QUOTATION_MARK) {
              const string = this.bufferedString.toString();
              this.state = TokenizerStates.START;
              this.onToken({
                token: TokenType.STRING,
                value: string,
                offset: this.offset,
              });
              // Advance past the string body, its escapes, and the closing quote.
              this.offset +=
                this.escapedCharsByteLength +
                this.bufferedString.byteLength +
                1;
              continue;
            }
            if (n === charset.REVERSE_SOLIDUS) {
              this.state = TokenizerStates.STRING_AFTER_BACKSLASH;
              continue;
            }
            if (n >= 128) {
              // Parse multi byte (>=128) chars one at a time
              if (n >= 194 && n <= 223) {
                this.bytes_in_sequence = 2;
              } else if (n <= 239) {
                this.bytes_in_sequence = 3;
              } else {
                this.bytes_in_sequence = 4;
              }
              if (this.bytes_in_sequence <= buffer.length - i) {
                // if bytes needed to complete char fall outside buffer length, we have a boundary split
                this.bufferedString.appendBuf(
                  buffer,
                  i,
                  i + this.bytes_in_sequence,
                );
                i += this.bytes_in_sequence - 1;
                continue;
              }
              this.bytes_remaining = i + this.bytes_in_sequence - buffer.length;
              this.char_split_buffer.set(buffer.subarray(i));
              i = buffer.length - 1;
              this.state = TokenizerStates.STRING_INCOMPLETE_CHAR;
              continue;
            }
            if (n >= charset.SPACE) {
              this.bufferedString.appendChar(n);
              continue;
            }
            break;
          case TokenizerStates.STRING_INCOMPLETE_CHAR:
            // check for carry over of a multi byte char split between data chunks
            // & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration
            this.char_split_buffer.set(
              buffer.subarray(i, i + this.bytes_remaining),
              this.bytes_in_sequence - this.bytes_remaining,
            );
            this.bufferedString.appendBuf(
              this.char_split_buffer,
              0,
              this.bytes_in_sequence,
            );
            i = this.bytes_remaining - 1;
            this.state = TokenizerStates.STRING_DEFAULT;
            continue;
          case TokenizerStates.STRING_AFTER_BACKSLASH:
            // eslint-disable-next-line no-case-declarations
            const controlChar = escapedSequences[n];
            if (controlChar) {
              this.bufferedString.appendChar(controlChar);
              this.escapedCharsByteLength += 1; // len(\")=2 minus the fact you're appending len(controlChar)=1
              this.state = TokenizerStates.STRING_DEFAULT;
              continue;
            }
            if (n === charset.LATIN_SMALL_LETTER_U) {
              this.unicode = "";
              this.state = TokenizerStates.STRING_UNICODE_DIGIT_1;
              continue;
            }
            break;
          case TokenizerStates.STRING_UNICODE_DIGIT_1:
          case TokenizerStates.STRING_UNICODE_DIGIT_2:
          case TokenizerStates.STRING_UNICODE_DIGIT_3:
            // Hex digits 1-3 of a \uXXXX escape; `this.state += 1` relies on
            // the STRING_UNICODE_DIGIT_* enum members being consecutive.
            if (
              (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
              (n >= charset.LATIN_CAPITAL_LETTER_A &&
                n <= charset.LATIN_CAPITAL_LETTER_F) ||
              (n >= charset.LATIN_SMALL_LETTER_A &&
                n <= charset.LATIN_SMALL_LETTER_F)
            ) {
              this.unicode += String.fromCharCode(n);
              this.state += 1;
              continue;
            }
            break;
          case TokenizerStates.STRING_UNICODE_DIGIT_4:
            if (
              (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
              (n >= charset.LATIN_CAPITAL_LETTER_A &&
                n <= charset.LATIN_CAPITAL_LETTER_F) ||
              (n >= charset.LATIN_SMALL_LETTER_A &&
                n <= charset.LATIN_SMALL_LETTER_F)
            ) {
              const intVal = parseInt(
                this.unicode + String.fromCharCode(n),
                16,
              );
              let unicodeString: string;
              if (this.highSurrogate === undefined) {
                if (intVal >= 0xd800 && intVal <= 0xdbff) {
                  //<55296,56319> - highSurrogate
                  this.highSurrogate = intVal;
                  this.state = TokenizerStates.STRING_DEFAULT;
                  continue;
                } else {
                  unicodeString = String.fromCharCode(intVal);
                }
              } else {
                if (intVal >= 0xdc00 && intVal <= 0xdfff) {
                  //<56320,57343> - lowSurrogate
                  unicodeString = String.fromCharCode(
                    this.highSurrogate,
                    intVal,
                  );
                } else {
                  unicodeString = String.fromCharCode(this.highSurrogate);
                }
                this.highSurrogate = undefined;
              }
              const unicodeBuffer = this.encoder.encode(unicodeString);
              this.bufferedString.appendBuf(unicodeBuffer);
              // len(\u0000)=6 minus the fact you're appending len(buf)
              this.escapedCharsByteLength += 6 - unicodeBuffer.byteLength;
              this.state = TokenizerStates.STRING_DEFAULT;
              continue;
            }
            break;
          // Number
          case TokenizerStates.NUMBER_AFTER_INITIAL_MINUS:
            if (n === charset.DIGIT_ZERO) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
              continue;
            }
            if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
              continue;
            }
            break;
          case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
            if (n === charset.FULL_STOP) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
              continue;
            }
            if (
              n === charset.LATIN_SMALL_LETTER_E ||
              n === charset.LATIN_CAPITAL_LETTER_E
            ) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_E;
              continue;
            }
            // Not part of the number: emit it and re-process this byte in START.
            i -= 1;
            this.state = TokenizerStates.START;
            this.emitNumber();
            continue;
          case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
            if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              continue;
            }
            if (n === charset.FULL_STOP) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
              continue;
            }
            if (
              n === charset.LATIN_SMALL_LETTER_E ||
              n === charset.LATIN_CAPITAL_LETTER_E
            ) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_E;
              continue;
            }
            i -= 1;
            this.state = TokenizerStates.START;
            this.emitNumber();
            continue;
          case TokenizerStates.NUMBER_AFTER_FULL_STOP:
            if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_DECIMAL;
              continue;
            }
            break;
          case TokenizerStates.NUMBER_AFTER_DECIMAL:
            if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              continue;
            }
            if (
              n === charset.LATIN_SMALL_LETTER_E ||
              n === charset.LATIN_CAPITAL_LETTER_E
            ) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_E;
              continue;
            }
            i -= 1;
            this.state = TokenizerStates.START;
            this.emitNumber();
            continue;
          // @ts-expect-error fall through case
          case TokenizerStates.NUMBER_AFTER_E:
            if (n === charset.PLUS_SIGN || n === charset.HYPHEN_MINUS) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_E_AND_SIGN;
              continue;
            }
          // eslint-disable-next-line no-fallthrough
          case TokenizerStates.NUMBER_AFTER_E_AND_SIGN:
            if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              this.state = TokenizerStates.NUMBER_AFTER_E_AND_DIGIT;
              continue;
            }
            break;
          case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
            if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
              this.bufferedNumber.appendChar(n);
              continue;
            }
            i -= 1;
            this.state = TokenizerStates.START;
            this.emitNumber();
            continue;
          // TRUE
          case TokenizerStates.TRUE1:
            if (n === charset.LATIN_SMALL_LETTER_R) {
              this.state = TokenizerStates.TRUE2;
              continue;
            }
            break;
          case TokenizerStates.TRUE2:
            if (n === charset.LATIN_SMALL_LETTER_U) {
              this.state = TokenizerStates.TRUE3;
              continue;
            }
            break;
          case TokenizerStates.TRUE3:
            if (n === charset.LATIN_SMALL_LETTER_E) {
              this.state = TokenizerStates.START;
              this.onToken({
                token: TokenType.TRUE,
                value: true,
                offset: this.offset,
              });
              this.offset += 3;
              continue;
            }
            break;
          // FALSE
          case TokenizerStates.FALSE1:
            if (n === charset.LATIN_SMALL_LETTER_A) {
              this.state = TokenizerStates.FALSE2;
              continue;
            }
            break;
          case TokenizerStates.FALSE2:
            if (n === charset.LATIN_SMALL_LETTER_L) {
              this.state = TokenizerStates.FALSE3;
              continue;
            }
            break;
          case TokenizerStates.FALSE3:
            if (n === charset.LATIN_SMALL_LETTER_S) {
              this.state = TokenizerStates.FALSE4;
              continue;
            }
            break;
          case TokenizerStates.FALSE4:
            if (n === charset.LATIN_SMALL_LETTER_E) {
              this.state = TokenizerStates.START;
              this.onToken({
                token: TokenType.FALSE,
                value: false,
                offset: this.offset,
              });
              this.offset += 4;
              continue;
            }
            break;
          // NULL
          case TokenizerStates.NULL1:
            if (n === charset.LATIN_SMALL_LETTER_U) {
              this.state = TokenizerStates.NULL2;
              continue;
            }
            break;
          case TokenizerStates.NULL2:
            if (n === charset.LATIN_SMALL_LETTER_L) {
              this.state = TokenizerStates.NULL3;
              continue;
            }
            break;
          case TokenizerStates.NULL3:
            if (n === charset.LATIN_SMALL_LETTER_L) {
              this.state = TokenizerStates.START;
              this.onToken({
                token: TokenType.NULL,
                value: null,
                offset: this.offset,
              });
              this.offset += 3;
              continue;
            }
            break;
          case TokenizerStates.SEPARATOR:
            // Matching a multi-byte separator one byte at a time.
            this.separatorIndex += 1;
            if (
              !this.separatorBytes ||
              n !== this.separatorBytes[this.separatorIndex]
            ) {
              break;
            }
            if (this.separatorIndex === this.separatorBytes.length - 1) {
              this.state = TokenizerStates.START;
              this.onToken({
                token: TokenType.SEPARATOR,
                value: this.separator as string,
                offset: this.offset + this.separatorIndex,
              });
              this.separatorIndex = 0;
            }
            continue;
          // BOM support
          case TokenizerStates.BOM:
            if (n === this.bom![this.bomIndex]) {
              if (this.bomIndex === this.bom!.length - 1) {
                this.state = TokenizerStates.START;
                this.bom = undefined;
                this.bomIndex = 0;
                continue;
              }
              this.bomIndex += 1;
              continue;
            }
            break;
          case TokenizerStates.ENDED:
            // Only trailing whitespace is tolerated after the document ends.
            if (
              n === charset.SPACE ||
              n === charset.NEWLINE ||
              n === charset.CARRIAGE_RETURN ||
              n === charset.TAB
            ) {
              // whitespace
              continue;
            }
        }
        // Falling out of the switch means the byte was invalid in this state.
        throw new TokenizerError(
          `Unexpected "${String.fromCharCode(
            n,
          )}" at position "${i}" in state ${TokenizerStateToString(
            this.state,
          )}`,
        );
      }
      // End of chunk: optionally flush best-effort partial tokens for
      // whatever is mid-parse.
      if (this.emitPartialTokens) {
        switch (this.state) {
          case TokenizerStates.TRUE1:
          case TokenizerStates.TRUE2:
          case TokenizerStates.TRUE3:
            this.onToken({
              token: TokenType.TRUE,
              value: true,
              offset: this.offset,
              partial: true,
            });
            break;
          case TokenizerStates.FALSE1:
          case TokenizerStates.FALSE2:
          case TokenizerStates.FALSE3:
          case TokenizerStates.FALSE4:
            this.onToken({
              token: TokenType.FALSE,
              value: false,
              offset: this.offset,
              partial: true,
            });
            break;
          case TokenizerStates.NULL1:
          case TokenizerStates.NULL2:
          case TokenizerStates.NULL3:
            this.onToken({
              token: TokenType.NULL,
              value: null,
              offset: this.offset,
              partial: true,
            });
            break;
          case TokenizerStates.STRING_DEFAULT: {
            const string = this.bufferedString.toString();
            this.onToken({
              token: TokenType.STRING,
              value: string,
              offset: this.offset,
              partial: true,
            });
            break;
          }
          case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
          case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
          case TokenizerStates.NUMBER_AFTER_DECIMAL:
          case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
            try {
              this.onToken({
                token: TokenType.NUMBER,
                value: this.parseNumber(this.bufferedNumber.toString()),
                offset: this.offset,
                partial: true,
              });
            } catch {
              // Number couldn't be parsed. Do nothing.
            }
        }
      }
    } catch (err: unknown) {
      this.error(err as Error);
    }
  }
  /** Emit the buffered number as a NUMBER token and advance the offset. */
  private emitNumber(): void {
    this.onToken({
      token: TokenType.NUMBER,
      value: this.parseNumber(this.bufferedNumber.toString()),
      offset: this.offset,
    });
    this.offset += this.bufferedNumber.byteLength - 1;
  }
  /** Overridable hook (protected) for converting a numeric literal to a value. */
  protected parseNumber(numberStr: string): number {
    return Number(numberStr);
  }
  /** Move to the ERROR state (unless already ENDED) and notify `onError`. */
  public error(err: Error): void {
    if (this.state !== TokenizerStates.ENDED) {
      this.state = TokenizerStates.ERROR;
    }
    this.onError(err);
  }
  /**
   * Signal end of input. A trailing number (the only token whose end cannot
   * be detected without lookahead) is flushed first; ending in any other
   * mid-token state is reported as an error.
   */
  public end(): void {
    switch (this.state) {
      case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
      case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
      case TokenizerStates.NUMBER_AFTER_DECIMAL:
      case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
        this.state = TokenizerStates.ENDED;
        this.emitNumber();
        this.onEnd();
        break;
      case TokenizerStates.BOM_OR_START:
      case TokenizerStates.START:
      case TokenizerStates.ERROR:
      case TokenizerStates.SEPARATOR:
        this.state = TokenizerStates.ENDED;
        this.onEnd();
        break;
      default:
        this.error(
          new TokenizerError(
            `Tokenizer ended in the middle of a token (state: ${TokenizerStateToString(
              this.state,
            )}). Either not all the data was received or the data was invalid.`,
          ),
        );
    }
  }
  // Callback placeholders: consumers are expected to overwrite these.
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  public onToken(parsedToken: ParsedTokenInfo): void {
    // Override me
    throw new TokenizerError(
      'Can\'t emit tokens before the "onToken" callback has been set up.',
    );
  }
  public onError(err: Error): void {
    // Override me
    throw err;
  }
  public onEnd(): void {
    // Override me
  }
}

400
dev/env/node_modules/@streamparser/json/src/tokenparser.ts generated vendored Executable file
View File

@@ -0,0 +1,400 @@
import { charset } from "./utils/utf-8.js";
import TokenType from "./utils/types/tokenType.js";
import type {
JsonPrimitive,
JsonKey,
JsonObject,
JsonArray,
JsonStruct,
} from "./utils/types/jsonTypes.js";
import {
type StackElement,
TokenParserMode,
} from "./utils/types/stackElement.js";
import type { ParsedTokenInfo } from "./utils/types/parsedTokenInfo.js";
import type { ParsedElementInfo } from "./utils/types/parsedElementInfo.js";
// Parser States
// Grammar position of the token parser: expecting a VALUE, an object KEY,
// the COLON after a key, a COMMA (or a closing brace/bracket), a document
// SEPARATOR between top-level values, or a terminal state (ENDED/ERROR).
const enum TokenParserState {
  VALUE,
  KEY,
  COLON,
  COMMA,
  ENDED,
  ERROR,
  SEPARATOR,
}
/**
 * Maps a TokenParserState to its name, for use in error messages.
 * NOTE(review): the array must stay in the exact order of the
 * TokenParserState enum — const enums are erased at compile time.
 */
function TokenParserStateToString(state: TokenParserState): string {
  return ["VALUE", "KEY", "COLON", "COMMA", "ENDED", "ERROR", "SEPARATOR"][
    state
  ];
}
/** Configuration for the token-level parser. */
export interface TokenParserOptions {
  /** Selector paths (e.g. "$", "$.a.*") restricting which values are emitted. */
  paths?: string[];
  /** Keep already-emitted children attached to their parents (default true). */
  keepStack?: boolean;
  /** Separator expected between top-level JSON values; "" means back-to-back values. */
  separator?: string;
  /** When true, emit partial values as partial tokens arrive. */
  emitPartialValues?: boolean;
}
// Defaults: emit every value, keep the stack, single JSON document, no
// partial-value emission.
const defaultOpts: TokenParserOptions = {
  paths: undefined,
  keepStack: true,
  separator: undefined,
  emitPartialValues: false,
};
/** Error raised for invalid token sequences or bad selector options. */
export class TokenParserError extends Error {
  constructor(message: string) {
    super(message);
    // Typescript is broken. This is a workaround
    // (restores the prototype chain that is lost when extending built-ins
    // such as Error with a downlevel compile target).
    Object.setPrototypeOf(this, TokenParserError.prototype);
  }
}
/**
 * Streaming token parser. Consumes the token stream produced by Tokenizer
 * and assembles JavaScript values, emitting each completed (or, optionally,
 * partial) value through `onValue`. A stack of enclosing containers tracks
 * nesting; optional selector paths filter which values are emitted.
 */
export default class TokenParser {
  // Parsed selector paths; an entry of `undefined` matches everything.
  private readonly paths?: (string[] | undefined)[];
  private readonly keepStack: boolean;
  private readonly separator?: string;
  private state: TokenParserState = TokenParserState.VALUE;
  // Current container kind (OBJECT/ARRAY) or undefined at the root.
  private mode: TokenParserMode | undefined = undefined;
  // Key (or array index) the next value will be stored under.
  private key: JsonKey = undefined;
  // Container currently being filled.
  private value: JsonStruct | undefined = undefined;
  private stack: StackElement[] = [];
  /**
   * @param opts Selector paths, stack retention, separator and
   *             partial-value behaviour; missing fields use `defaultOpts`.
   * @throws TokenParserError on malformed selector paths.
   */
  constructor(opts?: TokenParserOptions) {
    opts = { ...defaultOpts, ...opts };
    if (opts.paths) {
      this.paths = opts.paths.map((path) => {
        if (path === undefined || path === "$*") return undefined;
        if (!path.startsWith("$"))
          throw new TokenParserError(
            `Invalid selector "${path}". Should start with "$".`,
          );
        const pathParts = path.split(".").slice(1);
        if (pathParts.includes(""))
          throw new TokenParserError(
            `Invalid selector "${path}". ".." syntax not supported.`,
          );
        return pathParts;
      });
    }
    this.keepStack = opts.keepStack || false;
    this.separator = opts.separator;
    if (!opts.emitPartialValues) {
      // Disable partial emission entirely by replacing the method with a no-op.
      this.emitPartial = () => {};
    }
  }
  /** True when the value at the current stack depth matches a selector path. */
  private shouldEmit(): boolean {
    if (!this.paths) return true;
    return this.paths.some((path) => {
      if (path === undefined) return true;
      if (path.length !== this.stack.length) return false;
      for (let i = 0; i < path.length - 1; i++) {
        const selector = path[i];
        const key = this.stack[i + 1].key;
        if (selector === "*") continue;
        if (selector !== key?.toString()) return false;
      }
      const selector = path[path.length - 1];
      if (selector === "*") return true;
      return selector === this.key?.toString();
    });
  }
  /** Save the current container context before descending into a child. */
  private push(): void {
    this.stack.push({
      key: this.key,
      value: this.value as JsonStruct,
      mode: this.mode,
      emit: this.shouldEmit(),
    });
  }
  /** Restore the parent context after a container closes, then emit it. */
  private pop(): void {
    const value = this.value;
    let emit;
    ({
      key: this.key,
      value: this.value,
      mode: this.mode,
      emit,
    } = this.stack.pop() as StackElement);
    // Back at the root (mode undefined) we expect a VALUE; inside a
    // container the next token must be a comma or a closing bracket.
    this.state =
      this.mode !== undefined ? TokenParserState.COMMA : TokenParserState.VALUE;
    this.emit(value as JsonPrimitive | JsonStruct, emit);
  }
  /** Deliver a completed value to `onValue` and detect end of document. */
  private emit(value: JsonPrimitive | JsonStruct, emit: boolean): void {
    if (
      !this.keepStack &&
      this.value &&
      this.stack.every((item) => !item.emit)
    ) {
      // Memory optimisation: no ancestor will be emitted, so drop the
      // just-finished child from its parent.
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      delete (this.value as JsonStruct as any)[this.key as string | number];
    }
    if (emit) {
      this.onValue({
        value: value,
        key: this.key,
        parent: this.value,
        stack: this.stack,
      });
    }
    if (this.stack.length === 0) {
      if (this.separator) {
        this.state = TokenParserState.SEPARATOR;
      } else if (this.separator === undefined) {
        this.end();
      }
      // else if separator === '', expect next JSON object.
    }
  }
  /**
   * Deliver an in-progress value to `onValue` with `partial: true`.
   * Replaced by a no-op in the constructor unless emitPartialValues is set.
   */
  private emitPartial(value?: JsonPrimitive): void {
    if (!this.shouldEmit()) return;
    if (this.state === TokenParserState.KEY) {
      this.onValue({
        value: undefined,
        key: value as JsonKey,
        parent: this.value,
        stack: this.stack,
        partial: true,
      });
      return;
    }
    this.onValue({
      value: value,
      key: this.key,
      parent: this.value,
      stack: this.stack,
      partial: true,
    });
  }
  /** True once `end()` has completed successfully. */
  public get isEnded(): boolean {
    return this.state === TokenParserState.ENDED;
  }
  /**
   * Consume one token from the tokenizer. Invalid token/state combinations
   * are reported through `error`/`onError` rather than thrown to the caller.
   */
  public write({
    token,
    value,
    partial,
  }: Omit<ParsedTokenInfo, "offset">): void {
    try {
      if (partial) {
        this.emitPartial(value);
        return;
      }
      if (this.state === TokenParserState.VALUE) {
        if (
          token === TokenType.STRING ||
          token === TokenType.NUMBER ||
          token === TokenType.TRUE ||
          token === TokenType.FALSE ||
          token === TokenType.NULL
        ) {
          // Primitive value: attach it to the current container (if any).
          if (this.mode === TokenParserMode.OBJECT) {
            (this.value as JsonObject)[this.key as string] = value;
            this.state = TokenParserState.COMMA;
          } else if (this.mode === TokenParserMode.ARRAY) {
            (this.value as JsonArray).push(value);
            this.state = TokenParserState.COMMA;
          }
          this.emit(value, this.shouldEmit());
          return;
        }
        if (token === TokenType.LEFT_BRACE) {
          // Open a new object as a child of the current container.
          this.push();
          if (this.mode === TokenParserMode.OBJECT) {
            this.value = (this.value as JsonObject)[this.key as string] = {};
          } else if (this.mode === TokenParserMode.ARRAY) {
            const val = {};
            (this.value as JsonArray).push(val);
            this.value = val;
          } else {
            this.value = {};
          }
          this.mode = TokenParserMode.OBJECT;
          this.state = TokenParserState.KEY;
          this.key = undefined;
          this.emitPartial();
          return;
        }
        if (token === TokenType.LEFT_BRACKET) {
          // Open a new array as a child of the current container.
          this.push();
          if (this.mode === TokenParserMode.OBJECT) {
            this.value = (this.value as JsonObject)[this.key as string] = [];
          } else if (this.mode === TokenParserMode.ARRAY) {
            const val: JsonArray = [];
            (this.value as JsonArray).push(val);
            this.value = val;
          } else {
            this.value = [];
          }
          this.mode = TokenParserMode.ARRAY;
          this.state = TokenParserState.VALUE;
          this.key = 0;
          this.emitPartial();
          return;
        }
        if (
          this.mode === TokenParserMode.ARRAY &&
          token === TokenType.RIGHT_BRACKET &&
          (this.value as JsonArray).length === 0
        ) {
          // Empty array: "]" immediately after "[".
          this.pop();
          return;
        }
      }
      if (this.state === TokenParserState.KEY) {
        if (token === TokenType.STRING) {
          this.key = value as string;
          this.state = TokenParserState.COLON;
          this.emitPartial();
          return;
        }
        if (
          token === TokenType.RIGHT_BRACE &&
          Object.keys(this.value as JsonObject).length === 0
        ) {
          // Empty object: "}" immediately after "{".
          this.pop();
          return;
        }
      }
      if (this.state === TokenParserState.COLON) {
        if (token === TokenType.COLON) {
          this.state = TokenParserState.VALUE;
          return;
        }
      }
      if (this.state === TokenParserState.COMMA) {
        if (token === TokenType.COMMA) {
          if (this.mode === TokenParserMode.ARRAY) {
            this.state = TokenParserState.VALUE;
            (this.key as number) += 1;
            return;
          }
          /* istanbul ignore else */
          if (this.mode === TokenParserMode.OBJECT) {
            this.state = TokenParserState.KEY;
            return;
          }
        }
        if (
          (token === TokenType.RIGHT_BRACE &&
            this.mode === TokenParserMode.OBJECT) ||
          (token === TokenType.RIGHT_BRACKET &&
            this.mode === TokenParserMode.ARRAY)
        ) {
          this.pop();
          return;
        }
      }
      if (this.state === TokenParserState.SEPARATOR) {
        if (token === TokenType.SEPARATOR && value === this.separator) {
          this.state = TokenParserState.VALUE;
          return;
        }
      }
      // Edge case in which the separator is just whitespace and it's found in the middle of the JSON
      if (
        token === TokenType.SEPARATOR &&
        this.state !== TokenParserState.SEPARATOR &&
        Array.from(value as string)
          .map((n) => n.charCodeAt(0))
          .every(
            (n) =>
              n === charset.SPACE ||
              n === charset.NEWLINE ||
              n === charset.CARRIAGE_RETURN ||
              n === charset.TAB,
          )
      ) {
        // whitespace
        return;
      }
      throw new TokenParserError(
        `Unexpected ${TokenType[token]} (${JSON.stringify(
          value,
        )}) in state ${TokenParserStateToString(this.state)}`,
      );
    } catch (err: unknown) {
      this.error(err as Error);
    }
  }
  /** Move to the ERROR state (unless already ENDED) and notify `onError`. */
  public error(err: Error): void {
    if (this.state !== TokenParserState.ENDED) {
      this.state = TokenParserState.ERROR;
    }
    this.onError(err);
  }
  /**
   * Signal end of input. Reports an error if a value is still open
   * (non-empty stack or a mid-value state); otherwise fires `onEnd`.
   */
  public end(): void {
    if (
      (this.state !== TokenParserState.VALUE &&
        this.state !== TokenParserState.SEPARATOR) ||
      this.stack.length > 0
    ) {
      this.error(
        new Error(
          `Parser ended in mid-parsing (state: ${TokenParserStateToString(
            this.state,
          )}). Either not all the data was received or the data was invalid.`,
        ),
      );
    } else {
      this.state = TokenParserState.ENDED;
      this.onEnd();
    }
  }
  // Callback placeholders: consumers are expected to overwrite these.
  /* eslint-disable-next-line @typescript-eslint/no-unused-vars */
  public onValue(parsedElementInfo: ParsedElementInfo): void {
    // Override me
    throw new TokenParserError(
      'Can\'t emit data before the "onValue" callback has been set up.',
    );
  }
  public onError(err: Error): void {
    // Override me
    throw err;
  }
  public onEnd(): void {
    // Override me
  }
}

View File

@@ -0,0 +1,75 @@
/**
 * Minimal builder for incrementally assembling a string from UTF-8 bytes.
 */
export interface StringBuilder {
  /** Number of bytes appended since the last reset. */
  byteLength: number;
  /** Append a single one-byte character code. */
  appendChar: (char: number) => void;
  /** Append the byte range [start, end) of buf. */
  appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
  /** Discard all accumulated content. */
  reset: () => void;
  /** Decode and return the accumulated string. */
  toString: () => string;
}
/**
 * StringBuilder that keeps a list of already-decoded string fragments and
 * joins them on demand. Simple and allocation-heavy; used when no string
 * buffer size is configured.
 */
export class NonBufferedString implements StringBuilder {
  private decoder = new TextDecoder("utf-8");
  private strings: string[] = [];
  public byteLength = 0;

  public appendChar(char: number): void {
    this.byteLength += 1;
    this.strings.push(String.fromCharCode(char));
  }

  public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
    const fragment = this.decoder.decode(buf.subarray(start, end));
    this.strings.push(fragment);
    this.byteLength += end - start;
  }

  public reset(): void {
    this.strings = [];
    this.byteLength = 0;
  }

  public toString(): string {
    return this.strings.join("");
  }
}
/**
 * StringBuilder backed by a fixed-size byte buffer that is decoded into
 * an accumulator string whenever it fills up. Cheaper than
 * NonBufferedString for many small appends.
 *
 * NOTE(review): flushing decodes the staged bytes with a single
 * non-streaming decode() call, so a multi-byte UTF-8 sequence that
 * straddles a flush boundary would be mangled — presumably callers only
 * append on whole-character boundaries; confirm against the tokenizer.
 */
export class BufferedString implements StringBuilder {
  private decoder = new TextDecoder("utf-8");
  private buffer: Uint8Array;
  // Number of staged (not yet decoded) bytes at the start of `buffer`.
  private bufferOffset = 0;
  // Already-decoded accumulator.
  private string = "";
  public byteLength = 0;

  /** @param bufferSize Capacity, in bytes, of the staging buffer. */
  public constructor(bufferSize: number) {
    // Guard against zero/negative sizes: with a 0-length buffer,
    // appendChar() would silently drop bytes (out-of-range typed-array
    // writes are ignored) while still incrementing byteLength.
    this.buffer = new Uint8Array(Math.max(bufferSize, 1));
  }

  public appendChar(char: number): void {
    if (this.bufferOffset >= this.buffer.length) this.flushStringBuffer();
    this.buffer[this.bufferOffset++] = char;
    this.byteLength += 1;
  }

  public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
    const size = end - start;
    if (this.bufferOffset + size > this.buffer.length) this.flushStringBuffer();
    if (size > this.buffer.length) {
      // Bug fix: a chunk larger than the entire staging buffer cannot be
      // staged even after a flush — Uint8Array.set() would throw a
      // RangeError. Decode oversized chunks straight into the accumulator.
      this.string += this.decoder.decode(buf.subarray(start, end));
    } else {
      this.buffer.set(buf.subarray(start, end), this.bufferOffset);
      this.bufferOffset += size;
    }
    this.byteLength += size;
  }

  // Decodes the staged bytes, appends them to the accumulator string and
  // empties the staging buffer.
  private flushStringBuffer(): void {
    this.string += this.decoder.decode(
      this.buffer.subarray(0, this.bufferOffset),
    );
    this.bufferOffset = 0;
  }

  public reset(): void {
    this.string = "";
    this.bufferOffset = 0;
    this.byteLength = 0;
  }

  public toString(): string {
    this.flushStringBuffer();
    return this.string;
  }
}

View File

@@ -0,0 +1,5 @@
// Scalar JSON values.
export type JsonPrimitive = string | number | boolean | null;
// Key under which a value lives: object property name, array index, or
// undefined for the top-level value.
export type JsonKey = string | number | undefined;
// A JSON object: string keys mapping to primitives or nested structures.
export type JsonObject = { [key: string]: JsonPrimitive | JsonStruct };
// A JSON array of primitives or nested structures.
export type JsonArray = (JsonPrimitive | JsonStruct)[];
// Any composite (non-scalar) JSON value.
export type JsonStruct = JsonObject | JsonArray;

View File

@@ -0,0 +1,37 @@
import type { StackElement } from "./stackElement.js";
import type {
JsonPrimitive,
JsonKey,
JsonObject,
JsonArray,
JsonStruct,
} from "./jsonTypes.js";
/**
 * Information about a parsed JSON element as delivered to the token
 * parser's onValue callback.
 */
export interface ParsedElementInfo {
  // The parsed value (may be absent on partial emissions).
  value?: JsonPrimitive | JsonStruct;
  // Containing object/array, if any.
  parent?: JsonStruct;
  // Property name or array index of `value` within `parent`.
  key?: JsonKey;
  // Stack of enclosing containers (empty for the top-level value).
  stack: StackElement[];
  // True when the value is incomplete (streamed in pieces).
  partial?: boolean;
}

/** Element emitted for an array entry: key is the numeric index. */
export interface ParsedArrayElement extends ParsedElementInfo {
  value: JsonPrimitive | JsonStruct;
  parent: JsonArray;
  key: number;
  stack: StackElement[];
}

/** Element emitted for an object property: key is the property name. */
export interface ParsedObjectProperty extends ParsedElementInfo {
  value: JsonPrimitive | JsonStruct;
  parent: JsonObject;
  key: string;
  stack: StackElement[];
}

/** The top-level value: no parent, no key, empty container stack. */
export interface ParsedTopLevelElement extends ParsedElementInfo {
  value: JsonPrimitive | JsonStruct;
  parent: undefined;
  key: undefined;
  stack: [];
}

View File

@@ -0,0 +1,58 @@
import TokenType from "./tokenType.js";
import type { JsonPrimitive } from "./jsonTypes.js";
/** Information about a single token produced by the tokenizer. */
export interface ParsedTokenInfo {
  token: TokenType;
  value: JsonPrimitive;
  // Position of the token — presumably a byte offset into the input
  // stream; confirm against the tokenizer.
  offset: number;
  // Set when the token is an incomplete fragment (streamed strings/numbers).
  partial?: boolean;
}

// Narrowed token-info interfaces: one per token type, pinning both the
// `token` discriminant and the literal `value` it carries.
export interface ParsedLeftBraceTokenInfo extends ParsedTokenInfo {
  token: TokenType.LEFT_BRACE;
  value: "{";
}
export interface ParsedRightBraceTokenInfo extends ParsedTokenInfo {
  token: TokenType.RIGHT_BRACE;
  value: "}";
}
export interface ParsedLeftBracketTokenInfo extends ParsedTokenInfo {
  token: TokenType.LEFT_BRACKET;
  value: "[";
}
export interface ParsedRighBracketTokenInfo extends ParsedTokenInfo {
  token: TokenType.RIGHT_BRACKET;
  value: "]";
}
/**
 * Correctly spelled alias for {@link ParsedRighBracketTokenInfo} (the
 * original name is missing the "t" in "Right"). The misspelled export
 * is kept for backward compatibility with existing consumers.
 */
export type ParsedRightBracketTokenInfo = ParsedRighBracketTokenInfo;
export interface ParsedColonTokenInfo extends ParsedTokenInfo {
  token: TokenType.COLON;
  value: ":";
}
export interface ParsedCommaTokenInfo extends ParsedTokenInfo {
  token: TokenType.COMMA;
  value: ",";
}
export interface ParsedTrueTokenInfo extends ParsedTokenInfo {
  token: TokenType.TRUE;
  value: true;
}
export interface ParsedFalseTokenInfo extends ParsedTokenInfo {
  token: TokenType.FALSE;
  value: false;
}
export interface ParsedNullTokenInfo extends ParsedTokenInfo {
  token: TokenType.NULL;
  value: null;
}
export interface ParsedStringTokenInfo extends ParsedTokenInfo {
  token: TokenType.STRING;
  value: string;
}
export interface ParsedNumberTokenInfo extends ParsedTokenInfo {
  token: TokenType.NUMBER;
  value: number;
}
export interface ParsedSeparatorTokenInfo extends ParsedTokenInfo {
  token: TokenType.SEPARATOR;
  value: string;
}

View File

@@ -0,0 +1,13 @@
import type { JsonKey, JsonStruct } from "./jsonTypes.js";
/** Kind of container currently being parsed: JSON object or array. */
export const enum TokenParserMode {
  OBJECT,
  ARRAY,
}

/** One level of the parser's container stack. */
export interface StackElement {
  // Key under which this container is stored in its parent
  // (undefined for the top-level value).
  key: JsonKey;
  // The (possibly still incomplete) container value being built.
  value: JsonStruct;
  // Container kind; optional — presumably absent for the root sentinel
  // entry; confirm against the TokenParser implementation.
  mode?: TokenParserMode;
  // Whether completed values at this level should be emitted —
  // presumably via the onValue callback; confirm against TokenParser.
  emit: boolean;
}

View File

@@ -0,0 +1,16 @@
/**
 * Token categories produced by the tokenizer.
 * Numeric enum: member values follow declaration order starting at 0,
 * so keep the declaration order stable.
 */
enum TokenType {
  LEFT_BRACE,
  RIGHT_BRACE,
  LEFT_BRACKET,
  RIGHT_BRACKET,
  COLON,
  COMMA,
  TRUE,
  FALSE,
  NULL,
  STRING,
  NUMBER,
  SEPARATOR,
}
export default TokenType;

113
dev/env/node_modules/@streamparser/json/src/utils/utf-8.ts generated vendored Executable file
View File

@@ -0,0 +1,113 @@
/**
 * ASCII/UTF-8 code points used by the tokenizer, named after their
 * Unicode character names.
 */
export const enum charset {
  BACKSPACE = 0x8, // "\b"
  FORM_FEED = 0xc, // "\f"
  NEWLINE = 0xa, // "\n"
  CARRIAGE_RETURN = 0xd, // "\r"
  TAB = 0x9, // "\t"
  SPACE = 0x20, // " "
  EXCLAMATION_MARK = 0x21, // !
  QUOTATION_MARK = 0x22, // "
  NUMBER_SIGN = 0x23, // #
  DOLLAR_SIGN = 0x24, // $
  PERCENT_SIGN = 0x25, // %
  AMPERSAND = 0x26, // &
  APOSTROPHE = 0x27, // '
  LEFT_PARENTHESIS = 0x28, // (
  RIGHT_PARENTHESIS = 0x29, // )
  ASTERISK = 0x2a, // *
  PLUS_SIGN = 0x2b, // +
  COMMA = 0x2c, // ,
  HYPHEN_MINUS = 0x2d, // -
  FULL_STOP = 0x2e, // .
  SOLIDUS = 0x2f, // /
  DIGIT_ZERO = 0x30, // 0
  DIGIT_ONE = 0x31, // 1
  DIGIT_TWO = 0x32, // 2
  DIGIT_THREE = 0x33, // 3
  DIGIT_FOUR = 0x34, // 4
  DIGIT_FIVE = 0x35, // 5
  DIGIT_SIX = 0x36, // 6
  DIGIT_SEVEN = 0x37, // 7
  DIGIT_EIGHT = 0x38, // 8
  DIGIT_NINE = 0x39, // 9
  COLON = 0x3a, // :
  SEMICOLON = 0x3b, // ;
  LESS_THAN_SIGN = 0x3c, // <
  EQUALS_SIGN = 0x3d, // =
  GREATER_THAN_SIGN = 0x3e, // >
  QUESTION_MARK = 0x3f, // ?
  COMMERCIAL_AT = 0x40, // @
  LATIN_CAPITAL_LETTER_A = 0x41, // A
  LATIN_CAPITAL_LETTER_B = 0x42, // B
  LATIN_CAPITAL_LETTER_C = 0x43, // C
  LATIN_CAPITAL_LETTER_D = 0x44, // D
  LATIN_CAPITAL_LETTER_E = 0x45, // E
  LATIN_CAPITAL_LETTER_F = 0x46, // F
  LATIN_CAPITAL_LETTER_G = 0x47, // G
  LATIN_CAPITAL_LETTER_H = 0x48, // H
  LATIN_CAPITAL_LETTER_I = 0x49, // I
  LATIN_CAPITAL_LETTER_J = 0x4a, // J
  LATIN_CAPITAL_LETTER_K = 0x4b, // K
  LATIN_CAPITAL_LETTER_L = 0x4c, // L
  LATIN_CAPITAL_LETTER_M = 0x4d, // M
  LATIN_CAPITAL_LETTER_N = 0x4e, // N
  LATIN_CAPITAL_LETTER_O = 0x4f, // O
  LATIN_CAPITAL_LETTER_P = 0x50, // P
  LATIN_CAPITAL_LETTER_Q = 0x51, // Q
  LATIN_CAPITAL_LETTER_R = 0x52, // R
  LATIN_CAPITAL_LETTER_S = 0x53, // S
  LATIN_CAPITAL_LETTER_T = 0x54, // T
  LATIN_CAPITAL_LETTER_U = 0x55, // U
  LATIN_CAPITAL_LETTER_V = 0x56, // V
  LATIN_CAPITAL_LETTER_W = 0x57, // W
  LATIN_CAPITAL_LETTER_X = 0x58, // X
  LATIN_CAPITAL_LETTER_Y = 0x59, // Y
  LATIN_CAPITAL_LETTER_Z = 0x5a, // Z
  LEFT_SQUARE_BRACKET = 0x5b, // [
  REVERSE_SOLIDUS = 0x5c, // \
  RIGHT_SQUARE_BRACKET = 0x5d, // ]
  CIRCUMFLEX_ACCENT = 0x5e, // ^
  LOW_LINE = 0x5f, // _
  GRAVE_ACCENT = 0x60, // `
  LATIN_SMALL_LETTER_A = 0x61, // a
  LATIN_SMALL_LETTER_B = 0x62, // b
  LATIN_SMALL_LETTER_C = 0x63, // c
  LATIN_SMALL_LETTER_D = 0x64, // d
  LATIN_SMALL_LETTER_E = 0x65, // e
  LATIN_SMALL_LETTER_F = 0x66, // f
  LATIN_SMALL_LETTER_G = 0x67, // g
  LATIN_SMALL_LETTER_H = 0x68, // h
  LATIN_SMALL_LETTER_I = 0x69, // i
  LATIN_SMALL_LETTER_J = 0x6a, // j
  LATIN_SMALL_LETTER_K = 0x6b, // k
  LATIN_SMALL_LETTER_L = 0x6c, // l
  LATIN_SMALL_LETTER_M = 0x6d, // m
  LATIN_SMALL_LETTER_N = 0x6e, // n
  LATIN_SMALL_LETTER_O = 0x6f, // o
  LATIN_SMALL_LETTER_P = 0x70, // p
  LATIN_SMALL_LETTER_Q = 0x71, // q
  LATIN_SMALL_LETTER_R = 0x72, // r
  LATIN_SMALL_LETTER_S = 0x73, // s
  LATIN_SMALL_LETTER_T = 0x74, // t
  LATIN_SMALL_LETTER_U = 0x75, // u
  LATIN_SMALL_LETTER_V = 0x76, // v
  LATIN_SMALL_LETTER_W = 0x77, // w
  LATIN_SMALL_LETTER_X = 0x78, // x
  LATIN_SMALL_LETTER_Y = 0x79, // y
  LATIN_SMALL_LETTER_Z = 0x7a, // z
  LEFT_CURLY_BRACKET = 0x7b, // {
  VERTICAL_LINE = 0x7c, // |
  RIGHT_CURLY_BRACKET = 0x7d, // }
  TILDE = 0x7e, // ~
}
// Lookup table for single-character JSON escape sequences: maps the byte
// that follows a backslash (e.g. LATIN_SMALL_LETTER_N for "\n") to the
// code point it denotes. Unicode escapes ("\uXXXX") are not in this
// table — presumably handled separately where this map is consulted.
export const escapedSequences: { [key: number]: number } = {
  [charset.QUOTATION_MARK]: charset.QUOTATION_MARK,
  [charset.REVERSE_SOLIDUS]: charset.REVERSE_SOLIDUS,
  [charset.SOLIDUS]: charset.SOLIDUS,
  [charset.LATIN_SMALL_LETTER_B]: charset.BACKSPACE,
  [charset.LATIN_SMALL_LETTER_F]: charset.FORM_FEED,
  [charset.LATIN_SMALL_LETTER_N]: charset.NEWLINE,
  [charset.LATIN_SMALL_LETTER_R]: charset.CARRIAGE_RETURN,
  [charset.LATIN_SMALL_LETTER_T]: charset.TAB,
};