refactor: move brother_node development artifact to dev/test-nodes subdirectory

Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
2026-03-30 17:09:06 +02:00
parent bf730dcb4a
commit 816e258d4c
11734 changed files with 2001707 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
// NOTE(review): generated tsc output (see sourceMappingURL below) — manual
// edits here will be overwritten by a rebuild.
// Re-exports the three Node stream wrappers defined in this package, plus
// selected helpers/types re-exported from the core @streamparser/json package.
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
export { utf8, JsonTypes, type ParsedTokenInfo, type ParsedElementInfo, TokenParserMode, type StackElement, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EACT,KAAK,eAAe,EACpB,KAAK,iBAAiB,EACtB,eAAe,EACf,KAAK,YAAY,EACjB,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,5 @@
// NOTE(review): generated tsc output (see sourceMappingURL below) — manual
// edits here will be overwritten by a rebuild.
// Runtime entry point: re-exports the three stream wrappers plus runtime
// values re-exported from the core @streamparser/json package (type-only
// exports from the source are erased here).
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
export { utf8, JsonTypes, TokenParserMode, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EAGT,eAAe,EAEf,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type JSONParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping @streamparser/json's JSONParser.
 * The writable side accepts raw JSON input chunks; the readable side
 * emits parsed values in object mode.
 */
export default class JSONParserTransform extends Transform {
    private jsonParser;
    constructor(opts?: JSONParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Sends incoming data to the parser to be processed.
     *
     * @param chunk Incoming data
     * @param encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param done Called when processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=jsonparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.d.ts","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAc,KAAK,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IACxD,OAAO,CAAC,UAAU,CAAa;gBAG7B,IAAI,GAAE,iBAAsB,EAC5B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
import { Transform, } from "stream";
import { JSONParser } from "@streamparser/json";
/**
 * Transform stream that feeds every written chunk into a
 * @streamparser/json JSONParser and pushes each parsed value downstream.
 * Writable side is byte/text mode; readable side is object mode.
 */
export default class JSONParserTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Callers may tune any Transform option except the object modes,
        // which this wrapper fixes: raw input in, parsed values out.
        super({ ...transformOpts, writableObjectMode: false, readableObjectMode: true });
        this.jsonParser = new JSONParser(opts);
        this.jsonParser.onValue = (parsedValue) => this.push(parsedValue);
        this.jsonParser.onError = (error) => {
            throw error;
        };
        this.jsonParser.onEnd = () => {
            // Parser finished a complete document: close the stream unless
            // the writable side has already ended.
            if (!this.writableEnded) {
                this.end();
            }
        };
    }
    /**
     * Sends incoming data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when processing of the supplied chunk is done
     */
    _transform(chunk, encoding, done) {
        try {
            this.jsonParser.write(chunk);
            done();
        } catch (error) {
            // Surface parser failures through the stream error path.
            done(error);
        }
    }
    _final(callback) {
        try {
            if (!this.jsonParser.isEnded) {
                this.jsonParser.end();
            }
            callback();
        } catch (error) {
            callback(error);
        }
    }
}
//# sourceMappingURL=jsonparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.js","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,UAAU,EAA0B,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IAGxD,YACE,OAA0B,EAAE,EAC5B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,KAAK,EACzB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QAEvC,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAChC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE;YAC3B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC7B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC;YACpD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenizerOptions } from "@streamparser/json/tokenizer.js";
/**
 * Node Transform stream wrapping @streamparser/json's Tokenizer.
 * Incoming chunks are tokenized and the resulting token info objects are
 * emitted on the readable side in object mode.
 */
export default class TokenizerTransform extends Transform {
    private tokenizer;
    constructor(opts?: TokenizerOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Sends incoming data to the tokenizer to be processed.
     *
     * @param chunk Incoming data
     * @param encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param done Called when processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenizer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAkB,EAChB,KAAK,gBAAgB,EACtB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IACvD,OAAO,CAAC,SAAS,CAAY;gBAG3B,IAAI,GAAE,gBAAqB,EAC3B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
import { Transform, } from "stream";
import Tokenizer, {} from "@streamparser/json/tokenizer.js";
/**
 * Transform stream that runs every written chunk through a
 * @streamparser/json Tokenizer and pushes each parsed token downstream.
 * Both stream sides operate in object mode.
 */
export default class TokenizerTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Callers may tune any Transform option except the object modes,
        // which this wrapper fixes on for both sides.
        super({ ...transformOpts, writableObjectMode: true, readableObjectMode: true });
        this.tokenizer = new Tokenizer(opts);
        this.tokenizer.onToken = (tokenInfo) => this.push(tokenInfo);
        this.tokenizer.onError = (error) => {
            throw error;
        };
        this.tokenizer.onEnd = () => {
            // Tokenizer is done: close the stream unless the writable side
            // has already ended.
            if (!this.writableEnded) {
                this.end();
            }
        };
    }
    /**
     * Sends incoming data to the tokenizer to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when processing of the supplied chunk is done
     */
    _transform(chunk, encoding, done) {
        try {
            this.tokenizer.write(chunk);
            done();
        } catch (error) {
            // Surface tokenizer failures through the stream error path.
            done(error);
        }
    }
    _final(callback) {
        try {
            if (!this.tokenizer.isEnded) {
                this.tokenizer.end();
            }
            callback();
        } catch (error) {
            callback(error);
        }
    }
}
//# sourceMappingURL=tokenizer.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,SAAS,EAAE,EAEjB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IAGvD,YACE,OAAyB,EAAE,EAC3B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QAErC,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QACzE,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAC/B,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,KAAK,GAAG,GAAG,EAAE;YAC1B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC5B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO;gBAAE,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC;YAClD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping @streamparser/json's TokenParser.
 * Consumes token objects (e.g. from TokenizerTransform) on the writable
 * side and emits parsed values on the readable side; both sides run in
 * object mode.
 */
export default class TokenParserTransform extends Transform {
    private tokenParser;
    constructor(opts?: TokenParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Sends incoming tokens to the parser to be processed.
     *
     * @param chunk Incoming data
     * @param encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param done Called when processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.d.ts","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAe,KAAK,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IACzD,OAAO,CAAC,WAAW,CAAc;gBAG/B,IAAI,GAAE,kBAAuB,EAC7B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
import { Transform, } from "stream";
import { TokenParser } from "@streamparser/json";
/**
 * Transform stream that feeds written token objects into a
 * @streamparser/json TokenParser and pushes each parsed value downstream.
 * Both stream sides operate in object mode.
 */
export default class TokenParserTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Callers may tune any Transform option except the object modes,
        // which this wrapper fixes on for both sides.
        super({ ...transformOpts, writableObjectMode: true, readableObjectMode: true });
        this.tokenParser = new TokenParser(opts);
        this.tokenParser.onValue = (parsedInfo) => this.push(parsedInfo);
        this.tokenParser.onError = (error) => {
            throw error;
        };
        this.tokenParser.onEnd = () => {
            // Parser is done: close the stream unless the writable side has
            // already ended.
            if (!this.writableEnded) {
                this.end();
            }
        };
    }
    /**
     * Sends incoming tokens to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when processing of the supplied chunk is done
     */
    _transform(chunk, encoding, done) {
        try {
            this.tokenParser.write(chunk);
            done();
        } catch (error) {
            // Surface parser failures through the stream error path.
            done(error);
        }
    }
    _final(callback) {
        try {
            if (!this.tokenParser.isEnded) {
                this.tokenParser.end();
            }
            callback();
        } catch (error) {
            callback(error);
        }
    }
}
//# sourceMappingURL=tokenparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.js","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,WAAW,EAA2B,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IAGzD,YACE,OAA2B,EAAE,EAC7B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;QAEzC,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QAC3E,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YACjC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,GAAG,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC9B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;YACtD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}