refactor: move brother_node development artifact to dev/test-nodes subdirectory

Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
2026-03-30 17:09:06 +02:00
parent bf730dcb4a
commit 816e258d4c
11734 changed files with 2001707 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
// Type declarations for @streamparser/json-node: Node.js Transform-stream
// wrappers around the core @streamparser/json parser classes.
// NOTE(review): generated by tsc from src/index.ts — edit the source, not this file.
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
// Re-export shared helpers and token/parse types from the core package so
// consumers need only one import path.
export { utf8, JsonTypes, type ParsedTokenInfo, type ParsedElementInfo, TokenParserMode, type StackElement, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EACT,KAAK,eAAe,EACpB,KAAK,iBAAiB,EACtB,eAAe,EACf,KAAK,YAAY,EACjB,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,18 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenType = exports.TokenParserMode = exports.JsonTypes = exports.utf8 = exports.TokenParser = exports.Tokenizer = exports.JSONParser = void 0;
var jsonparser_js_1 = require("./jsonparser.js");
Object.defineProperty(exports, "JSONParser", { enumerable: true, get: function () { return __importDefault(jsonparser_js_1).default; } });
var tokenizer_js_1 = require("./tokenizer.js");
Object.defineProperty(exports, "Tokenizer", { enumerable: true, get: function () { return __importDefault(tokenizer_js_1).default; } });
var tokenparser_js_1 = require("./tokenparser.js");
Object.defineProperty(exports, "TokenParser", { enumerable: true, get: function () { return __importDefault(tokenparser_js_1).default; } });
var json_1 = require("@streamparser/json");
Object.defineProperty(exports, "utf8", { enumerable: true, get: function () { return json_1.utf8; } });
Object.defineProperty(exports, "JsonTypes", { enumerable: true, get: function () { return json_1.JsonTypes; } });
Object.defineProperty(exports, "TokenParserMode", { enumerable: true, get: function () { return json_1.TokenParserMode; } });
Object.defineProperty(exports, "TokenType", { enumerable: true, get: function () { return json_1.TokenType; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,iDAAwD;AAA/C,4HAAA,OAAO,OAAc;AAC9B,+CAAsD;AAA7C,0HAAA,OAAO,OAAa;AAC7B,mDAA0D;AAAjD,8HAAA,OAAO,OAAe;AAE/B,2CAQ4B;AAP1B,4FAAA,IAAI,OAAA;AACJ,iGAAA,SAAS,OAAA;AAGT,uGAAA,eAAe,OAAA;AAEf,iGAAA,SAAS,OAAA"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type JSONParserOptions } from "@streamparser/json";
/**
 * Transform stream that feeds incoming bytes/strings to a
 * @streamparser/json JSONParser and pushes each parsed value downstream.
 * The readable side is always in object mode (the constructor strips
 * readableObjectMode/writableObjectMode from transformOpts).
 */
export default class JSONParserTransform extends Transform {
    private jsonParser;
    constructor(opts?: JSONParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=jsonparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.d.ts","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAc,KAAK,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IACxD,OAAO,CAAC,UAAU,CAAa;gBAG7B,IAAI,GAAE,iBAAsB,EAC5B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const json_1 = require("@streamparser/json");
class JSONParserTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: false, readableObjectMode: true }));
this.jsonParser = new json_1.JSONParser(opts);
this.jsonParser.onValue = (value) => this.push(value);
this.jsonParser.onError = (err) => {
throw err;
};
this.jsonParser.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
* Main function that send data to the parser to be processed.
*
* @param {Buffer} chunk Incoming data
* @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
* @param {Function} done Called when the proceesing of the supplied chunk is done
*/
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.jsonParser.write(chunk);
done();
}
catch (err) {
done(err);
}
}
_final(callback) {
try {
if (!this.jsonParser.isEnded)
this.jsonParser.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = JSONParserTransform;
//# sourceMappingURL=jsonparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.js","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,mCAIgB;AAChB,6CAAwE;AAExE,MAAqB,mBAAoB,SAAQ,kBAAS;IAGxD,YACE,OAA0B,EAAE,EAC5B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,KAAK,EACzB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,UAAU,GAAG,IAAI,iBAAU,CAAC,IAAI,CAAC,CAAC;QAEvC,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAChC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE;YAC3B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC7B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC;YACpD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,sCAuDC"}

View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenizerOptions } from "@streamparser/json/tokenizer.js";
/**
 * Transform stream that feeds incoming data to a @streamparser/json
 * Tokenizer and pushes each token info object downstream. Both sides run
 * in object mode (the constructor strips readableObjectMode/
 * writableObjectMode from transformOpts).
 */
export default class TokenizerTransform extends Transform {
    private tokenizer;
    constructor(opts?: TokenizerOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenizer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAkB,EAChB,KAAK,gBAAgB,EACtB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IACvD,OAAO,CAAC,SAAS,CAAY;gBAG3B,IAAI,GAAE,gBAAqB,EAC3B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,51 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const tokenizer_js_1 = __importDefault(require("@streamparser/json/tokenizer.js"));
class TokenizerTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
this.tokenizer = new tokenizer_js_1.default(opts);
this.tokenizer.onToken = (parsedTokenInfo) => this.push(parsedTokenInfo);
this.tokenizer.onError = (err) => {
throw err;
};
this.tokenizer.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
* Main function that send data to the parser to be processed.
*
* @param {Buffer} chunk Incoming data
* @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
* @param {Function} done Called when the proceesing of the supplied chunk is done
*/
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.tokenizer.write(chunk);
done();
}
catch (err) {
done(err);
}
}
_final(callback) {
try {
if (!this.tokenizer.isEnded)
this.tokenizer.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = TokenizerTransform;
//# sourceMappingURL=tokenizer.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;;;;AAAA,mCAIgB;AAChB,mFAEyC;AAEzC,MAAqB,kBAAmB,SAAQ,kBAAS;IAGvD,YACE,OAAyB,EAAE,EAC3B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,SAAS,GAAG,IAAI,sBAAS,CAAC,IAAI,CAAC,CAAC;QAErC,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QACzE,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAC/B,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,KAAK,GAAG,GAAG,EAAE;YAC1B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC5B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO;gBAAE,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC;YAClD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,qCAuDC"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenParserOptions } from "@streamparser/json";
/**
 * Transform stream that feeds incoming tokens to a @streamparser/json
 * TokenParser and pushes each parsed result downstream. Both sides run
 * in object mode (the constructor strips readableObjectMode/
 * writableObjectMode from transformOpts).
 */
export default class TokenParserTransform extends Transform {
    private tokenParser;
    constructor(opts?: TokenParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.d.ts","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAe,KAAK,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IACzD,OAAO,CAAC,WAAW,CAAc;gBAG/B,IAAI,GAAE,kBAAuB,EAC7B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const json_1 = require("@streamparser/json");
class TokenParserTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
this.tokenParser = new json_1.TokenParser(opts);
this.tokenParser.onValue = (parsedTokenInfo) => this.push(parsedTokenInfo);
this.tokenParser.onError = (err) => {
throw err;
};
this.tokenParser.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
* Main function that send data to the parser to be processed.
*
* @param {Buffer} chunk Incoming data
* @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
* @param {Function} done Called when the proceesing of the supplied chunk is done
*/
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.tokenParser.write(chunk);
done();
}
catch (err) {
done(err);
}
}
_final(callback) {
try {
if (!this.tokenParser.isEnded)
this.tokenParser.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = TokenParserTransform;
//# sourceMappingURL=tokenparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.js","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,mCAIgB;AAChB,6CAA0E;AAE1E,MAAqB,oBAAqB,SAAQ,kBAAS;IAGzD,YACE,OAA2B,EAAE,EAC7B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,WAAW,GAAG,IAAI,kBAAW,CAAC,IAAI,CAAC,CAAC;QAEzC,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QAC3E,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YACjC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,GAAG,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC9B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;YACtD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,uCAuDC"}