Files
aitbc/dev/env/node_modules/@streamparser/json-node/dist/cjs/tokenizer.js
aitbc 816e258d4c refactor: move brother_node development artifact to dev/test-nodes subdirectory
Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
2026-03-30 17:09:06 +02:00

51 lines
1.7 KiB
JavaScript
Executable File

"use strict";
// Interop helper emitted by the TypeScript compiler: an ES module is
// returned untouched, while a plain CommonJS export is wrapped so it can
// be consumed via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const tokenizer_js_1 = __importDefault(require("@streamparser/json/tokenizer.js"));
/**
 * Transform stream (object mode on both sides) that feeds each incoming
 * chunk to a @streamparser/json Tokenizer and pushes every parsed token
 * downstream as a readable-side object.
 */
class TokenizerTransform extends stream_1.Transform {
    /**
     * @param {object} opts Options forwarded to the underlying Tokenizer.
     * @param {object} transformOpts Options forwarded to stream.Transform;
     *   object mode is always forced on for both the writable and
     *   readable sides, overriding anything the caller passes.
     */
    constructor(opts = {}, transformOpts = {}) {
        super({ ...transformOpts, writableObjectMode: true, readableObjectMode: true });
        this.tokenizer = new tokenizer_js_1.default(opts);
        // Forward every parsed token straight to the readable side.
        this.tokenizer.onToken = (token) => this.push(token);
        // The tokenizer invokes its callbacks synchronously from within
        // write()/end(), so rethrowing here surfaces the error inside the
        // try/catch of _transform/_final below, where it reaches the
        // stream callback.
        this.tokenizer.onError = (err) => {
            throw err;
        };
        // When the tokenizer considers the input complete, close the
        // writable side as well — unless the caller already ended it.
        this.tokenizer.onEnd = () => {
            if (!this.writableEnded) {
                this.end();
            }
        };
    }
    /**
     * Sends data to the tokenizer to be processed.
     *
     * @param {Buffer} chunk Incoming data.
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'.
     * @param {Function} done Called when the processing of the supplied chunk is done.
     */
    _transform(chunk, encoding, done) {
        try {
            this.tokenizer.write(chunk);
            done();
        }
        catch (err) {
            // Parse errors rethrown by onError land here and are reported
            // through the stream machinery rather than crashing the process.
            done(err);
        }
    }
    /**
     * Flush hook: finalizes the tokenizer once the writable side ends.
     *
     * @param {Function} callback Invoked when the tokenizer has ended,
     *   or with the error if ending the tokenizer failed.
     */
    _final(callback) {
        try {
            if (!this.tokenizer.isEnded) {
                this.tokenizer.end();
            }
            callback();
        }
        catch (err) {
            callback(err);
        }
    }
}
exports.default = TokenizerTransform;
//# sourceMappingURL=tokenizer.js.map