Files
aitbc/dev/env/node_modules/@streamparser/json/dist/mjs/jsonparser.js
aitbc 816e258d4c refactor: move brother_node development artifact to dev/test-nodes subdirectory
Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
2026-03-30 17:09:06 +02:00

47 lines
1.4 KiB
JavaScript
Executable File

import Tokenizer, {} from "./tokenizer.js";
import TokenParser, {} from "./tokenparser.js";
/**
 * Streaming JSON parser that wires a Tokenizer into a TokenParser.
 *
 * Raw input written to this class is tokenized, and each token is fed to
 * the token parser, which assembles values and reports them via `onValue`.
 * Ending either stage closes the other so the pipeline shuts down cleanly.
 */
export default class JSONParser {
    /**
     * @param {object} [opts={}] - Options passed to both the Tokenizer and
     *   the TokenParser constructors.
     */
    constructor(opts = {}) {
        this.tokenizer = new Tokenizer(opts);
        this.tokenParser = new TokenParser(opts);
        // Forward every token the tokenizer produces into the token parser.
        this.tokenizer.onToken = (...args) => this.tokenParser.write(...args);
        // When either stage finishes, close the other if it is still open.
        this.tokenizer.onEnd = () => {
            if (!this.tokenParser.isEnded) {
                this.tokenParser.end();
            }
        };
        this.tokenParser.onEnd = () => {
            if (!this.tokenizer.isEnded) {
                this.tokenizer.end();
            }
        };
        // Route token-parser errors through the tokenizer's error handler.
        this.tokenParser.onError = (...args) => this.tokenizer.error(...args);
    }

    /**
     * @returns {boolean} True once both pipeline stages have ended.
     */
    get isEnded() {
        return this.tokenizer.isEnded && this.tokenParser.isEnded;
    }

    /**
     * Feed a chunk of raw JSON input into the pipeline.
     * @param {string|Uint8Array} input - The next chunk to tokenize.
     */
    write(input) {
        this.tokenizer.write(input);
    }

    /**
     * Signal end of input; the tokenizer's end cascades to the token parser.
     */
    end() {
        this.tokenizer.end();
    }

    /**
     * Install a token callback. The callback observes each token before it
     * is handed on to the token parser.
     * @param {Function} cb - Invoked with every parsed token.
     */
    set onToken(cb) {
        this.tokenizer.onToken = (token) => {
            cb(token);
            this.tokenParser.write(token);
        };
    }

    /**
     * Install a value callback on the token parser.
     * @param {Function} cb - Invoked for each completed JSON value.
     */
    set onValue(cb) {
        this.tokenParser.onValue = cb;
    }

    /**
     * Install an error callback on the tokenizer (the shared error sink).
     * @param {Function} cb - Invoked with any pipeline error.
     */
    set onError(cb) {
        this.tokenizer.onError = cb;
    }

    /**
     * Install an end callback. It fires after the token parser ends, once
     * the tokenizer has also been closed.
     * @param {Function} cb - Invoked (with the token parser as `this`) when
     *   parsing is complete.
     */
    set onEnd(cb) {
        this.tokenParser.onEnd = () => {
            if (!this.tokenizer.isEnded) {
                this.tokenizer.end();
            }
            cb.call(this.tokenParser);
        };
    }
}
//# sourceMappingURL=jsonparser.js.map