refactor: move brother_node development artifact to dev/test-nodes subdirectory

Development Artifact Cleanup:
 BROTHER_NODE REORGANIZATION: Moved development test node to appropriate location
- dev/test-nodes/brother_node/: Moved from root directory for better organization
- Contains development configuration, test logs, and test chain data
- No impact on production systems - purely development/testing artifact

 DEVELOPMENT ARTIFACTS IDENTIFIED:
- Chain ID: aitbc-brother-chain (test/development chain)
- Ports: 8010 (P2P) and 8011 (RPC) - different from production
- Environment: .env file with test configuration
- Logs: rpc.log and node.log from development testing session (March 15, 2026)

 ROOT DIRECTORY CLEANUP: Removed development clutter from production directory
- brother_node/ moved to dev/test-nodes/brother_node/
- Root directory now contains only production-ready components
- Development artifacts properly organized in dev/ subdirectory

DIRECTORY STRUCTURE IMPROVEMENT:
📁 dev/test-nodes/: Development and testing node configurations
🏗️ Root Directory: Clean production structure with only essential components
🧪 Development Isolation: Test environments separated from production

BENEFITS:
 Clean Production Directory: No development artifacts in root
 Better Organization: Development nodes grouped in dev/ subdirectory
 Clear Separation: Production vs development environments clearly distinguished
 Maintainability: Easier to identify and manage development components

RESULT: Successfully moved brother_node development artifact to dev/test-nodes/ subdirectory, cleaning up the root directory while preserving development testing environment for future use.
This commit is contained in:
2026-03-30 17:09:06 +02:00
parent bf730dcb4a
commit 816e258d4c
11734 changed files with 2001707 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
// Generated CommonJS type declarations (see sourceMappingURL below):
// re-export the three Node Transform wrappers plus the values and types
// passed through unchanged from the core @streamparser/json package.
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
export { utf8, JsonTypes, type ParsedTokenInfo, type ParsedElementInfo, TokenParserMode, type StackElement, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EACT,KAAK,eAAe,EACpB,KAAK,iBAAiB,EACtB,eAAe,EACf,KAAK,YAAY,EACjB,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,18 @@
"use strict";
// Compiled CommonJS entry point (see sourceMappingURL below): re-exports the
// Node Transform wrappers and pass-through symbols from @streamparser/json.
// TypeScript emit helper normalizing default-export interop between
// CommonJS and ES modules (checks the __esModule marker).
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TokenType = exports.TokenParserMode = exports.JsonTypes = exports.utf8 = exports.TokenParser = exports.Tokenizer = exports.JSONParser = void 0;
// Each wrapper module's default export is exposed under a named binding.
var jsonparser_js_1 = require("./jsonparser.js");
Object.defineProperty(exports, "JSONParser", { enumerable: true, get: function () { return __importDefault(jsonparser_js_1).default; } });
var tokenizer_js_1 = require("./tokenizer.js");
Object.defineProperty(exports, "Tokenizer", { enumerable: true, get: function () { return __importDefault(tokenizer_js_1).default; } });
var tokenparser_js_1 = require("./tokenparser.js");
Object.defineProperty(exports, "TokenParser", { enumerable: true, get: function () { return __importDefault(tokenparser_js_1).default; } });
// Values re-exported unchanged from the core @streamparser/json package.
var json_1 = require("@streamparser/json");
Object.defineProperty(exports, "utf8", { enumerable: true, get: function () { return json_1.utf8; } });
Object.defineProperty(exports, "JsonTypes", { enumerable: true, get: function () { return json_1.JsonTypes; } });
Object.defineProperty(exports, "TokenParserMode", { enumerable: true, get: function () { return json_1.TokenParserMode; } });
Object.defineProperty(exports, "TokenType", { enumerable: true, get: function () { return json_1.TokenType; } });
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,iDAAwD;AAA/C,4HAAA,OAAO,OAAc;AAC9B,+CAAsD;AAA7C,0HAAA,OAAO,OAAa;AAC7B,mDAA0D;AAAjD,8HAAA,OAAO,OAAe;AAE/B,2CAQ4B;AAP1B,4FAAA,IAAI,OAAA;AACJ,iGAAA,SAAS,OAAA;AAGT,uGAAA,eAAe,OAAA;AAEf,iGAAA,SAAS,OAAA"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type JSONParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping a streaming JSONParser: incoming chunks are
 * fed to the parser and each completed JSON value is emitted on the readable
 * side in object mode.
 */
export default class JSONParserTransform extends Transform {
private jsonParser;
constructor(opts?: JSONParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
_final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=jsonparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.d.ts","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAc,KAAK,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IACxD,OAAO,CAAC,UAAU,CAAa;gBAG7B,IAAI,GAAE,iBAAsB,EAC5B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const json_1 = require("@streamparser/json");
// Compiled Node Transform wrapper (see sourceMappingURL below): feeds incoming
// chunks to a streaming JSONParser and pushes each completed JSON value
// downstream in object mode.
class JSONParserTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
// Writable side takes raw chunks (writableObjectMode: false); readable
// side emits parsed values (readableObjectMode: true). Remaining stream
// options come from the caller.
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: false, readableObjectMode: true }));
this.jsonParser = new json_1.JSONParser(opts);
// Every completed value is pushed to the readable side.
this.jsonParser.onValue = (value) => this.push(value);
// Parser errors are re-thrown here; write()/end() are only called inside
// the try blocks of _transform/_final below, so the throw is delivered to
// the stream via done(err)/callback(err).
this.jsonParser.onError = (err) => {
throw err;
};
this.jsonParser.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.jsonParser.write(chunk);
done();
}
catch (err) {
done(err);
}
}
// Flushes the underlying parser when the writable side finishes.
_final(callback) {
try {
if (!this.jsonParser.isEnded)
this.jsonParser.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = JSONParserTransform;
//# sourceMappingURL=jsonparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.js","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,mCAIgB;AAChB,6CAAwE;AAExE,MAAqB,mBAAoB,SAAQ,kBAAS;IAGxD,YACE,OAA0B,EAAE,EAC5B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,KAAK,EACzB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,UAAU,GAAG,IAAI,iBAAU,CAAC,IAAI,CAAC,CAAC;QAEvC,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAChC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE;YAC3B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC7B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC;YACpD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,sCAuDC"}

View File

@@ -0,0 +1,3 @@
{
"type": "commonjs"
}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenizerOptions } from "@streamparser/json/tokenizer.js";
/**
 * Node Transform stream wrapping the streamparser Tokenizer: incoming chunks
 * are tokenized and one token-info object is emitted per parsed JSON token
 * (object mode).
 */
export default class TokenizerTransform extends Transform {
private tokenizer;
constructor(opts?: TokenizerOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
_final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenizer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAkB,EAChB,KAAK,gBAAgB,EACtB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IACvD,OAAO,CAAC,SAAS,CAAY;gBAG3B,IAAI,GAAE,gBAAqB,EAC3B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,51 @@
"use strict";
// TypeScript emit helper normalizing default-export interop between
// CommonJS and ES modules (checks the __esModule marker).
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const tokenizer_js_1 = __importDefault(require("@streamparser/json/tokenizer.js"));
// Compiled Node Transform wrapper (see sourceMappingURL below): tokenizes
// incoming chunks and pushes one token-info object per parsed JSON token.
class TokenizerTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
// Both sides run in object mode (writableObjectMode and
// readableObjectMode are forced to true); other options come from caller.
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
this.tokenizer = new tokenizer_js_1.default(opts);
// Each parsed token is pushed to the readable side.
this.tokenizer.onToken = (parsedTokenInfo) => this.push(parsedTokenInfo);
// Tokenizer errors are re-thrown here; write()/end() are only called
// inside the try blocks of _transform/_final below, so the throw is
// delivered via done(err)/callback(err).
this.tokenizer.onError = (err) => {
throw err;
};
this.tokenizer.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.tokenizer.write(chunk);
done();
}
catch (err) {
done(err);
}
}
// Flushes the underlying tokenizer when the writable side finishes.
_final(callback) {
try {
if (!this.tokenizer.isEnded)
this.tokenizer.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = TokenizerTransform;
//# sourceMappingURL=tokenizer.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;;;;AAAA,mCAIgB;AAChB,mFAEyC;AAEzC,MAAqB,kBAAmB,SAAQ,kBAAS;IAGvD,YACE,OAAyB,EAAE,EAC3B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,SAAS,GAAG,IAAI,sBAAS,CAAC,IAAI,CAAC,CAAC;QAErC,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QACzE,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAC/B,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,KAAK,GAAG,GAAG,EAAE;YAC1B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC5B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO;gBAAE,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC;YAClD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,qCAuDC"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenParserOptions } from "@streamparser/json">;
import { type TokenParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping the streamparser TokenParser: consumes token
 * objects (object mode) and emits each assembled JSON value downstream.
 */
export default class TokenParserTransform extends Transform {
private tokenParser;
constructor(opts?: TokenParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
_final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.d.ts","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAe,KAAK,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IACzD,OAAO,CAAC,WAAW,CAAc;gBAG/B,IAAI,GAAE,kBAAuB,EAC7B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const stream_1 = require("stream");
const json_1 = require("@streamparser/json");
// Compiled Node Transform wrapper (see sourceMappingURL below): consumes
// token objects and pushes each value assembled by the TokenParser.
class TokenParserTransform extends stream_1.Transform {
constructor(opts = {}, transformOpts = {}) {
// Both sides run in object mode (writableObjectMode and
// readableObjectMode are forced to true); other options come from caller.
super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
this.tokenParser = new json_1.TokenParser(opts);
// Each assembled value is pushed to the readable side.
this.tokenParser.onValue = (parsedTokenInfo) => this.push(parsedTokenInfo);
// Parser errors are re-thrown here; write()/end() are only called inside
// the try blocks of _transform/_final below, so the throw is delivered
// via done(err)/callback(err).
this.tokenParser.onError = (err) => {
throw err;
};
this.tokenParser.onEnd = () => {
if (!this.writableEnded)
this.end();
};
}
/**
 * Main function that sends data to the parser to be processed.
 *
 * @param {Buffer} chunk Incoming data
 * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
 * @param {Function} done Called when the processing of the supplied chunk is done
 */
_transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk, encoding, done) {
try {
this.tokenParser.write(chunk);
done();
}
catch (err) {
done(err);
}
}
// Flushes the underlying token parser when the writable side finishes.
_final(callback) {
try {
if (!this.tokenParser.isEnded)
this.tokenParser.end();
callback();
}
catch (err) {
callback(err);
}
}
}
exports.default = TokenParserTransform;
//# sourceMappingURL=tokenparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.js","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,mCAIgB;AAChB,6CAA0E;AAE1E,MAAqB,oBAAqB,SAAQ,kBAAS;IAGzD,YACE,OAA2B,EAAE,EAC7B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,WAAW,GAAG,IAAI,kBAAW,CAAC,IAAI,CAAC,CAAC;QAEzC,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QAC3E,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YACjC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,GAAG,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC9B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;YACtD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF;AAvDD,uCAuDC"}

View File

@@ -0,0 +1,193 @@
# @streamparser/json-node
[![npm version][npm-version-badge]][npm-badge-url]
[![npm monthly downloads][npm-downloads-badge]][npm-badge-url]
[![Build Status][build-status-badge]][build-status-url]
[![Coverage Status][coverage-status-badge]][coverage-status-url]
Fast dependency-free library to parse a JSON stream using utf-8 encoding in Node.js, Deno or any modern browser. Fully compliant with the JSON spec and `JSON.parse(...)`.
*tldr;*
```javascript
import { JSONParser } from '@streamparser/json-node';
const parser = new JSONParser();
inputStream.pipe(parser).pipe(destinationStream);
// Or using events to get the values
parser.on("data", (value) => { /* ... */ });
parser.on("error", err => { /* ... */ });
parser.on("end", () => { /* ... */ });
```
## @streamparser/json ecosystem
There are multiple flavours of @streamparser:
* The **[@streamparser/json](https://www.npmjs.com/package/@streamparser/json)** package allows to parse any JSON string or stream using pure Javascript.
* The **[@streamparser/json-whatwg](https://www.npmjs.com/package/@streamparser/json-whatwg)** wraps `@streamparser/json` into a WHATWG TransformStream.
* The **[@streamparser/json-node](https://www.npmjs.com/package/@streamparser/json-node)** wraps `@streamparser/json` into a node Transform stream.
## Components
### Tokenizer
A JSON compliant tokenizer that parses a utf-8 stream into JSON tokens that are emitted as objects.
```javascript
import { Tokenizer } from '@streamparser/json-node';
const tokenizer = new Tokenizer(opts, transformOpts);
```
Transform options take the standard node Transform stream settings (see [Node docs](https://nodejs.org/api/stream.html#class-streamtransform)).
The available options are:
```javascript
{
stringBufferSize: <number>, // set to 0 to don't buffer. Min valid value is 4.
numberBufferSize: <number>, // set to 0 to don't buffer.
separator: <string>, // separator between object. For example `\n` for nd-js.
emitPartialTokens: <boolean> // whether to emit tokens mid-parsing.
}
```
If buffer sizes are set to anything other than zero, instead of using a string to append the data as it comes in, the data is buffered using a TypedArray. A reasonable size could be `64 * 1024` (64 KB).
#### Buffering
When parsing strings or numbers, the parser needs to gather the data in-memory until the whole value is ready.
Strings are immutable in JavaScript, so every string operation creates a new string. The V8 engine, behind Node, Deno and most modern browsers, performs many different types of optimization. One of these optimizations is to over-allocate memory when it detects many string concatenations. This significantly increases memory consumption and can easily exhaust your memory when parsing JSON containing very large strings or numbers. For those cases, the parser can buffer the characters using a TypedArray. This requires encoding/decoding from/to the buffer into an actual string once the value is ready. This is done using the `TextEncoder` and `TextDecoder` APIs. Unfortunately, these APIs create a significant overhead when the strings are small, so they should be used only when strictly necessary.
### TokenParser
A token parser that processes JSON tokens as emitted by the `Tokenizer` and emits JSON values/objects.
```javascript
import { TokenParser} from '@streamparser/json-node';
const tokenParser = new TokenParser(opts, transformOpts);
```
Transform options take the standard node Transform stream settings (see [Node docs](https://nodejs.org/api/stream.html#class-streamtransform)).
The available options are:
```javascript
{
paths: <string[]>,
keepStack: <boolean>, // whether to keep all the properties in the stack
separator: <string>, // separator between object. For example `\n` for nd-js. If left empty or set to undefined, the token parser will end after parsing the first object. To parse multiple object without any delimiter just set it to the empty string `''`.
emitPartialValues: <boolean>, // whether to emit values mid-parsing.
}
```
* paths: Array of paths to emit. Defaults to `undefined` which emits everything. The paths are intended to support jsonpath although at the time being it only supports the root object selector (`$`) and subproperties selectors including wildcards (`$.a`, `$.*`, `$.a.b`, `$.*.b`, etc).
* keepStack: Whether to keep full objects on the stack even if they won't be emitted. Defaults to `true`. When set to `false`, properties are not preserved in the parent object when one of its ancestors will be emitted. This means that the parent object passed to the `onValue` function will be empty, which doesn't reflect the truth, but it's more memory-efficient.
### JSONParser
The full blown JSON parser. It basically chains a `Tokenizer` and a `TokenParser`.
```javascript
import { JSONParser } from '@streamparser/json-node';
const parser = new JSONParser();
```
## Usage
You can use both components independently as
```javascript
const tokenizer = new Tokenizer(opts);
const tokenParser = new TokenParser();
const jsonParser = tokenizer.pipe(tokenParser);
```
You can subscribe to the resulting data using standard stream piping and events:
```javascript
import { JSONParser } from '@streamparser/json-node';
const parser = new JSONParser({ stringBufferSize: undefined, paths: ['$'] });
inputStream.pipe(parser).pipe(destinationStream);
// Or using events to get the values
parser.on("data", (value) => { /* ... */ });
parser.on("error", err => { /* ... */ });
parser.on("end", () => { /* ... */ });
```
## Examples
### Stream-parsing a fetch request returning a JSONstream
Imagine an endpoint that sends a large amount of JSON objects one after the other (`{"id":1}{"id":2}{"id":3}...`).
```js
import { JSONParser} from '@streamparser/json-node';
const parser = new JSONParser();
const response = await fetch('http://example.com/');
const reader = response.body.pipe(parser);
reader.on('data', value => /* process element */);
```
### Stream-parsing a fetch request returning a JSON array
Imagine an endpoint that sends a very long JSON array of objects (`[{"id":1},{"id":2},{"id":3},...]`).
```js
import { JSONParser } from '@streamparser/json-node';
const parser = new JSONParser({ stringBufferSize: undefined, paths: ['$.*'], keepStack: false });
const response = await fetch('http://example.com/');
const reader = response.body.pipe(parser);
reader.on('data', ({ value, key, parent, stack }) => /* process element */)
```
### Stream-parsing a fetch request returning a very long string getting previews of the string
Imagine an endpoint that sends a very long JSON string (`"Once upon a midnight <...>"`).
```js
import { JSONParser } from '@streamparser/json-node';
const parser = new JSONParser({ stringBufferSize: undefined, paths: ['$.*'], keepStack: false });
const response = await fetch('http://example.com/');
const reader = response.body.pipe(parser);
reader.on('data', ({ value, key, parent, stack, partial }) => {
if (partial) {
console.log(`Parsing value: ${value}... (still parsing)`);
} else {
console.log(`Value parsed: ${value}`);
}
});
```
## License
See [LICENSE.md](../../LICENSE).
[npm-version-badge]: https://badge.fury.io/js/@streamparser%2Fjson-node.svg
[npm-badge-url]: https://www.npmjs.com/package/@streamparser/json-node
[npm-downloads-badge]: https://img.shields.io/npm/dm/@streamparser%2Fjson-node.svg
[build-status-badge]: https://github.com/juanjoDiaz/streamparser-json/actions/workflows/on-push.yaml/badge.svg
[build-status-url]: https://github.com/juanjoDiaz/streamparser-json/actions/workflows/on-push.yaml
[coverage-status-badge]: https://coveralls.io/repos/github/juanjoDiaz/streamparser-json/badge.svg?branch=main
[coverage-status-url]: https://coveralls.io/github/juanjoDiaz/streamparser-json?branch=main

View File

@@ -0,0 +1,13 @@
// Deno entry point: re-exports the three Transform wrappers plus the values
// and types passed through unchanged from the core streamparser_json module.
export { default as JSONParser } from "./jsonparser.ts";
export { default as Tokenizer } from "./tokenizer.ts";
export { default as TokenParser } from "./tokenparser.ts";
export {
utf8,
JsonTypes,
type ParsedTokenInfo,
type ParsedElementInfo,
TokenParserMode,
type StackElement,
TokenType,
} from "https://deno.land/x/streamparser_json@v0.0.22/index.ts";

View File

@@ -0,0 +1,63 @@
import {
Transform,
type TransformOptions,
type TransformCallback,
} from "stream";
import { JSONParser, type JSONParserOptions } from "https://deno.land/x/streamparser_json@v0.0.22/index.ts";
/**
 * Transform stream that parses its input as streaming JSON and emits every
 * completed value on the readable side (object mode).
 */
export default class JSONParserTransform extends Transform {
  private jsonParser: JSONParser;

  constructor(
    opts: JSONParserOptions = {},
    transformOpts: Omit<
      TransformOptions,
      "readableObjectMode" | "writableObjectMode"
    > = {},
  ) {
    // Writable side consumes raw chunks; readable side emits parsed values.
    const streamOptions = {
      ...transformOpts,
      writableObjectMode: false,
      readableObjectMode: true,
    };
    super(streamOptions);

    const parser = new JSONParser(opts);
    parser.onValue = (value) => this.push(value);
    // Re-thrown so the try/catch around write()/end() reports the failure
    // through the stream callbacks.
    parser.onError = (err) => {
      throw err;
    };
    parser.onEnd = () => {
      if (this.writableEnded) return;
      this.end();
    };
    this.jsonParser = parser;
  }

  /**
   * Hands an incoming chunk to the underlying JSON parser.
   *
   * @param chunk Incoming data.
   * @param encoding Encoding of the incoming data (defaults to 'utf8').
   * @param done Invoked once processing of the chunk has finished.
   */
  override _transform(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    chunk: any,
    encoding: BufferEncoding,
    done: TransformCallback,
  ): void {
    try {
      this.jsonParser.write(chunk);
      done();
    } catch (err: unknown) {
      done(err as Error);
    }
  }

  /** Finishes the underlying parser when the writable side ends. */
  override _final(callback: (error?: Error | null) => void): void {
    try {
      if (!this.jsonParser.isEnded) this.jsonParser.end();
      callback();
    } catch (err: unknown) {
      callback(err as Error);
    }
  }
}

View File

@@ -0,0 +1,65 @@
import {
Transform,
type TransformOptions,
type TransformCallback,
} from "stream";
import Tokenizer, {
type TokenizerOptions,
} from "https://deno.land/x/streamparser_json@v0.0.22/tokenizer.ts";
export default class TokenizerTransform extends Transform {
private tokenizer: Tokenizer;
constructor(
opts: TokenizerOptions = {},
transformOpts: Omit<
TransformOptions,
"readableObjectMode" | "writableObjectMode"
> = {},
) {
super({
...transformOpts,
writableObjectMode: true,
readableObjectMode: true,
});
this.tokenizer = new Tokenizer(opts);
this.tokenizer.onToken = (parsedTokenInfo) => this.push(parsedTokenInfo);
this.tokenizer.onError = (err) => {
throw err;
};
this.tokenizer.onEnd = () => {
if (!this.writableEnded) this.end();
};
}
/**
* Main function that send data to the parser to be processed.
*
* @param {Buffer} chunk Incoming data
* @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
* @param {Function} done Called when the proceesing of the supplied chunk is done
*/
override _transform(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunk: any,
encoding: BufferEncoding,
done: TransformCallback,
): void {
try {
this.tokenizer.write(chunk);
done();
} catch (err: unknown) {
done(err as Error);
}
}
override _final(callback: (error?: Error | null) => void): void {
try {
if (!this.tokenizer.isEnded) this.tokenizer.end();
callback();
} catch (err: unknown) {
callback(err as Error);
}
}
}

View File

@@ -0,0 +1,63 @@
import {
Transform,
type TransformOptions,
type TransformCallback,
} from "stream";
import { TokenParser, type TokenParserOptions } from "https://deno.land/x/streamparser_json@v0.0.22/index.ts";
/**
 * Transform stream that consumes token objects and emits each JSON value the
 * underlying TokenParser assembles (object mode on both sides).
 */
export default class TokenParserTransform extends Transform {
  private tokenParser: TokenParser;

  constructor(
    opts: TokenParserOptions = {},
    transformOpts: Omit<
      TransformOptions,
      "readableObjectMode" | "writableObjectMode"
    > = {},
  ) {
    // Both sides run in object mode; remaining options come from the caller.
    const streamOptions = {
      ...transformOpts,
      writableObjectMode: true,
      readableObjectMode: true,
    };
    super(streamOptions);

    const parser = new TokenParser(opts);
    parser.onValue = (parsedTokenInfo) => this.push(parsedTokenInfo);
    // Re-thrown so the try/catch around write()/end() reports the failure
    // through the stream callbacks.
    parser.onError = (err) => {
      throw err;
    };
    parser.onEnd = () => {
      if (this.writableEnded) return;
      this.end();
    };
    this.tokenParser = parser;
  }

  /**
   * Hands an incoming token chunk to the underlying token parser.
   *
   * @param chunk Incoming data.
   * @param encoding Encoding of the incoming data (defaults to 'utf8').
   * @param done Invoked once processing of the chunk has finished.
   */
  override _transform(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    chunk: any,
    encoding: BufferEncoding,
    done: TransformCallback,
  ): void {
    try {
      this.tokenParser.write(chunk);
      done();
    } catch (err: unknown) {
      done(err as Error);
    }
  }

  /** Finishes the underlying token parser when the writable side ends. */
  override _final(callback: (error?: Error | null) => void): void {
    try {
      if (!this.tokenParser.isEnded) this.tokenParser.end();
      callback();
    } catch (err: unknown) {
      callback(err as Error);
    }
  }
}

View File

@@ -0,0 +1,16 @@
import type { ParsedElementInfo } from "https://deno.land/x/streamparser_json@v0.0.17/utils/types/parsedElementInfo.ts";
/**
 * Returns a snapshot of a ParsedElementInfo: `value` and `key` are copied
 * as-is, while `parent` and `stack` are deep-cloned (they are objects passed
 * by reference).
 */
export function cloneParsedElementInfo(
  parsedElementInfo: ParsedElementInfo,
): ParsedElementInfo {
  return {
    value: parsedElementInfo.value,
    key: parsedElementInfo.key,
    parent: clone(parsedElementInfo.parent),
    stack: clone(parsedElementInfo.stack),
  };
}
/**
 * Deep-clones a value via a JSON round-trip.
 *
 * Primitives and null are returned as-is (passed by value / immutable).
 * Arrays made up entirely of empty positions (holes) are also returned
 * as-is, because JSON.stringify would turn each hole into null.
 *
 * @param obj Value to clone.
 * @returns A deep copy for objects/arrays; the same value otherwise.
 */
function clone<T>(obj: T): T {
  // Only objects are passed by reference and must be cloned.
  if (obj === null || typeof obj !== "object") return obj;
  // Preserve hole-only arrays: filter's callback is skipped for holes, so a
  // hole-only array yields length 0. (The previous check `filter((i) => i)`
  // also skipped cloning arrays of falsy values such as [0, false, ""],
  // leaking the original reference to the caller.)
  if (
    Array.isArray(obj) &&
    obj.length > 0 &&
    obj.filter(() => true).length === 0
  )
    return obj;
  return JSON.parse(JSON.stringify(obj));
}

View File

@@ -0,0 +1,5 @@
// Generated ES-module type declarations (see sourceMappingURL below):
// re-export the three Transform wrappers plus the values and types passed
// through unchanged from the core @streamparser/json package.
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
export { utf8, JsonTypes, type ParsedTokenInfo, type ParsedElementInfo, TokenParserMode, type StackElement, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EACT,KAAK,eAAe,EACpB,KAAK,iBAAiB,EACtB,eAAe,EACf,KAAK,YAAY,EACjB,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,5 @@
// Compiled ES-module entry point (see sourceMappingURL below). Only value
// exports appear here; the type-only exports from the declaration file are
// erased at compile time.
export { default as JSONParser } from "./jsonparser.js";
export { default as Tokenizer } from "./tokenizer.js";
export { default as TokenParser } from "./tokenparser.js";
export { utf8, JsonTypes, TokenParserMode, TokenType, } from "@streamparser/json";
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,MAAM,gBAAgB,CAAC;AACtD,OAAO,EAAE,OAAO,IAAI,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE1D,OAAO,EACL,IAAI,EACJ,SAAS,EAGT,eAAe,EAEf,SAAS,GACV,MAAM,oBAAoB,CAAC"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
// Generated declaration (tsc emit) for src/jsonparser.ts.
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type JSONParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping a JSONParser: writable side consumes raw
 * bytes, readable side emits parsed values (object mode). Object-mode flags
 * are fixed by the constructor, hence the Omit on transformOpts.
 */
export default class JSONParserTransform extends Transform {
    private jsonParser;
    constructor(opts?: JSONParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=jsonparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.d.ts","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAc,KAAK,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IACxD,OAAO,CAAC,UAAU,CAAa;gBAG7B,IAAI,GAAE,iBAAsB,EAC5B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
// Generated tsc output for src/jsonparser.ts. The adjacent jsonparser.js.map
// encodes this exact line/column layout, so keep edits comment-only and
// prefer regenerating from source.
import { Transform, } from "stream";
import { JSONParser } from "@streamparser/json";
/**
 * Transform stream feeding incoming bytes to a JSONParser and pushing each
 * parsed value downstream (writable side: bytes, readable side: objects).
 */
export default class JSONParserTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Force byte input / object output regardless of caller-supplied options.
        super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: false, readableObjectMode: true }));
        this.jsonParser = new JSONParser(opts);
        // Each completed value is pushed to the readable side.
        this.jsonParser.onValue = (value) => this.push(value);
        // Rethrow so the synchronous write() in _transform lands in its
        // try/catch and is reported via done(err).
        this.jsonParser.onError = (err) => {
            throw err;
        };
        // When the parser signals end, close the writable side if still open.
        this.jsonParser.onEnd = () => {
            if (!this.writableEnded)
                this.end();
        };
    }
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    chunk, encoding, done) {
        try {
            this.jsonParser.write(chunk);
            done();
        }
        catch (err) {
            // Parser errors (including rethrows from onError) surface here.
            done(err);
        }
    }
    // Flush hook: tell the parser input is complete so it can validate that
    // the JSON document is not truncated.
    _final(callback) {
        try {
            if (!this.jsonParser.isEnded)
                this.jsonParser.end();
            callback();
        }
        catch (err) {
            callback(err);
        }
    }
}
//# sourceMappingURL=jsonparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"jsonparser.js","sourceRoot":"","sources":["../../src/jsonparser.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,UAAU,EAA0B,MAAM,oBAAoB,CAAC;AAExE,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,SAAS;IAGxD,YACE,OAA0B,EAAE,EAC5B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,KAAK,EACzB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QAEvC,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAChC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,UAAU,CAAC,KAAK,GAAG,GAAG,EAAE;YAC3B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC7B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO;gBAAE,IAAI,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC;YACpD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
// Generated declaration (tsc emit) for src/tokenizer.ts.
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenizerOptions } from "@streamparser/json/tokenizer.js";
/**
 * Node Transform stream wrapping a Tokenizer: writable side consumes raw
 * input, readable side emits token info objects (object mode). Object-mode
 * flags are fixed by the constructor, hence the Omit on transformOpts.
 */
export default class TokenizerTransform extends Transform {
    private tokenizer;
    constructor(opts?: TokenizerOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenizer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAkB,EAChB,KAAK,gBAAgB,EACtB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IACvD,OAAO,CAAC,SAAS,CAAY;gBAG3B,IAAI,GAAE,gBAAqB,EAC3B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
// Generated tsc output for src/tokenizer.ts. The adjacent tokenizer.js.map
// encodes this exact line/column layout, so keep edits comment-only and
// prefer regenerating from source.
import { Transform, } from "stream";
import Tokenizer, {} from "@streamparser/json/tokenizer.js";
/**
 * Transform stream feeding incoming chunks to a Tokenizer and pushing each
 * tokenization result downstream. Note both sides are object mode here,
 * unlike JSONParserTransform whose writable side is byte mode.
 */
export default class TokenizerTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Force object mode on both sides regardless of caller-supplied options.
        super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
        this.tokenizer = new Tokenizer(opts);
        // Each parsed token is pushed to the readable side.
        this.tokenizer.onToken = (parsedTokenInfo) => this.push(parsedTokenInfo);
        // Rethrow so the synchronous write() in _transform lands in its
        // try/catch and is reported via done(err).
        this.tokenizer.onError = (err) => {
            throw err;
        };
        // When the tokenizer signals end, close the writable side if still open.
        this.tokenizer.onEnd = () => {
            if (!this.writableEnded)
                this.end();
        };
    }
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    chunk, encoding, done) {
        try {
            this.tokenizer.write(chunk);
            done();
        }
        catch (err) {
            // Tokenizer errors (including rethrows from onError) surface here.
            done(err);
        }
    }
    // Flush hook: tell the tokenizer input is complete so it can flag
    // truncated input.
    _final(callback) {
        try {
            if (!this.tokenizer.isEnded)
                this.tokenizer.end();
            callback();
        }
        catch (err) {
            callback(err);
        }
    }
}
//# sourceMappingURL=tokenizer.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenizer.js","sourceRoot":"","sources":["../../src/tokenizer.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,SAAS,EAAE,EAEjB,MAAM,iCAAiC,CAAC;AAEzC,MAAM,CAAC,OAAO,OAAO,kBAAmB,SAAQ,SAAS;IAGvD,YACE,OAAyB,EAAE,EAC3B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QAErC,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QACzE,IAAI,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YAC/B,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,KAAK,GAAG,GAAG,EAAE;YAC1B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC5B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO;gBAAE,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC;YAClD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}

View File

@@ -0,0 +1,18 @@
/// <reference types="node" resolution-mode="require"/>
/// <reference types="node" resolution-mode="require"/>
// Generated declaration (tsc emit) for src/tokenparser.ts.
import { Transform, type TransformOptions, type TransformCallback } from "stream";
import { type TokenParserOptions } from "@streamparser/json";
/**
 * Node Transform stream wrapping a TokenParser: writable side consumes token
 * objects (typically produced by TokenizerTransform), readable side emits
 * parsed values (object mode). Object-mode flags are fixed by the
 * constructor, hence the Omit on transformOpts.
 */
export default class TokenParserTransform extends Transform {
    private tokenParser;
    constructor(opts?: TokenParserOptions, transformOpts?: Omit<TransformOptions, "readableObjectMode" | "writableObjectMode">);
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(chunk: any, encoding: BufferEncoding, done: TransformCallback): void;
    _final(callback: (error?: Error | null) => void): void;
}
//# sourceMappingURL=tokenparser.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.d.ts","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,SAAS,EACT,KAAK,gBAAgB,EACrB,KAAK,iBAAiB,EACvB,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAe,KAAK,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IACzD,OAAO,CAAC,WAAW,CAAc;gBAG/B,IAAI,GAAE,kBAAuB,EAC7B,aAAa,GAAE,IAAI,CACjB,gBAAgB,EAChB,oBAAoB,GAAG,oBAAoB,CACvC;IAkBR;;;;;;OAMG;IACM,UAAU,CAEjB,KAAK,EAAE,GAAG,EACV,QAAQ,EAAE,cAAc,EACxB,IAAI,EAAE,iBAAiB,GACtB,IAAI;IASE,MAAM,CAAC,QAAQ,EAAE,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,IAAI,KAAK,IAAI,GAAG,IAAI;CAQhE"}

View File

@@ -0,0 +1,45 @@
// Generated tsc output for src/tokenparser.ts. The adjacent tokenparser.js.map
// encodes this exact line/column layout, so keep edits comment-only and
// prefer regenerating from source.
import { Transform, } from "stream";
import { TokenParser } from "@streamparser/json";
/**
 * Transform stream feeding incoming tokens to a TokenParser and pushing each
 * parsed result downstream. Both sides run in object mode: tokens in,
 * values out.
 */
export default class TokenParserTransform extends Transform {
    constructor(opts = {}, transformOpts = {}) {
        // Force object mode on both sides regardless of caller-supplied options.
        super(Object.assign(Object.assign({}, transformOpts), { writableObjectMode: true, readableObjectMode: true }));
        this.tokenParser = new TokenParser(opts);
        // Each completed value is pushed to the readable side.
        this.tokenParser.onValue = (parsedTokenInfo) => this.push(parsedTokenInfo);
        // Rethrow so the synchronous write() in _transform lands in its
        // try/catch and is reported via done(err).
        this.tokenParser.onError = (err) => {
            throw err;
        };
        // When the parser signals end, close the writable side if still open.
        this.tokenParser.onEnd = () => {
            if (!this.writableEnded)
                this.end();
        };
    }
    /**
     * Main function that sends data to the parser to be processed.
     *
     * @param {Buffer} chunk Incoming data
     * @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
     * @param {Function} done Called when the processing of the supplied chunk is done
     */
    _transform(
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    chunk, encoding, done) {
        try {
            this.tokenParser.write(chunk);
            done();
        }
        catch (err) {
            // Parser errors (including rethrows from onError) surface here.
            done(err);
        }
    }
    // Flush hook: tell the parser input is complete so it can flag a
    // truncated token stream.
    _final(callback) {
        try {
            if (!this.tokenParser.isEnded)
                this.tokenParser.end();
            callback();
        }
        catch (err) {
            callback(err);
        }
    }
}
//# sourceMappingURL=tokenparser.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"tokenparser.js","sourceRoot":"","sources":["../../src/tokenparser.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,GAGV,MAAM,QAAQ,CAAC;AAChB,OAAO,EAAE,WAAW,EAA2B,MAAM,oBAAoB,CAAC;AAE1E,MAAM,CAAC,OAAO,OAAO,oBAAqB,SAAQ,SAAS;IAGzD,YACE,OAA2B,EAAE,EAC7B,gBAGI,EAAE;QAEN,KAAK,iCACA,aAAa,KAChB,kBAAkB,EAAE,IAAI,EACxB,kBAAkB,EAAE,IAAI,IACxB,CAAC;QACH,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;QAEzC,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,eAAe,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;QAC3E,IAAI,CAAC,WAAW,CAAC,OAAO,GAAG,CAAC,GAAG,EAAE,EAAE;YACjC,MAAM,GAAG,CAAC;QACZ,CAAC,CAAC;QACF,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,GAAG,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,aAAa;gBAAE,IAAI,CAAC,GAAG,EAAE,CAAC;QACtC,CAAC,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACM,UAAU;IACjB,8DAA8D;IAC9D,KAAU,EACV,QAAwB,EACxB,IAAuB;QAEvB,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;YAC9B,IAAI,EAAE,CAAC;SACR;QAAC,OAAO,GAAY,EAAE;YACrB,IAAI,CAAC,GAAY,CAAC,CAAC;SACpB;IACH,CAAC;IAEQ,MAAM,CAAC,QAAwC;QACtD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;YACtD,QAAQ,EAAE,CAAC;SACZ;QAAC,OAAO,GAAY,EAAE;YACrB,QAAQ,CAAC,GAAY,CAAC,CAAC;SACxB;IACH,CAAC;CACF"}