-
-
Notifications
You must be signed in to change notification settings - Fork 1k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Co-authored-by: Glenn Kroeze <[email protected]>
- Loading branch information
1 parent
867d8d2
commit 0548d55
Showing
8 changed files
with
367 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
.DS_Store | ||
*.test.js | ||
CHANGELOG.md | ||
lib | ||
tsconfig.json | ||
tsconfig-build.json | ||
tsconfig-build.tsbuildinfo |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
# @serialport/parser-start-end | ||
|
||
See our API docs: https://serialport.io/docs/api-parser-start-end |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,197 @@ | ||
/* eslint-disable @typescript-eslint/no-explicit-any */
import sinon from 'sinon'
import { StartEndParser } from './'
import { assert } from '../../../test/assert'

// ASCII framing control characters: STX (start of text) and ETX (end of text).
// Every test frames its payloads between these two delimiters.
const STX = '\x02'
const ETX = '\x03'

describe('StartEndParser', () => {
  it('transforms data to strings split on startDelimiter and endDelimiter', () => {
    const spy = sinon.spy()
    const parser = new StartEndParser({
      startDelimiter: STX,
      endDelimiter: ETX,
    })
    parser.on('data', spy)
    // Frames may be split arbitrarily across write() calls; the parser must
    // reassemble them. The trailing "even you!" has no ETX and must not emit.
    parser.write(Buffer.from(`${STX}I love robots${ETX}${STX}Each `))
    parser.write(Buffer.from(`and Every One${ETX}`))
    parser.write(Buffer.from(STX))
    parser.write(Buffer.from(`even you!`))

    assert.deepEqual(spy.getCall(0).args[0], Buffer.from('I love robots'))
    assert.deepEqual(spy.getCall(1).args[0], Buffer.from('Each and Every One'))
    // Only the two fully terminated frames are emitted.
    assert(spy.calledTwice)
  })

  it('includes startDelimiter when includeStartDelimiter is true', () => {
    const spy = sinon.spy()
    const parser = new StartEndParser({
      startDelimiter: STX,
      endDelimiter: ETX,
      includeStartDelimiter: true,
    })
    parser.on('data', spy)
    parser.write(Buffer.from(`${STX}I love robots${ETX}${STX}Each `))
    parser.write(Buffer.from(`and Every One${ETX}`))
    parser.write(Buffer.from(STX))
    parser.write(Buffer.from(`even you!`))

    // Emitted frames keep the leading STX but still drop the trailing ETX.
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from(`${STX}I love robots`))
    assert.deepEqual(spy.getCall(1).args[0], Buffer.from(`${STX}Each and Every One`))
    assert(spy.calledTwice)
  })

  it('includes endDelimiter when includeEndDelimiter is true', () => {
    const spy = sinon.spy()
    const parser = new StartEndParser({
      startDelimiter: STX,
      endDelimiter: ETX,
      includeEndDelimiter: true,
    })
    parser.on('data', spy)
    parser.write(Buffer.from(`${STX}I love robots${ETX}${STX}Each `))
    parser.write(Buffer.from(`and Every One${ETX}`))
    parser.write(Buffer.from(STX))
    parser.write(Buffer.from(`even you!`))

    // Emitted frames keep the trailing ETX but still drop the leading STX.
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from(`I love robots${ETX}`))
    assert.deepEqual(spy.getCall(1).args[0], Buffer.from(`Each and Every One${ETX}`))
    assert(spy.calledTwice)
  })

  it('includes both delimiters when includeStartDelimiter and includeEndDelimiter are true', () => {
    const spy = sinon.spy()
    const parser = new StartEndParser({
      startDelimiter: STX,
      endDelimiter: ETX,
      includeStartDelimiter: true,
      includeEndDelimiter: true,
    })
    parser.on('data', spy)
    parser.write(Buffer.from(`${STX}I love robots${ETX}${STX}Each `))
    parser.write(Buffer.from(`and Every One${ETX}`))
    parser.write(Buffer.from(STX))
    parser.write(Buffer.from(`even you!`))

    // Emitted frames keep both delimiters.
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from(`${STX}I love robots${ETX}`))
    assert.deepEqual(spy.getCall(1).args[0], Buffer.from(`${STX}Each and Every One${ETX}`))
    assert(spy.calledTwice)
  })

  it('flushes remaining data when the stream ends', () => {
    const parser = new StartEndParser({ startDelimiter: STX, endDelimiter: ETX })
    const spy = sinon.spy()
    parser.on('data', spy)
    // Un-framed data (no delimiters at all) is buffered, not emitted…
    parser.write(Buffer.from([1]))
    assert.equal(spy.callCount, 0)
    // …until end(), at which point the buffered remainder is flushed as-is.
    parser.end()
    assert.equal(spy.callCount, 1)
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from([1]))
  })

  it('throws when not provided with a startDelimiter', () => {
    // Checked both with a typed options object cast away and with the
    // constructor itself cast to any, to cover plain-JS callers.
    assert.throws(() => {
      new StartEndParser({ endDelimiter: ETX } as any)
    })
    assert.throws(() => {
      new (StartEndParser as any)({ endDelimiter: ETX })
    })
  })

  it('throws when not provided with an endDelimiter', () => {
    assert.throws(() => {
      new StartEndParser({ startDelimiter: STX } as any)
    })
    assert.throws(() => {
      new (StartEndParser as any)({ startDelimiter: STX })
    })
  })

  it(`throws when called with a 0 length startDelimiter`, () => {
    // Empty delimiters are rejected for each supported input type.
    assert.throws(() => {
      new StartEndParser({
        startDelimiter: Buffer.alloc(0),
      } as any)
    })

    assert.throws(() => {
      new StartEndParser({
        startDelimiter: '',
      } as any)
    })

    assert.throws(() => {
      new StartEndParser({
        startDelimiter: [],
      } as any)
    })
  })

  it(`throws when called with a 0 length endDelimiter`, () => {
    assert.throws(() => {
      new StartEndParser({
        endDelimiter: Buffer.alloc(0),
      } as any)
    })

    assert.throws(() => {
      new StartEndParser({
        endDelimiter: '',
      } as any)
    })

    assert.throws(() => {
      new StartEndParser({
        endDelimiter: [],
      } as any)
    })
  })

  // The three accepted delimiter types: string, Buffer, and byte array.
  it(`allows setting of the startDelimiter and endDelimiter with strings`, () => {
    new StartEndParser({ startDelimiter: 'string', endDelimiter: 'string' })
  })

  it(`allows setting of the startDelimiter and endDelimiter with buffers`, () => {
    new StartEndParser({ startDelimiter: Buffer.from([1]), endDelimiter: Buffer.from([1]) })
  })

  it(`allows setting of the startDelimiter and endDelimiter with arrays of bytes`, () => {
    new StartEndParser({ startDelimiter: [1], endDelimiter: [1] })
  })

  it('Works when buffer starts with [startDelimiter, endDelimiter]', () => {
    // The leading STX+ETX pair is an empty frame: only the two non-empty
    // frames produce 'data' events.
    const data = Buffer.from(`${STX}${ETX}${STX}Hello${ETX}${STX}World${ETX}`)
    const parser = new StartEndParser({ startDelimiter: STX, endDelimiter: ETX })
    const spy = sinon.spy()
    parser.on('data', spy)
    parser.write(data)
    assert.equal(spy.callCount, 2)
  })

  it('continues looking for delimiters in the next buffers', () => {
    const parser = new StartEndParser({ startDelimiter: STX, endDelimiter: ETX })
    const spy = sinon.spy()
    parser.on('data', spy)
    // A single write can complete one pending frame and open another.
    parser.write(Buffer.from(`${STX}This could be${ETX}${STX}binary `))
    parser.write(Buffer.from(`data${ETX}${STX}sent from a Moteino${ETX}`))
    assert.equal(spy.callCount, 3)
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from('This could be'))
    assert.deepEqual(spy.getCall(1).args[0], Buffer.from('binary data'))
    assert.deepEqual(spy.getCall(2).args[0], Buffer.from('sent from a Moteino'))
  })

  it('works if a multibyte delimiter crosses a chunk boundary', () => {
    const parser = new StartEndParser({
      startDelimiter: [7, 7],
      endDelimiter: [8, 8],
    })
    const spy = sinon.spy()
    parser.on('data', spy)
    // Both the [7,7] start and the [8,8] end delimiter are split across writes.
    parser.write(Buffer.from([1, 2, 3, 7]))
    parser.write(Buffer.from([7, 2, 3, 8]))
    parser.write(Buffer.from([8]))
    assert.equal(spy.callCount, 1)
    // Bytes before the start delimiter ([1]) are excluded from the frame.
    assert.deepEqual(spy.getCall(0).args[0], Buffer.from([2, 3]))
  })
})
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,82 @@ | ||
import { Transform, TransformCallback, TransformOptions } from 'stream' | ||
|
||
/**
 * Configuration for {@link StartEndParser}.
 *
 * Extends the standard stream `TransformOptions`, so any Transform option
 * (e.g. `highWaterMark`) may also be supplied.
 */
export interface StartEndOptions extends TransformOptions {
  /** The delimiter on which an incoming block of data is considered to start. */
  startDelimiter: string | Buffer | number[]
  /** The delimiter on which an incoming block of data is considered to end. */
  endDelimiter: string | Buffer | number[]
  /** Should the startDelimiter be included at the start of data. Defaults to `false` */
  includeStartDelimiter?: boolean
  /** Should the endDelimiter be included at the end of data. Defaults to `false` */
  includeEndDelimiter?: boolean
}
|
||
/** | ||
* A transform stream that emits data each time a byte sequence is received. | ||
* @extends Transform | ||
* | ||
* To use the `StartEnd` parser, provide the startDelimiter and endDelimiter as a strings, buffers, or arrays of bytes. | ||
*/ | ||
export class StartEndParser extends Transform { | ||
startDelimiter: Buffer | ||
endDelimiter: Buffer | ||
includeStartDelimiter: boolean | ||
includeEndDelimiter: boolean | ||
buffer: Buffer | ||
|
||
constructor({ startDelimiter, endDelimiter, includeStartDelimiter = false, includeEndDelimiter = false, ...options }: StartEndOptions) { | ||
super(options) | ||
|
||
if (startDelimiter === undefined) { | ||
throw new TypeError('"startDelimiter" is not a bufferable object') | ||
} | ||
|
||
if (endDelimiter === undefined) { | ||
throw new TypeError('"endDelimiter" is not a bufferable object') | ||
} | ||
|
||
if (startDelimiter.length === 0) { | ||
throw new TypeError('"startDelimiter" has a 0 or undefined length') | ||
} | ||
|
||
if (endDelimiter.length === 0) { | ||
throw new TypeError('"endDelimiter" has a 0 or undefined length') | ||
} | ||
|
||
this.startDelimiter = Buffer.from(startDelimiter) | ||
this.endDelimiter = Buffer.from(endDelimiter) | ||
this.includeStartDelimiter = includeStartDelimiter | ||
this.includeEndDelimiter = includeEndDelimiter | ||
this.buffer = Buffer.alloc(0) | ||
} | ||
|
||
_transform(chunk: Buffer, encoding: BufferEncoding, cb: TransformCallback) { | ||
let data = Buffer.concat([this.buffer, chunk]) | ||
let startIndex: number | ||
let endIndex: number | ||
|
||
do { | ||
startIndex = data.indexOf(this.startDelimiter) | ||
endIndex = data.indexOf(this.endDelimiter, startIndex + this.startDelimiter.length) | ||
|
||
if (startIndex >= 0 && endIndex >= 0) { | ||
const block = data.slice( | ||
startIndex + (this.includeStartDelimiter ? 0 : this.startDelimiter.length), | ||
endIndex + (this.includeEndDelimiter ? this.endDelimiter.length : 0) | ||
) | ||
|
||
this.push(block) | ||
data = data.slice(endIndex + this.endDelimiter.length) | ||
} | ||
} while (startIndex >= 0 && endIndex >= 0) | ||
|
||
this.buffer = data | ||
cb() | ||
} | ||
|
||
_flush(cb: TransformCallback) { | ||
this.push(this.buffer) | ||
this.buffer = Buffer.alloc(0) | ||
cb() | ||
} | ||
} |
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
{ | ||
"name": "@serialport/parser-start-end", | ||
"main": "./dist/index.js", | ||
"types": "./dist/index.d.ts", | ||
"version": "10.5.0", | ||
"engines": { | ||
"node": ">=12.0.0" | ||
}, | ||
"publishConfig": { | ||
"access": "public" | ||
}, | ||
"license": "MIT", | ||
"scripts": { | ||
"build": "tsc --build tsconfig-build.json" | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "git://github.com/serialport/node-serialport.git" | ||
}, | ||
"funding": "https://opencollective.com/serialport/donate", | ||
"devDependencies": { | ||
"typescript": "^4.5.5" | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
{ | ||
"extends": "../../tsconfig-build.json", | ||
"compilerOptions": { | ||
"outDir": "dist", | ||
"rootDir": "lib" | ||
}, | ||
"exclude": ["node_modules", "**/*.test.ts", "dist"] | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
{ | ||
"extends": "../../tsconfig.json" | ||
} |