Init
This commit is contained in:
16
node_modules/bl/.github/dependabot.yml
generated
vendored
Normal file
16
node_modules/bl/.github/dependabot.yml
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: 'github-actions'
|
||||
directory: '/'
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
commit-message:
|
||||
prefix: 'chore'
|
||||
include: 'scope'
|
||||
- package-ecosystem: 'npm'
|
||||
directory: '/'
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
commit-message:
|
||||
prefix: 'chore'
|
||||
include: 'scope'
|
||||
61
node_modules/bl/.github/workflows/test-and-release.yml
generated
vendored
Normal file
61
node_modules/bl/.github/workflows/test-and-release.yml
generated
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
name: Test & Maybe Release
|
||||
on: [push, pull_request]
|
||||
jobs:
|
||||
test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node: [18.x, 20.x, lts/*, current]
|
||||
os: [macos-latest, ubuntu-latest, windows-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Use Node.js ${{ matrix.node }}
|
||||
uses: actions/setup-node@v4.0.3
|
||||
with:
|
||||
node-version: ${{ matrix.node }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
npm install --no-progress
|
||||
- name: Run tests
|
||||
run: |
|
||||
npm config set script-shell bash
|
||||
npm run test:ci
|
||||
release:
|
||||
name: Release
|
||||
needs: test
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4.0.3
|
||||
with:
|
||||
node-version: lts/*
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
npm install --no-progress --no-package-lock --no-save
|
||||
- name: Build
|
||||
run: |
|
||||
npm run build
|
||||
- name: Install plugins
|
||||
run: |
|
||||
npm install \
|
||||
@semantic-release/commit-analyzer \
|
||||
conventional-changelog-conventionalcommits \
|
||||
@semantic-release/release-notes-generator \
|
||||
@semantic-release/npm \
|
||||
@semantic-release/github \
|
||||
@semantic-release/git \
|
||||
@semantic-release/changelog \
|
||||
--no-progress --no-package-lock --no-save
|
||||
- name: Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
run: npx semantic-release
|
||||
|
||||
418
node_modules/bl/BufferList.d.ts
generated
vendored
Normal file
418
node_modules/bl/BufferList.d.ts
generated
vendored
Normal file
@ -0,0 +1,418 @@
|
||||
export type BufferListAcceptedTypes =
|
||||
| Buffer
|
||||
| BufferList
|
||||
| Uint8Array
|
||||
| BufferListAcceptedTypes[]
|
||||
| string
|
||||
| number;
|
||||
|
||||
export interface BufferListConstructor {
|
||||
new (initData?: BufferListAcceptedTypes): BufferList;
|
||||
(initData?: BufferListAcceptedTypes): BufferList;
|
||||
|
||||
/**
|
||||
* Determines if the passed object is a BufferList. It will return true
|
||||
* if the passed object is an instance of BufferList or BufferListStream
|
||||
* and false otherwise.
|
||||
*
|
||||
* N.B. this won't return true for BufferList or BufferListStream instances
|
||||
* created by versions of this library before this static method was added.
|
||||
*
|
||||
* @param other
|
||||
*/
|
||||
|
||||
isBufferList(other: unknown): boolean;
|
||||
}
|
||||
|
||||
interface BufferList {
|
||||
prototype: Object
|
||||
|
||||
/**
|
||||
* Get the length of the list in bytes. This is the sum of the lengths
|
||||
* of all of the buffers contained in the list, minus any initial offset
|
||||
* for a semi-consumed buffer at the beginning. Should accurately
|
||||
* represent the total number of bytes that can be read from the list.
|
||||
*/
|
||||
|
||||
length: number;
|
||||
|
||||
/**
|
||||
* Adds an additional buffer or BufferList to the internal list.
|
||||
* this is returned so it can be chained.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
|
||||
append(buffer: BufferListAcceptedTypes): this;
|
||||
|
||||
/**
|
||||
* Will return the byte at the specified index.
|
||||
* @param index
|
||||
*/
|
||||
|
||||
get(index: number): number;
|
||||
|
||||
/**
|
||||
* Returns a new Buffer object containing the bytes within the
|
||||
* range specified. Both start and end are optional and will
|
||||
* default to the beginning and end of the list respectively.
|
||||
*
|
||||
* If the requested range spans a single internal buffer then a
|
||||
* slice of that buffer will be returned which shares the original
|
||||
* memory range of that Buffer. If the range spans multiple buffers
|
||||
* then copy operations will likely occur to give you a uniform Buffer.
|
||||
*
|
||||
* @param start
|
||||
* @param end
|
||||
*/
|
||||
|
||||
slice(start?: number, end?: number): Buffer;
|
||||
|
||||
/**
|
||||
* Returns a new BufferList object containing the bytes within the
|
||||
* range specified. Both start and end are optional and will default
|
||||
* to the beginning and end of the list respectively.
|
||||
*
|
||||
* No copies will be performed. All buffers in the result share
|
||||
* memory with the original list.
|
||||
*
|
||||
* @param start
|
||||
* @param end
|
||||
*/
|
||||
|
||||
shallowSlice(start?: number, end?: number): this;
|
||||
|
||||
/**
|
||||
* Copies the content of the list in the `dest` buffer, starting from
|
||||
* `destStart` and containing the bytes within the range specified
|
||||
* with `srcStart` to `srcEnd`.
|
||||
*
|
||||
* `destStart`, `start` and `end` are optional and will default to the
|
||||
* beginning of the dest buffer, and the beginning and end of the
|
||||
* list respectively.
|
||||
*
|
||||
* @param dest
|
||||
* @param destStart
|
||||
* @param srcStart
|
||||
* @param srcEnd
|
||||
*/
|
||||
|
||||
copy(
|
||||
dest: Buffer,
|
||||
destStart?: number,
|
||||
srcStart?: number,
|
||||
srcEnd?: number
|
||||
): Buffer;
|
||||
|
||||
/**
|
||||
* Performs a shallow-copy of the list. The internal Buffers remains the
|
||||
* same, so if you change the underlying Buffers, the change will be
|
||||
* reflected in both the original and the duplicate.
|
||||
*
|
||||
* This method is needed if you want to call consume() or pipe() and
|
||||
* still keep the original list.
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```js
|
||||
* var bl = new BufferListStream();
|
||||
* bl.append('hello');
|
||||
* bl.append(' world');
|
||||
* bl.append('\n');
|
||||
* bl.duplicate().pipe(process.stdout, { end: false });
|
||||
*
|
||||
* console.log(bl.toString())
|
||||
* ```
|
||||
*/
|
||||
|
||||
duplicate(): this;
|
||||
|
||||
/**
|
||||
* Will shift bytes off the start of the list. The number of bytes
|
||||
* consumed don't need to line up with the sizes of the internal
|
||||
* Buffers—initial offsets will be calculated accordingly in order
|
||||
* to give you a consistent view of the data.
|
||||
*
|
||||
* @param bytes
|
||||
*/
|
||||
|
||||
consume(bytes?: number): void;
|
||||
|
||||
/**
|
||||
* Will return a string representation of the buffer. The optional
|
||||
* `start` and `end` arguments are passed on to `slice()`, while
|
||||
* the encoding is passed on to `toString()` of the resulting Buffer.
|
||||
*
|
||||
* See the [`Buffer#toString()`](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end)
|
||||
* documentation for more information.
|
||||
*
|
||||
* @param encoding
|
||||
* @param start
|
||||
* @param end
|
||||
*/
|
||||
|
||||
toString(encoding?: string, start?: number, end?: number): string;
|
||||
|
||||
/**
|
||||
* Will return the byte at the specified index. indexOf() method
|
||||
* returns the first index at which a given element can be found
|
||||
* in the BufferList, or -1 if it is not present.
|
||||
*
|
||||
* @param value
|
||||
* @param byteOffset
|
||||
* @param encoding
|
||||
*/
|
||||
|
||||
indexOf(
|
||||
value: string | number | Uint8Array | BufferList | Buffer,
|
||||
byteOffset?: number,
|
||||
encoding?: string
|
||||
): number;
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readDoubleBE: Buffer['readDoubleBE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readDoubleLE: Buffer['readDoubleLE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readFloatBE: Buffer['readFloatBE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readFloatLE: Buffer['readFloatLE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readBigInt64BE: Buffer['readBigInt64BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readBigInt64LE: Buffer['readBigInt64LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readBigUInt64BE: Buffer['readBigUInt64BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readBigUInt64LE: Buffer['readBigUInt64LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readInt32BE: Buffer['readInt32BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readInt32LE: Buffer['readInt32LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUInt32BE: Buffer['readUInt32BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUInt32LE: Buffer['readUInt32LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readInt16BE: Buffer['readInt16BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readInt16LE: Buffer['readInt16LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUInt16BE: Buffer['readUInt16BE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUInt16LE: Buffer['readUInt16LE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readInt8: Buffer['readInt8'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUInt8: Buffer['readUInt8'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readIntBE: Buffer['readIntBE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readIntLE: Buffer['readIntLE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUIntBE: Buffer['readUIntBE'];
|
||||
|
||||
/**
|
||||
* All of the standard byte-reading methods of the Buffer interface are
|
||||
* implemented and will operate across internal Buffer boundaries transparently.
|
||||
*
|
||||
* See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html)
|
||||
* documentation for how these work.
|
||||
*
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
readUIntLE: Buffer['readUIntLE'];
|
||||
}
|
||||
|
||||
/**
|
||||
* No arguments are required for the constructor, but you can initialise
|
||||
* the list by passing in a single Buffer object or an array of Buffer
|
||||
* objects.
|
||||
*
|
||||
* `new` is not strictly required, if you don't instantiate a new object,
|
||||
* it will be done automatically for you so you can create a new instance
|
||||
* simply with:
|
||||
*
|
||||
* ```js
|
||||
* const { BufferList } = require('bl')
|
||||
* const bl = BufferList()
|
||||
*
|
||||
* // equivalent to:
|
||||
*
|
||||
* const { BufferList } = require('bl')
|
||||
* const bl = new BufferList()
|
||||
* ```
|
||||
*/
|
||||
|
||||
declare const BufferList: BufferListConstructor;
|
||||
400
node_modules/bl/BufferList.js
generated
vendored
Normal file
400
node_modules/bl/BufferList.js
generated
vendored
Normal file
@ -0,0 +1,400 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const symbol = Symbol.for('BufferList')
|
||||
|
||||
function BufferList (buf) {
|
||||
if (!(this instanceof BufferList)) {
|
||||
return new BufferList(buf)
|
||||
}
|
||||
|
||||
BufferList._init.call(this, buf)
|
||||
}
|
||||
|
||||
BufferList._init = function _init (buf) {
|
||||
Object.defineProperty(this, symbol, { value: true })
|
||||
|
||||
this._bufs = []
|
||||
this.length = 0
|
||||
|
||||
if (buf) {
|
||||
this.append(buf)
|
||||
}
|
||||
}
|
||||
|
||||
BufferList.prototype._new = function _new (buf) {
|
||||
return new BufferList(buf)
|
||||
}
|
||||
|
||||
BufferList.prototype._offset = function _offset (offset) {
|
||||
if (offset === 0) {
|
||||
return [0, 0]
|
||||
}
|
||||
|
||||
let tot = 0
|
||||
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
const _t = tot + this._bufs[i].length
|
||||
if (offset < _t || i === this._bufs.length - 1) {
|
||||
return [i, offset - tot]
|
||||
}
|
||||
tot = _t
|
||||
}
|
||||
}
|
||||
|
||||
BufferList.prototype._reverseOffset = function (blOffset) {
|
||||
const bufferId = blOffset[0]
|
||||
let offset = blOffset[1]
|
||||
|
||||
for (let i = 0; i < bufferId; i++) {
|
||||
offset += this._bufs[i].length
|
||||
}
|
||||
|
||||
return offset
|
||||
}
|
||||
|
||||
BufferList.prototype.get = function get (index) {
|
||||
if (index > this.length || index < 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
const offset = this._offset(index)
|
||||
|
||||
return this._bufs[offset[0]][offset[1]]
|
||||
}
|
||||
|
||||
BufferList.prototype.slice = function slice (start, end) {
|
||||
if (typeof start === 'number' && start < 0) {
|
||||
start += this.length
|
||||
}
|
||||
|
||||
if (typeof end === 'number' && end < 0) {
|
||||
end += this.length
|
||||
}
|
||||
|
||||
return this.copy(null, 0, start, end)
|
||||
}
|
||||
|
||||
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
|
||||
if (typeof srcStart !== 'number' || srcStart < 0) {
|
||||
srcStart = 0
|
||||
}
|
||||
|
||||
if (typeof srcEnd !== 'number' || srcEnd > this.length) {
|
||||
srcEnd = this.length
|
||||
}
|
||||
|
||||
if (srcStart >= this.length) {
|
||||
return dst || Buffer.alloc(0)
|
||||
}
|
||||
|
||||
if (srcEnd <= 0) {
|
||||
return dst || Buffer.alloc(0)
|
||||
}
|
||||
|
||||
const copy = !!dst
|
||||
const off = this._offset(srcStart)
|
||||
const len = srcEnd - srcStart
|
||||
let bytes = len
|
||||
let bufoff = (copy && dstStart) || 0
|
||||
let start = off[1]
|
||||
|
||||
// copy/slice everything
|
||||
if (srcStart === 0 && srcEnd === this.length) {
|
||||
if (!copy) {
|
||||
// slice, but full concat if multiple buffers
|
||||
return this._bufs.length === 1
|
||||
? this._bufs[0]
|
||||
: Buffer.concat(this._bufs, this.length)
|
||||
}
|
||||
|
||||
// copy, need to copy individual buffers
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
this._bufs[i].copy(dst, bufoff)
|
||||
bufoff += this._bufs[i].length
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
// easy, cheap case where it's a subset of one of the buffers
|
||||
if (bytes <= this._bufs[off[0]].length - start) {
|
||||
return copy
|
||||
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
|
||||
: this._bufs[off[0]].slice(start, start + bytes)
|
||||
}
|
||||
|
||||
if (!copy) {
|
||||
// a slice, we need something to copy in to
|
||||
dst = Buffer.allocUnsafe(len)
|
||||
}
|
||||
|
||||
for (let i = off[0]; i < this._bufs.length; i++) {
|
||||
const l = this._bufs[i].length - start
|
||||
|
||||
if (bytes > l) {
|
||||
this._bufs[i].copy(dst, bufoff, start)
|
||||
bufoff += l
|
||||
} else {
|
||||
this._bufs[i].copy(dst, bufoff, start, start + bytes)
|
||||
bufoff += l
|
||||
break
|
||||
}
|
||||
|
||||
bytes -= l
|
||||
|
||||
if (start) {
|
||||
start = 0
|
||||
}
|
||||
}
|
||||
|
||||
// safeguard so that we don't return uninitialized memory
|
||||
if (dst.length > bufoff) return dst.slice(0, bufoff)
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
|
||||
start = start || 0
|
||||
end = typeof end !== 'number' ? this.length : end
|
||||
|
||||
if (start < 0) {
|
||||
start += this.length
|
||||
}
|
||||
|
||||
if (end < 0) {
|
||||
end += this.length
|
||||
}
|
||||
|
||||
if (start === end) {
|
||||
return this._new()
|
||||
}
|
||||
|
||||
const startOffset = this._offset(start)
|
||||
const endOffset = this._offset(end)
|
||||
const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
|
||||
|
||||
if (endOffset[1] === 0) {
|
||||
buffers.pop()
|
||||
} else {
|
||||
buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1])
|
||||
}
|
||||
|
||||
if (startOffset[1] !== 0) {
|
||||
buffers[0] = buffers[0].slice(startOffset[1])
|
||||
}
|
||||
|
||||
return this._new(buffers)
|
||||
}
|
||||
|
||||
BufferList.prototype.toString = function toString (encoding, start, end) {
|
||||
return this.slice(start, end).toString(encoding)
|
||||
}
|
||||
|
||||
BufferList.prototype.consume = function consume (bytes) {
|
||||
// first, normalize the argument, in accordance with how Buffer does it
|
||||
bytes = Math.trunc(bytes)
|
||||
// do nothing if not a positive number
|
||||
if (Number.isNaN(bytes) || bytes <= 0) return this
|
||||
|
||||
while (this._bufs.length) {
|
||||
if (bytes >= this._bufs[0].length) {
|
||||
bytes -= this._bufs[0].length
|
||||
this.length -= this._bufs[0].length
|
||||
this._bufs.shift()
|
||||
} else {
|
||||
this._bufs[0] = this._bufs[0].slice(bytes)
|
||||
this.length -= bytes
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
BufferList.prototype.duplicate = function duplicate () {
|
||||
const copy = this._new()
|
||||
|
||||
for (let i = 0; i < this._bufs.length; i++) {
|
||||
copy.append(this._bufs[i])
|
||||
}
|
||||
|
||||
return copy
|
||||
}
|
||||
|
||||
BufferList.prototype.append = function append (buf) {
|
||||
if (buf == null) {
|
||||
return this
|
||||
}
|
||||
|
||||
if (buf.buffer) {
|
||||
// append a view of the underlying ArrayBuffer
|
||||
this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength))
|
||||
} else if (Array.isArray(buf)) {
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
this.append(buf[i])
|
||||
}
|
||||
} else if (this._isBufferList(buf)) {
|
||||
// unwrap argument into individual BufferLists
|
||||
for (let i = 0; i < buf._bufs.length; i++) {
|
||||
this.append(buf._bufs[i])
|
||||
}
|
||||
} else {
|
||||
// coerce number arguments to strings, since Buffer(number) does
|
||||
// uninitialized memory allocation
|
||||
if (typeof buf === 'number') {
|
||||
buf = buf.toString()
|
||||
}
|
||||
|
||||
this._appendBuffer(Buffer.from(buf))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
|
||||
this._bufs.push(buf)
|
||||
this.length += buf.length
|
||||
}
|
||||
|
||||
BufferList.prototype.indexOf = function (search, offset, encoding) {
|
||||
if (encoding === undefined && typeof offset === 'string') {
|
||||
encoding = offset
|
||||
offset = undefined
|
||||
}
|
||||
|
||||
if (typeof search === 'function' || Array.isArray(search)) {
|
||||
throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.')
|
||||
} else if (typeof search === 'number') {
|
||||
search = Buffer.from([search])
|
||||
} else if (typeof search === 'string') {
|
||||
search = Buffer.from(search, encoding)
|
||||
} else if (this._isBufferList(search)) {
|
||||
search = search.slice()
|
||||
} else if (Array.isArray(search.buffer)) {
|
||||
search = Buffer.from(search.buffer, search.byteOffset, search.byteLength)
|
||||
} else if (!Buffer.isBuffer(search)) {
|
||||
search = Buffer.from(search)
|
||||
}
|
||||
|
||||
offset = Number(offset || 0)
|
||||
|
||||
if (isNaN(offset)) {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
if (offset < 0) {
|
||||
offset = this.length + offset
|
||||
}
|
||||
|
||||
if (offset < 0) {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
if (search.length === 0) {
|
||||
return offset > this.length ? this.length : offset
|
||||
}
|
||||
|
||||
const blOffset = this._offset(offset)
|
||||
let blIndex = blOffset[0] // index of which internal buffer we're working on
|
||||
let buffOffset = blOffset[1] // offset of the internal buffer we're working on
|
||||
|
||||
// scan over each buffer
|
||||
for (; blIndex < this._bufs.length; blIndex++) {
|
||||
const buff = this._bufs[blIndex]
|
||||
|
||||
while (buffOffset < buff.length) {
|
||||
const availableWindow = buff.length - buffOffset
|
||||
|
||||
if (availableWindow >= search.length) {
|
||||
const nativeSearchResult = buff.indexOf(search, buffOffset)
|
||||
|
||||
if (nativeSearchResult !== -1) {
|
||||
return this._reverseOffset([blIndex, nativeSearchResult])
|
||||
}
|
||||
|
||||
buffOffset = buff.length - search.length + 1 // end of native search window
|
||||
} else {
|
||||
const revOffset = this._reverseOffset([blIndex, buffOffset])
|
||||
|
||||
if (this._match(revOffset, search)) {
|
||||
return revOffset
|
||||
}
|
||||
|
||||
buffOffset++
|
||||
}
|
||||
}
|
||||
|
||||
buffOffset = 0
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
BufferList.prototype._match = function (offset, search) {
|
||||
if (this.length - offset < search.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (let searchOffset = 0; searchOffset < search.length; searchOffset++) {
|
||||
if (this.get(offset + searchOffset) !== search[searchOffset]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
;(function () {
|
||||
const methods = {
|
||||
readDoubleBE: 8,
|
||||
readDoubleLE: 8,
|
||||
readFloatBE: 4,
|
||||
readFloatLE: 4,
|
||||
readBigInt64BE: 8,
|
||||
readBigInt64LE: 8,
|
||||
readBigUInt64BE: 8,
|
||||
readBigUInt64LE: 8,
|
||||
readInt32BE: 4,
|
||||
readInt32LE: 4,
|
||||
readUInt32BE: 4,
|
||||
readUInt32LE: 4,
|
||||
readInt16BE: 2,
|
||||
readInt16LE: 2,
|
||||
readUInt16BE: 2,
|
||||
readUInt16LE: 2,
|
||||
readInt8: 1,
|
||||
readUInt8: 1,
|
||||
readIntBE: null,
|
||||
readIntLE: null,
|
||||
readUIntBE: null,
|
||||
readUIntLE: null
|
||||
}
|
||||
|
||||
for (const m in methods) {
|
||||
(function (m) {
|
||||
if (methods[m] === null) {
|
||||
BufferList.prototype[m] = function (offset, byteLength) {
|
||||
return this.slice(offset, offset + byteLength)[m](0, byteLength)
|
||||
}
|
||||
} else {
|
||||
BufferList.prototype[m] = function (offset = 0) {
|
||||
return this.slice(offset, offset + methods[m])[m](0)
|
||||
}
|
||||
}
|
||||
}(m))
|
||||
}
|
||||
}())
|
||||
|
||||
// Used internally by the class and also as an indicator of this object being
|
||||
// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser
|
||||
// environment because there could be multiple different copies of the
|
||||
// BufferList class and some `BufferList`s might be `BufferList`s.
|
||||
BufferList.prototype._isBufferList = function _isBufferList (b) {
|
||||
return b instanceof BufferList || BufferList.isBufferList(b)
|
||||
}
|
||||
|
||||
BufferList.isBufferList = function isBufferList (b) {
|
||||
return b != null && b[symbol]
|
||||
}
|
||||
|
||||
module.exports = BufferList
|
||||
142
node_modules/bl/CHANGELOG.md
generated
vendored
Normal file
142
node_modules/bl/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,142 @@
|
||||
## [6.0.14](https://github.com/rvagg/bl/compare/v6.0.13...v6.0.14) (2024-07-10)
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 4.0.2 to 4.0.3 ([09aa80d](https://github.com/rvagg/bl/commit/09aa80de083f045f0fd92414e97f2c241f8f15bf))
|
||||
|
||||
## [6.0.13](https://github.com/rvagg/bl/compare/v6.0.12...v6.0.13) (2024-06-21)
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 5.4.5 to 5.5.2 ([41eff82](https://github.com/rvagg/bl/commit/41eff826534912051ab60fe7c36baad7a3c09492))
|
||||
|
||||
## [6.0.12](https://github.com/rvagg/bl/compare/v6.0.11...v6.0.12) (2024-03-07)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 5.3.3 to 5.4.2 ([18e99a2](https://github.com/rvagg/bl/commit/18e99a233d82c0ff5f3b00b04aab5a4ce6d37452))
|
||||
|
||||
## [6.0.11](https://github.com/rvagg/bl/compare/v6.0.10...v6.0.11) (2024-02-08)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 4.0.1 to 4.0.2 ([d8ac460](https://github.com/rvagg/bl/commit/d8ac460597a24b0e783da2acd6ab37eacbbb0af5))
|
||||
* update Node.js versions in CI ([863a5e0](https://github.com/rvagg/bl/commit/863a5e02f2c144c54be88ff962b0a902684c6527))
|
||||
|
||||
## [6.0.10](https://github.com/rvagg/bl/compare/v6.0.9...v6.0.10) (2024-01-01)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 4.0.0 to 4.0.1 ([a018907](https://github.com/rvagg/bl/commit/a0189073aee3e906b135a37595f8b4007e6dd3e7))
|
||||
|
||||
## [6.0.9](https://github.com/rvagg/bl/compare/v6.0.8...v6.0.9) (2023-11-27)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 5.2.2 to 5.3.2 ([bb294bd](https://github.com/rvagg/bl/commit/bb294bd7baa5c5e1e062bd23b5d714692e04d414))
|
||||
|
||||
## [6.0.8](https://github.com/rvagg/bl/compare/v6.0.7...v6.0.8) (2023-10-25)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/checkout from 3 to 4 ([a9ad973](https://github.com/rvagg/bl/commit/a9ad973d1fe4e5f673fe3b9b72b4484136e1655d))
|
||||
* **deps:** bump actions/setup-node from 3.8.1 to 4.0.0 ([5921489](https://github.com/rvagg/bl/commit/59214897520fd6ba6d20a7cf370373275d4cfe1d))
|
||||
|
||||
## [6.0.7](https://github.com/rvagg/bl/compare/v6.0.6...v6.0.7) (2023-08-25)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 5.1.6 to 5.2.2 ([7e539ad](https://github.com/rvagg/bl/commit/7e539ad2e9cf959f431e82eaafe137cf33cf22ef))
|
||||
|
||||
## [6.0.6](https://github.com/rvagg/bl/compare/v6.0.5...v6.0.6) (2023-08-18)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 3.8.0 to 3.8.1 ([39d3e17](https://github.com/rvagg/bl/commit/39d3e1729f0a7ddeac21e02b7983b0255ea212a2))
|
||||
|
||||
## [6.0.5](https://github.com/rvagg/bl/compare/v6.0.4...v6.0.5) (2023-08-15)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 3.7.0 to 3.8.0 ([183d80a](https://github.com/rvagg/bl/commit/183d80a616a32e5473ac47e46cecd19ca0dfcf9f))
|
||||
|
||||
## [6.0.4](https://github.com/rvagg/bl/compare/v6.0.3...v6.0.4) (2023-08-07)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump @types/readable-stream from 2.3.15 to 4.0.0 ([dd8cdb0](https://github.com/rvagg/bl/commit/dd8cdb0c64e1272c21d3bb251971afaaabbb0a1b))
|
||||
|
||||
## [6.0.3](https://github.com/rvagg/bl/compare/v6.0.2...v6.0.3) (2023-07-07)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps:** bump actions/setup-node from 3.6.0 to 3.7.0 ([40ac0a5](https://github.com/rvagg/bl/commit/40ac0a52e3c1ef83ae95d9433aebe4135f79b761))
|
||||
|
||||
## [6.0.2](https://github.com/rvagg/bl/compare/v6.0.1...v6.0.2) (2023-06-05)
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 5.0.4 to 5.1.3 ([bea30ad](https://github.com/rvagg/bl/commit/bea30addef635d30f6e97769afacf5049615cdfe))
|
||||
|
||||
## [6.0.1](https://github.com/rvagg/bl/compare/v6.0.0...v6.0.1) (2023-03-17)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* release with Node.js 18 ([6965a1d](https://github.com/rvagg/bl/commit/6965a1dee6b2af5bca304c8c9b747b796a652ffd))
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* **deps-dev:** bump typescript from 4.9.5 to 5.0.2 ([0885658](https://github.com/rvagg/bl/commit/0885658f7c1696220ac846e5bbc19f8b6ae8d3c0))
|
||||
* **no-release:** bump actions/setup-node from 3.5.1 to 3.6.0 ([#120](https://github.com/rvagg/bl/issues/120)) ([60bee1b](https://github.com/rvagg/bl/commit/60bee1bd37a9f1a2a128f506f7da008c094db5c4))
|
||||
* **no-release:** bump typescript from 4.8.4 to 4.9.3 ([#118](https://github.com/rvagg/bl/issues/118)) ([8be6dd6](https://github.com/rvagg/bl/commit/8be6dd62f639fd6c2c2f7d5d6ac4db988adb1886))
|
||||
|
||||
## [6.0.0](https://github.com/rvagg/bl/compare/v5.1.0...v6.0.0) (2022-10-19)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* **deps:** bump readable-stream from 3.6.0 to 4.2.0
|
||||
* added bigint (Int64) support
|
||||
|
||||
### Features
|
||||
|
||||
* added bigint (Int64) support ([131ad32](https://github.com/rvagg/bl/commit/131ad3217b91090323513a8ea3ef179e8427cf47))
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* add TypeScript definitions for BigInt ([78c5ff4](https://github.com/rvagg/bl/commit/78c5ff489235a4e4233086c364133123c71acef4))
|
||||
* **deps-dev:** bump typescript from 4.7.4 to 4.8.4 ([dba13e1](https://github.com/rvagg/bl/commit/dba13e1cadc5857dde6a9425e975faf2abbb270f))
|
||||
* **deps:** bump readable-stream from 3.6.0 to 4.2.0 ([fa03eda](https://github.com/rvagg/bl/commit/fa03eda54b4412c0fdfc9053bd0b0bebaf80bfd9))
|
||||
* **docs:** BigInt in API docs ([c68af50](https://github.com/rvagg/bl/commit/c68af500a04b2c3a14132ae6946412d2e39402d0))
|
||||
|
||||
## [5.1.0](https://github.com/rvagg/bl/compare/v5.0.0...v5.1.0) (2022-10-18)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* added integrated TypeScript typings ([#108](https://github.com/rvagg/bl/issues/108)) ([433ff89](https://github.com/rvagg/bl/commit/433ff8942f47fab8a5c9d13b2c00989ccf8d0710))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* windows support in tests ([387dfaf](https://github.com/rvagg/bl/commit/387dfaf9b2bca7849f12785436ceb01e42adac2c))
|
||||
|
||||
|
||||
### Trivial Changes
|
||||
|
||||
* GH Actions, Dependabot, auto-release, remove Travis ([997f058](https://github.com/rvagg/bl/commit/997f058357de8f2a7f66998e80a72b491835573f))
|
||||
* **no-release:** bump standard from 16.0.4 to 17.0.0 ([#112](https://github.com/rvagg/bl/issues/112)) ([078bfe3](https://github.com/rvagg/bl/commit/078bfe33390d125297b1c946e5989c4aa9228961))
|
||||
13
node_modules/bl/LICENSE.md
generated
vendored
Normal file
13
node_modules/bl/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
The MIT License (MIT)
|
||||
=====================
|
||||
|
||||
Copyright (c) 2013-2019 bl contributors
|
||||
----------------------------------
|
||||
|
||||
*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
247
node_modules/bl/README.md
generated
vendored
Normal file
247
node_modules/bl/README.md
generated
vendored
Normal file
@ -0,0 +1,247 @@
|
||||
# bl *(BufferList)*
|
||||
|
||||
[](https://travis-ci.com/rvagg/bl/)
|
||||
|
||||
**A Node.js Buffer list collector, reader and streamer thingy.**
|
||||
|
||||
[](https://nodei.co/npm/bl/)
|
||||
|
||||
**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them!
|
||||
|
||||
The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently.
|
||||
|
||||
```js
|
||||
const { BufferList } = require('bl')
|
||||
|
||||
const bl = new BufferList()
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append('hi') // bl will also accept & convert Strings
|
||||
bl.append(Buffer.from('j'))
|
||||
bl.append(Buffer.from([ 0x3, 0x4 ]))
|
||||
|
||||
console.log(bl.length) // 12
|
||||
|
||||
console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij'
|
||||
console.log(bl.slice(3, 10).toString('ascii')) // 'defghij'
|
||||
console.log(bl.slice(3, 6).toString('ascii')) // 'def'
|
||||
console.log(bl.slice(3, 8).toString('ascii')) // 'defgh'
|
||||
console.log(bl.slice(5, 10).toString('ascii')) // 'fghij'
|
||||
|
||||
console.log(bl.indexOf('def')) // 3
|
||||
console.log(bl.indexOf('asdf')) // -1
|
||||
|
||||
// or just use toString!
|
||||
console.log(bl.toString()) // 'abcdefghij\u0003\u0004'
|
||||
console.log(bl.toString('ascii', 3, 8)) // 'defgh'
|
||||
console.log(bl.toString('ascii', 5, 10)) // 'fghij'
|
||||
|
||||
// other standard Buffer readables
|
||||
console.log(bl.readUInt16BE(10)) // 0x0304
|
||||
console.log(bl.readUInt16LE(10)) // 0x0403
|
||||
```
|
||||
|
||||
Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**:
|
||||
|
||||
```js
|
||||
const { BufferListStream } = require('bl')
|
||||
const fs = require('fs')
|
||||
|
||||
fs.createReadStream('README.md')
|
||||
.pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required
|
||||
// `data` is a complete Buffer object containing the full data
|
||||
console.log(data.toString())
|
||||
}))
|
||||
```
|
||||
|
||||
Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream.
|
||||
|
||||
Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!):
|
||||
|
||||
```js
|
||||
const hyperquest = require('hyperquest')
|
||||
const { BufferListStream } = require('bl')
|
||||
|
||||
const url = 'https://raw.github.com/rvagg/bl/master/README.md'
|
||||
|
||||
hyperquest(url).pipe(BufferListStream((err, data) => {
|
||||
console.log(data.toString())
|
||||
}))
|
||||
```
|
||||
|
||||
Or, use it as a readable stream to recompose a list of Buffers to an output source:
|
||||
|
||||
```js
|
||||
const { BufferListStream } = require('bl')
|
||||
const fs = require('fs')
|
||||
|
||||
var bl = new BufferListStream()
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
|
||||
bl.pipe(fs.createWriteStream('gibberish.txt'))
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
* <a href="#ctor"><code><b>new BufferList([ buf ])</b></code></a>
|
||||
* <a href="#isBufferList"><code><b>BufferList.isBufferList(obj)</b></code></a>
|
||||
* <a href="#length"><code>bl.<b>length</b></code></a>
|
||||
* <a href="#append"><code>bl.<b>append(buffer)</b></code></a>
|
||||
* <a href="#get"><code>bl.<b>get(index)</b></code></a>
|
||||
* <a href="#indexOf"><code>bl.<b>indexOf(value[, byteOffset][, encoding])</b></code></a>
|
||||
* <a href="#slice"><code>bl.<b>slice([ start[, end ] ])</b></code></a>
|
||||
* <a href="#shallowSlice"><code>bl.<b>shallowSlice([ start[, end ] ])</b></code></a>
|
||||
* <a href="#copy"><code>bl.<b>copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])</b></code></a>
|
||||
* <a href="#duplicate"><code>bl.<b>duplicate()</b></code></a>
|
||||
* <a href="#consume"><code>bl.<b>consume(bytes)</b></code></a>
|
||||
* <a href="#toString"><code>bl.<b>toString([encoding, [ start, [ end ]]])</b></code></a>
|
||||
* <a href="#readXX"><code>bl.<b>readDoubleBE()</b></code>, <code>bl.<b>readDoubleLE()</b></code>, <code>bl.<b>readFloatBE()</b></code>, <code>bl.<b>readFloatLE()</b></code>, <code>bl.<b>readBigInt64BE()</b></code>, <code>bl.<b>readBigInt64LE()</b></code>, <code>bl.<b>readBigUInt64BE()</b></code>, <code>bl.<b>readBigUInt64LE()</b></code>, <code>bl.<b>readInt32BE()</b></code>, <code>bl.<b>readInt32LE()</b></code>, <code>bl.<b>readUInt32BE()</b></code>, <code>bl.<b>readUInt32LE()</b></code>, <code>bl.<b>readInt16BE()</b></code>, <code>bl.<b>readInt16LE()</b></code>, <code>bl.<b>readUInt16BE()</b></code>, <code>bl.<b>readUInt16LE()</b></code>, <code>bl.<b>readInt8()</b></code>, <code>bl.<b>readUInt8()</b></code></a>
|
||||
* <a href="#ctorStream"><code><b>new BufferListStream([ callback ])</b></code></a>
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="ctor"></a>
|
||||
### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ])
|
||||
No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects.
|
||||
|
||||
`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with:
|
||||
|
||||
```js
|
||||
const { BufferList } = require('bl')
|
||||
const bl = BufferList()
|
||||
|
||||
// equivalent to:
|
||||
|
||||
const { BufferList } = require('bl')
|
||||
const bl = new BufferList()
|
||||
```
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="isBufferList"></a>
|
||||
### BufferList.isBufferList(obj)
|
||||
Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise.
|
||||
|
||||
N.B. this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="length"></a>
|
||||
### bl.length
|
||||
Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="append"></a>
|
||||
### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
|
||||
`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="get"></a>
|
||||
### bl.get(index)
|
||||
`get()` will return the byte at the specified index.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="indexOf"></a>
|
||||
### bl.indexOf(value[, byteOffset][, encoding])
|
||||
`get()` will return the byte at the specified index.
|
||||
`indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="slice"></a>
|
||||
### bl.slice([ start, [ end ] ])
|
||||
`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
|
||||
|
||||
If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="shallowSlice"></a>
|
||||
### bl.shallowSlice([ start, [ end ] ])
|
||||
`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively.
|
||||
|
||||
No copies will be performed. All buffers in the result share memory with the original list.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="copy"></a>
|
||||
### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ])
|
||||
`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="duplicate"></a>
|
||||
### bl.duplicate()
|
||||
`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example:
|
||||
|
||||
```js
|
||||
var bl = new BufferListStream()
|
||||
|
||||
bl.append('hello')
|
||||
bl.append(' world')
|
||||
bl.append('\n')
|
||||
|
||||
bl.duplicate().pipe(process.stdout, { end: false })
|
||||
|
||||
console.log(bl.toString())
|
||||
```
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="consume"></a>
|
||||
### bl.consume(bytes)
|
||||
`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers—initial offsets will be calculated accordingly in order to give you a consistent view of the data.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="toString"></a>
|
||||
### bl.toString([encoding, [ start, [ end ]]])
|
||||
`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="readXX"></a>
|
||||
### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readBigInt64BE(), bl.readBigInt64LE(), bl.readBigUInt64BE(), bl.readBigUInt64LE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8()
|
||||
|
||||
All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently.
|
||||
|
||||
See the <b><code>[Buffer](http://nodejs.org/docs/latest/api/buffer.html)</code></b> documentation for how these work.
|
||||
|
||||
--------------------------------------------------------
|
||||
<a name="ctorStream"></a>
|
||||
### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
|
||||
**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance.
|
||||
|
||||
The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
|
||||
|
||||
Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object.
|
||||
|
||||
`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with:
|
||||
|
||||
```js
|
||||
const { BufferListStream } = require('bl')
|
||||
const bl = BufferListStream()
|
||||
|
||||
// equivalent to:
|
||||
|
||||
const { BufferListStream } = require('bl')
|
||||
const bl = new BufferListStream()
|
||||
```
|
||||
|
||||
N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`:
|
||||
|
||||
```js
|
||||
const { BufferListStream } = require('bl')
|
||||
// equivalent to:
|
||||
const BufferListStream = require('bl')
|
||||
```
|
||||
|
||||
--------------------------------------------------------
|
||||
|
||||
## Contributors
|
||||
|
||||
**bl** is brought to you by the following hackers:
|
||||
|
||||
* [Rod Vagg](https://github.com/rvagg)
|
||||
* [Matteo Collina](https://github.com/mcollina)
|
||||
* [Jarett Cruger](https://github.com/jcrugzz)
|
||||
|
||||
<a name="license"></a>
|
||||
## License & copyright
|
||||
|
||||
Copyright (c) 2013-2019 bl contributors (listed above).
|
||||
|
||||
bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.
|
||||
84
node_modules/bl/bl.js
generated
vendored
Normal file
84
node_modules/bl/bl.js
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
'use strict'
|
||||
|
||||
const DuplexStream = require('readable-stream').Duplex
|
||||
const inherits = require('inherits')
|
||||
const BufferList = require('./BufferList')
|
||||
|
||||
function BufferListStream (callback) {
|
||||
if (!(this instanceof BufferListStream)) {
|
||||
return new BufferListStream(callback)
|
||||
}
|
||||
|
||||
if (typeof callback === 'function') {
|
||||
this._callback = callback
|
||||
|
||||
const piper = function piper (err) {
|
||||
if (this._callback) {
|
||||
this._callback(err)
|
||||
this._callback = null
|
||||
}
|
||||
}.bind(this)
|
||||
|
||||
this.on('pipe', function onPipe (src) {
|
||||
src.on('error', piper)
|
||||
})
|
||||
this.on('unpipe', function onUnpipe (src) {
|
||||
src.removeListener('error', piper)
|
||||
})
|
||||
|
||||
callback = null
|
||||
}
|
||||
|
||||
BufferList._init.call(this, callback)
|
||||
DuplexStream.call(this)
|
||||
}
|
||||
|
||||
inherits(BufferListStream, DuplexStream)
|
||||
Object.assign(BufferListStream.prototype, BufferList.prototype)
|
||||
|
||||
BufferListStream.prototype._new = function _new (callback) {
|
||||
return new BufferListStream(callback)
|
||||
}
|
||||
|
||||
BufferListStream.prototype._write = function _write (buf, encoding, callback) {
|
||||
this._appendBuffer(buf)
|
||||
|
||||
if (typeof callback === 'function') {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
|
||||
BufferListStream.prototype._read = function _read (size) {
|
||||
if (!this.length) {
|
||||
return this.push(null)
|
||||
}
|
||||
|
||||
size = Math.min(size, this.length)
|
||||
this.push(this.slice(0, size))
|
||||
this.consume(size)
|
||||
}
|
||||
|
||||
BufferListStream.prototype.end = function end (chunk) {
|
||||
DuplexStream.prototype.end.call(this, chunk)
|
||||
|
||||
if (this._callback) {
|
||||
this._callback(null, this.slice())
|
||||
this._callback = null
|
||||
}
|
||||
}
|
||||
|
||||
BufferListStream.prototype._destroy = function _destroy (err, cb) {
|
||||
this._bufs.length = 0
|
||||
this.length = 0
|
||||
cb(err)
|
||||
}
|
||||
|
||||
BufferListStream.prototype._isBufferList = function _isBufferList (b) {
|
||||
return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b)
|
||||
}
|
||||
|
||||
BufferListStream.isBufferList = BufferList.isBufferList
|
||||
|
||||
module.exports = BufferListStream
|
||||
module.exports.BufferListStream = BufferListStream
|
||||
module.exports.BufferList = BufferList
|
||||
88
node_modules/bl/index.d.ts
generated
vendored
Normal file
88
node_modules/bl/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
import { Duplex } from "readable-stream";
|
||||
import {
|
||||
BufferList as BL,
|
||||
BufferListConstructor,
|
||||
BufferListAcceptedTypes,
|
||||
} from "./BufferList";
|
||||
|
||||
type BufferListStreamInit =
|
||||
| ((err: Error, buffer: Buffer) => void)
|
||||
| BufferListAcceptedTypes;
|
||||
|
||||
interface BufferListStreamConstructor {
|
||||
new (initData?: BufferListStreamInit): BufferListStream;
|
||||
(callback?: BufferListStreamInit): BufferListStream;
|
||||
|
||||
/**
|
||||
* Determines if the passed object is a BufferList. It will return true
|
||||
* if the passed object is an instance of BufferList or BufferListStream
|
||||
* and false otherwise.
|
||||
*
|
||||
* N.B. this won't return true for BufferList or BufferListStream instances
|
||||
* created by versions of this library before this static method was added.
|
||||
*
|
||||
* @param other
|
||||
*/
|
||||
|
||||
isBufferList(other: unknown): boolean;
|
||||
|
||||
/**
|
||||
* Rexporting BufferList and BufferListStream to fix
|
||||
* issue with require/commonjs import and "export = " below.
|
||||
*/
|
||||
|
||||
BufferList: BufferListConstructor;
|
||||
BufferListStream: BufferListStreamConstructor;
|
||||
}
|
||||
|
||||
interface BufferListStream extends Duplex, BL {
|
||||
prototype: BufferListStream & BL;
|
||||
}
|
||||
|
||||
/**
|
||||
* BufferListStream is a Node Duplex Stream, so it can be read from
|
||||
* and written to like a standard Node stream. You can also pipe()
|
||||
* to and from a BufferListStream instance.
|
||||
*
|
||||
* The constructor takes an optional callback, if supplied, the
|
||||
* callback will be called with an error argument followed by a
|
||||
* reference to the bl instance, when bl.end() is called
|
||||
* (i.e. from a piped stream).
|
||||
*
|
||||
* This is a convenient method of collecting the entire contents of
|
||||
* a stream, particularly when the stream is chunky, such as a network
|
||||
* stream.
|
||||
*
|
||||
* Normally, no arguments are required for the constructor, but you can
|
||||
* initialise the list by passing in a single Buffer object or an array
|
||||
* of Buffer object.
|
||||
*
|
||||
* `new` is not strictly required, if you don't instantiate a new object,
|
||||
* it will be done automatically for you so you can create a new instance
|
||||
* simply with:
|
||||
*
|
||||
* ```js
|
||||
* const { BufferListStream } = require('bl');
|
||||
* const bl = BufferListStream();
|
||||
*
|
||||
* // equivalent to:
|
||||
*
|
||||
* const { BufferListStream } = require('bl');
|
||||
* const bl = new BufferListStream();
|
||||
* ```
|
||||
*
|
||||
* N.B. For backwards compatibility reasons, BufferListStream is the default
|
||||
* export when you `require('bl')`:
|
||||
*
|
||||
* ```js
|
||||
* const { BufferListStream } = require('bl')
|
||||
*
|
||||
* // equivalent to:
|
||||
*
|
||||
* const BufferListStream = require('bl')
|
||||
* ```
|
||||
*/
|
||||
|
||||
declare const BufferListStream: BufferListStreamConstructor;
|
||||
|
||||
export = BufferListStream;
|
||||
47
node_modules/bl/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
47
node_modules/bl/node_modules/readable-stream/LICENSE
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
Node.js is licensed for use as follows:
|
||||
|
||||
"""
|
||||
Copyright Node.js contributors. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
This license applies to parts of Node.js originating from the
|
||||
https://github.com/joyent/node repository:
|
||||
|
||||
"""
|
||||
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
116
node_modules/bl/node_modules/readable-stream/README.md
generated
vendored
Normal file
116
node_modules/bl/node_modules/readable-stream/README.md
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
# readable-stream
|
||||
|
||||
**_Node.js core streams for userland_**
|
||||
|
||||
[](https://npm.im/readable-stream)
|
||||
[](https://www.npmjs.org/package/readable-stream)
|
||||
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ANode.js)
|
||||
[](https://github.com/nodejs/readable-stream/actions?query=workflow%3ABrowsers)
|
||||
|
||||
```bash
|
||||
npm install readable-stream
|
||||
```
|
||||
|
||||
This package is a mirror of the streams implementations in Node.js 18.19.0.
|
||||
|
||||
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v18.19.0/docs/api/stream.html).
|
||||
|
||||
If you want to guarantee a stable streams base, regardless of what version of
|
||||
Node you, or the users of your libraries are using, use **readable-stream** _only_ and avoid the _"stream"_ module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
|
||||
|
||||
As of version 2.0.0 **readable-stream** uses semantic versioning.
|
||||
|
||||
## Version 4.x.x
|
||||
|
||||
v4.x.x of `readable-stream` is a cut from Node 18. This version supports Node 12, 14, 16 and 18, as well as evergreen browsers.
|
||||
The breaking changes introduced by v4 are composed of the combined breaking changes in:
|
||||
* [Node v12](https://nodejs.org/en/blog/release/v12.0.0/)
|
||||
* [Node v13](https://nodejs.org/en/blog/release/v13.0.0/)
|
||||
* [Node v14](https://nodejs.org/en/blog/release/v14.0.0/)
|
||||
* [Node v15](https://nodejs.org/en/blog/release/v15.0.0/)
|
||||
* [Node v16](https://nodejs.org/en/blog/release/v16.0.0/)
|
||||
* [Node v17](https://nodejs.org/en/blog/release/v17.0.0/)
|
||||
* [Node v18](https://nodejs.org/en/blog/release/v18.0.0/)
|
||||
|
||||
This also includes _many_ new features.
|
||||
|
||||
## Version 3.x.x
|
||||
|
||||
v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
|
||||
|
||||
1. Error codes: https://github.com/nodejs/node/pull/13310,
|
||||
https://github.com/nodejs/node/pull/13291,
|
||||
https://github.com/nodejs/node/pull/16589,
|
||||
https://github.com/nodejs/node/pull/15042,
|
||||
https://github.com/nodejs/node/pull/15665,
|
||||
https://github.com/nodejs/readable-stream/pull/344
|
||||
2. 'readable' have precedence over flowing
|
||||
https://github.com/nodejs/node/pull/18994
|
||||
3. make virtual methods errors consistent
|
||||
https://github.com/nodejs/node/pull/18813
|
||||
4. updated streams error handling
|
||||
https://github.com/nodejs/node/pull/18438
|
||||
5. writable.end should return this.
|
||||
https://github.com/nodejs/node/pull/18780
|
||||
6. readable continues to read when push('')
|
||||
https://github.com/nodejs/node/pull/18211
|
||||
7. add custom inspect to BufferList
|
||||
https://github.com/nodejs/node/pull/17907
|
||||
8. always defer 'readable' with nextTick
|
||||
https://github.com/nodejs/node/pull/17979
|
||||
|
||||
## Version 2.x.x
|
||||
|
||||
v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
|
||||
|
||||
# Usage
|
||||
|
||||
You can swap your `require('stream')` with `require('readable-stream')`
|
||||
without any changes, if you are just using one of the main classes and
|
||||
functions.
|
||||
|
||||
```js
|
||||
const {
|
||||
Readable,
|
||||
Writable,
|
||||
Transform,
|
||||
Duplex,
|
||||
pipeline,
|
||||
finished
|
||||
} = require('readable-stream')
|
||||
```
|
||||
|
||||
Note that `require('stream')` will return `Stream`, while
|
||||
`require('readable-stream')` will return `Readable`. We discourage using
|
||||
whatever is exported directly, but rather use one of the properties as
|
||||
shown in the example above.
|
||||
|
||||
## Usage In Browsers
|
||||
|
||||
You will need a bundler like [`browserify`](https://github.com/browserify/browserify#readme), [`webpack`](https://webpack.js.org/), [`parcel`](https://github.com/parcel-bundler/parcel#readme) or similar. Polyfills are no longer required since version 4.2.0.
|
||||
|
||||
# Streams Working Group
|
||||
|
||||
`readable-stream` is maintained by the Streams Working Group, which
|
||||
oversees the development and maintenance of the Streams API within
|
||||
Node.js. The responsibilities of the Streams Working Group include:
|
||||
|
||||
- Addressing stream issues on the Node.js issue tracker.
|
||||
- Authoring and editing stream documentation within the Node.js project.
|
||||
- Reviewing changes to stream subclasses within the Node.js project.
|
||||
- Redirecting changes to streams from the Node.js project to this
|
||||
project.
|
||||
- Assisting in the implementation of stream providers within Node.js.
|
||||
- Recommending versions of `readable-stream` to be included in Node.js.
|
||||
- Messaging about the future of streams to give the community advance
|
||||
notice of changes.
|
||||
|
||||
<a name="members"></a>
|
||||
|
||||
## Team Members
|
||||
|
||||
- **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
|
||||
- **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
|
||||
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
|
||||
- **Robert Nagy** ([@ronag](https://github.com/ronag)) <ronagy@icloud.com>
|
||||
- **Vincent Weevers** ([@vweevers](https://github.com/vweevers)) <mail@vincentweevers.nl>
|
||||
4
node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
4
node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Duplex
|
||||
4
node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
4
node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').PassThrough
|
||||
4
node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
4
node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Readable
|
||||
4
node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
4
node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Transform
|
||||
4
node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
4
node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict'
|
||||
|
||||
// Keep this file as an alias for the full stream module.
|
||||
module.exports = require('./stream').Writable
|
||||
52
node_modules/bl/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js
generated
vendored
Normal file
52
node_modules/bl/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
'use strict'
|
||||
|
||||
const { SymbolDispose } = require('../../ours/primordials')
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { isNodeStream, isWebStream, kControllerErrorFunction } = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const { ERR_INVALID_ARG_TYPE } = codes
|
||||
let addAbortListener
|
||||
|
||||
// This method is inlined here for readable-stream
|
||||
// It also does not allow for signal to not exist on the stream
|
||||
// https://github.com/nodejs/node/pull/36061#discussion_r533718029
|
||||
const validateAbortSignal = (signal, name) => {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
|
||||
}
|
||||
}
|
||||
module.exports.addAbortSignal = function addAbortSignal(signal, stream) {
|
||||
validateAbortSignal(signal, 'signal')
|
||||
if (!isNodeStream(stream) && !isWebStream(stream)) {
|
||||
throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
|
||||
}
|
||||
return module.exports.addAbortSignalNoValidate(signal, stream)
|
||||
}
|
||||
module.exports.addAbortSignalNoValidate = function (signal, stream) {
|
||||
if (typeof signal !== 'object' || !('aborted' in signal)) {
|
||||
return stream
|
||||
}
|
||||
const onAbort = isNodeStream(stream)
|
||||
? () => {
|
||||
stream.destroy(
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
: () => {
|
||||
stream[kControllerErrorFunction](
|
||||
new AbortError(undefined, {
|
||||
cause: signal.reason
|
||||
})
|
||||
)
|
||||
}
|
||||
if (signal.aborted) {
|
||||
onAbort()
|
||||
} else {
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
const disposable = addAbortListener(signal, onAbort)
|
||||
eos(stream, disposable[SymbolDispose])
|
||||
}
|
||||
return stream
|
||||
}
|
||||
157
node_modules/bl/node_modules/readable-stream/lib/internal/streams/buffer_list.js
generated
vendored
Normal file
157
node_modules/bl/node_modules/readable-stream/lib/internal/streams/buffer_list.js
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
'use strict'
|
||||
|
||||
const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { inspect } = require('../../ours/util')
|
||||
module.exports = class BufferList {
|
||||
constructor() {
|
||||
this.head = null
|
||||
this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
push(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: null
|
||||
}
|
||||
if (this.length > 0) this.tail.next = entry
|
||||
else this.head = entry
|
||||
this.tail = entry
|
||||
++this.length
|
||||
}
|
||||
unshift(v) {
|
||||
const entry = {
|
||||
data: v,
|
||||
next: this.head
|
||||
}
|
||||
if (this.length === 0) this.tail = entry
|
||||
this.head = entry
|
||||
++this.length
|
||||
}
|
||||
shift() {
|
||||
if (this.length === 0) return
|
||||
const ret = this.head.data
|
||||
if (this.length === 1) this.head = this.tail = null
|
||||
else this.head = this.head.next
|
||||
--this.length
|
||||
return ret
|
||||
}
|
||||
clear() {
|
||||
this.head = this.tail = null
|
||||
this.length = 0
|
||||
}
|
||||
join(s) {
|
||||
if (this.length === 0) return ''
|
||||
let p = this.head
|
||||
let ret = '' + p.data
|
||||
while ((p = p.next) !== null) ret += s + p.data
|
||||
return ret
|
||||
}
|
||||
concat(n) {
|
||||
if (this.length === 0) return Buffer.alloc(0)
|
||||
const ret = Buffer.allocUnsafe(n >>> 0)
|
||||
let p = this.head
|
||||
let i = 0
|
||||
while (p) {
|
||||
TypedArrayPrototypeSet(ret, p.data, i)
|
||||
i += p.data.length
|
||||
p = p.next
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes or characters from the buffered data.
|
||||
consume(n, hasStrings) {
|
||||
const data = this.head.data
|
||||
if (n < data.length) {
|
||||
// `slice` is the same for buffers and strings.
|
||||
const slice = data.slice(0, n)
|
||||
this.head.data = data.slice(n)
|
||||
return slice
|
||||
}
|
||||
if (n === data.length) {
|
||||
// First chunk is a perfect match.
|
||||
return this.shift()
|
||||
}
|
||||
// Result spans more than one buffer.
|
||||
return hasStrings ? this._getString(n) : this._getBuffer(n)
|
||||
}
|
||||
first() {
|
||||
return this.head.data
|
||||
}
|
||||
*[SymbolIterator]() {
|
||||
for (let p = this.head; p; p = p.next) {
|
||||
yield p.data
|
||||
}
|
||||
}
|
||||
|
||||
// Consumes a specified amount of characters from the buffered data.
|
||||
_getString(n) {
|
||||
let ret = ''
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const str = p.data
|
||||
if (n > str.length) {
|
||||
ret += str
|
||||
n -= str.length
|
||||
} else {
|
||||
if (n === str.length) {
|
||||
ret += str
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
ret += StringPrototypeSlice(str, 0, n)
|
||||
this.head = p
|
||||
p.data = StringPrototypeSlice(str, n)
|
||||
}
|
||||
break
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Consumes a specified amount of bytes from the buffered data.
|
||||
_getBuffer(n) {
|
||||
const ret = Buffer.allocUnsafe(n)
|
||||
const retLen = n
|
||||
let p = this.head
|
||||
let c = 0
|
||||
do {
|
||||
const buf = p.data
|
||||
if (n > buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
n -= buf.length
|
||||
} else {
|
||||
if (n === buf.length) {
|
||||
TypedArrayPrototypeSet(ret, buf, retLen - n)
|
||||
++c
|
||||
if (p.next) this.head = p.next
|
||||
else this.head = this.tail = null
|
||||
} else {
|
||||
TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n)
|
||||
this.head = p
|
||||
p.data = buf.slice(n)
|
||||
}
|
||||
break
|
||||
}
|
||||
++c
|
||||
} while ((p = p.next) !== null)
|
||||
this.length -= c
|
||||
return ret
|
||||
}
|
||||
|
||||
// Make sure the linked list only shows the minimal necessary information.
|
||||
[Symbol.for('nodejs.util.inspect.custom')](_, options) {
|
||||
return inspect(this, {
|
||||
...options,
|
||||
// Only inspect one level.
|
||||
depth: 0,
|
||||
// It should not recurse.
|
||||
customInspect: false
|
||||
})
|
||||
}
|
||||
}
|
||||
194
node_modules/bl/node_modules/readable-stream/lib/internal/streams/compose.js
generated
vendored
Normal file
194
node_modules/bl/node_modules/readable-stream/lib/internal/streams/compose.js
generated
vendored
Normal file
@ -0,0 +1,194 @@
|
||||
'use strict'
|
||||
|
||||
const { pipeline } = require('./pipeline')
|
||||
const Duplex = require('./duplex')
|
||||
const { destroyer } = require('./destroy')
|
||||
const {
|
||||
isNodeStream,
|
||||
isReadable,
|
||||
isWritable,
|
||||
isWebStream,
|
||||
isTransformStream,
|
||||
isWritableStream,
|
||||
isReadableStream
|
||||
} = require('./utils')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }
|
||||
} = require('../../ours/errors')
|
||||
const eos = require('./end-of-stream')
|
||||
module.exports = function compose(...streams) {
|
||||
if (streams.length === 0) {
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
if (streams.length === 1) {
|
||||
return Duplex.from(streams[0])
|
||||
}
|
||||
const orgStreams = [...streams]
|
||||
if (typeof streams[0] === 'function') {
|
||||
streams[0] = Duplex.from(streams[0])
|
||||
}
|
||||
if (typeof streams[streams.length - 1] === 'function') {
|
||||
const idx = streams.length - 1
|
||||
streams[idx] = Duplex.from(streams[idx])
|
||||
}
|
||||
for (let n = 0; n < streams.length; ++n) {
|
||||
if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {
|
||||
// TODO(ronag): Add checks for non streams.
|
||||
continue
|
||||
}
|
||||
if (
|
||||
n < streams.length - 1 &&
|
||||
!(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n]))
|
||||
) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable')
|
||||
}
|
||||
if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) {
|
||||
throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable')
|
||||
}
|
||||
}
|
||||
let ondrain
|
||||
let onfinish
|
||||
let onreadable
|
||||
let onclose
|
||||
let d
|
||||
function onfinished(err) {
|
||||
const cb = onclose
|
||||
onclose = null
|
||||
if (cb) {
|
||||
cb(err)
|
||||
} else if (err) {
|
||||
d.destroy(err)
|
||||
} else if (!readable && !writable) {
|
||||
d.destroy()
|
||||
}
|
||||
}
|
||||
const head = streams[0]
|
||||
const tail = pipeline(streams, onfinished)
|
||||
const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head))
|
||||
const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail))
|
||||
|
||||
// TODO(ronag): Avoid double buffering.
|
||||
// Implement Writable/Readable/Duplex traits.
|
||||
// See, https://github.com/nodejs/node/pull/33515.
|
||||
d = new Duplex({
|
||||
// TODO (ronag): highWaterMark?
|
||||
writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode),
|
||||
readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode),
|
||||
writable,
|
||||
readable
|
||||
})
|
||||
if (writable) {
|
||||
if (isNodeStream(head)) {
|
||||
d._write = function (chunk, encoding, callback) {
|
||||
if (head.write(chunk, encoding)) {
|
||||
callback()
|
||||
} else {
|
||||
ondrain = callback
|
||||
}
|
||||
}
|
||||
d._final = function (callback) {
|
||||
head.end()
|
||||
onfinish = callback
|
||||
}
|
||||
head.on('drain', function () {
|
||||
if (ondrain) {
|
||||
const cb = ondrain
|
||||
ondrain = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
} else if (isWebStream(head)) {
|
||||
const writable = isTransformStream(head) ? head.writable : head
|
||||
const writer = writable.getWriter()
|
||||
d._write = async function (chunk, encoding, callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
callback()
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
d._final = async function (callback) {
|
||||
try {
|
||||
await writer.ready
|
||||
writer.close().catch(() => {})
|
||||
onfinish = callback
|
||||
} catch (err) {
|
||||
callback(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
const toRead = isTransformStream(tail) ? tail.readable : tail
|
||||
eos(toRead, () => {
|
||||
if (onfinish) {
|
||||
const cb = onfinish
|
||||
onfinish = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
}
|
||||
if (readable) {
|
||||
if (isNodeStream(tail)) {
|
||||
tail.on('readable', function () {
|
||||
if (onreadable) {
|
||||
const cb = onreadable
|
||||
onreadable = null
|
||||
cb()
|
||||
}
|
||||
})
|
||||
tail.on('end', function () {
|
||||
d.push(null)
|
||||
})
|
||||
d._read = function () {
|
||||
while (true) {
|
||||
const buf = tail.read()
|
||||
if (buf === null) {
|
||||
onreadable = d._read
|
||||
return
|
||||
}
|
||||
if (!d.push(buf)) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isWebStream(tail)) {
|
||||
const readable = isTransformStream(tail) ? tail.readable : tail
|
||||
const reader = readable.getReader()
|
||||
d._read = async function () {
|
||||
while (true) {
|
||||
try {
|
||||
const { value, done } = await reader.read()
|
||||
if (!d.push(value)) {
|
||||
return
|
||||
}
|
||||
if (done) {
|
||||
d.push(null)
|
||||
return
|
||||
}
|
||||
} catch {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
d._destroy = function (err, callback) {
|
||||
if (!err && onclose !== null) {
|
||||
err = new AbortError()
|
||||
}
|
||||
onreadable = null
|
||||
ondrain = null
|
||||
onfinish = null
|
||||
if (onclose === null) {
|
||||
callback(err)
|
||||
} else {
|
||||
onclose = callback
|
||||
if (isNodeStream(tail)) {
|
||||
destroyer(tail, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
|
||||
290
node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
290
node_modules/bl/node_modules/readable-stream/lib/internal/streams/destroy.js
generated
vendored
Normal file
@ -0,0 +1,290 @@
|
||||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: { ERR_MULTIPLE_CALLBACK },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { Symbol } = require('../../ours/primordials')
|
||||
const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = require('./utils')
|
||||
const kDestroy = Symbol('kDestroy')
|
||||
const kConstruct = Symbol('kConstruct')
|
||||
function checkError(err, w, r) {
|
||||
if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Backwards compat. cb() is undocumented and unused in core but
|
||||
// unfortunately might be used by modules.
|
||||
function destroy(err, cb) {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
// With duplex streams we use the writable side for state.
|
||||
const s = w || r
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
if (typeof cb === 'function') {
|
||||
cb()
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
// We set destroyed to true before firing error callbacks in order
|
||||
// to make it re-entrance safe in case destroy() is called within callbacks
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.destroyed = true
|
||||
}
|
||||
if (r) {
|
||||
r.destroyed = true
|
||||
}
|
||||
|
||||
// If still constructing then defer calling _destroy.
|
||||
if (!s.constructed) {
|
||||
this.once(kDestroy, function (er) {
|
||||
_destroy(this, aggregateTwoErrors(er, err), cb)
|
||||
})
|
||||
} else {
|
||||
_destroy(this, err, cb)
|
||||
}
|
||||
return this
|
||||
}
|
||||
function _destroy(self, err, cb) {
|
||||
let called = false
|
||||
function onDestroy(err) {
|
||||
if (called) {
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
checkError(err, w, r)
|
||||
if (w) {
|
||||
w.closed = true
|
||||
}
|
||||
if (r) {
|
||||
r.closed = true
|
||||
}
|
||||
if (typeof cb === 'function') {
|
||||
cb(err)
|
||||
}
|
||||
if (err) {
|
||||
process.nextTick(emitErrorCloseNT, self, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseNT, self)
|
||||
}
|
||||
}
|
||||
try {
|
||||
self._destroy(err || null, onDestroy)
|
||||
} catch (err) {
|
||||
onDestroy(err)
|
||||
}
|
||||
}
|
||||
function emitErrorCloseNT(self, err) {
|
||||
emitErrorNT(self, err)
|
||||
emitCloseNT(self)
|
||||
}
|
||||
function emitCloseNT(self) {
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if (w) {
|
||||
w.closeEmitted = true
|
||||
}
|
||||
if (r) {
|
||||
r.closeEmitted = true
|
||||
}
|
||||
if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) {
|
||||
self.emit('close')
|
||||
}
|
||||
}
|
||||
function emitErrorNT(self, err) {
|
||||
const r = self._readableState
|
||||
const w = self._writableState
|
||||
if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) {
|
||||
return
|
||||
}
|
||||
if (w) {
|
||||
w.errorEmitted = true
|
||||
}
|
||||
if (r) {
|
||||
r.errorEmitted = true
|
||||
}
|
||||
self.emit('error', err)
|
||||
}
|
||||
function undestroy() {
|
||||
const r = this._readableState
|
||||
const w = this._writableState
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
r.closed = false
|
||||
r.closeEmitted = false
|
||||
r.destroyed = false
|
||||
r.errored = null
|
||||
r.errorEmitted = false
|
||||
r.reading = false
|
||||
r.ended = r.readable === false
|
||||
r.endEmitted = r.readable === false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
w.destroyed = false
|
||||
w.closed = false
|
||||
w.closeEmitted = false
|
||||
w.errored = null
|
||||
w.errorEmitted = false
|
||||
w.finalCalled = false
|
||||
w.prefinished = false
|
||||
w.ended = w.writable === false
|
||||
w.ending = w.writable === false
|
||||
w.finished = w.writable === false
|
||||
}
|
||||
}
|
||||
function errorOrDestroy(stream, err, sync) {
|
||||
// We have tests that rely on errors being emitted
|
||||
// in the same tick, so changing this is semver major.
|
||||
// For now when you opt-in to autoDestroy we allow
|
||||
// the error to be emitted nextTick. In a future
|
||||
// semver major update we should change the default to this.
|
||||
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) {
|
||||
return this
|
||||
}
|
||||
if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy))
|
||||
stream.destroy(err)
|
||||
else if (err) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
err.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (w && !w.errored) {
|
||||
w.errored = err
|
||||
}
|
||||
if (r && !r.errored) {
|
||||
r.errored = err
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(emitErrorNT, stream, err)
|
||||
} else {
|
||||
emitErrorNT(stream, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function construct(stream, cb) {
|
||||
if (typeof stream._construct !== 'function') {
|
||||
return
|
||||
}
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
if (r) {
|
||||
r.constructed = false
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = false
|
||||
}
|
||||
stream.once(kConstruct, cb)
|
||||
if (stream.listenerCount(kConstruct) > 1) {
|
||||
// Duplex
|
||||
return
|
||||
}
|
||||
process.nextTick(constructNT, stream)
|
||||
}
|
||||
function constructNT(stream) {
|
||||
let called = false
|
||||
function onConstruct(err) {
|
||||
if (called) {
|
||||
errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
called = true
|
||||
const r = stream._readableState
|
||||
const w = stream._writableState
|
||||
const s = w || r
|
||||
if (r) {
|
||||
r.constructed = true
|
||||
}
|
||||
if (w) {
|
||||
w.constructed = true
|
||||
}
|
||||
if (s.destroyed) {
|
||||
stream.emit(kDestroy, err)
|
||||
} else if (err) {
|
||||
errorOrDestroy(stream, err, true)
|
||||
} else {
|
||||
process.nextTick(emitConstructNT, stream)
|
||||
}
|
||||
}
|
||||
try {
|
||||
stream._construct((err) => {
|
||||
process.nextTick(onConstruct, err)
|
||||
})
|
||||
} catch (err) {
|
||||
process.nextTick(onConstruct, err)
|
||||
}
|
||||
}
|
||||
function emitConstructNT(stream) {
|
||||
stream.emit(kConstruct)
|
||||
}
|
||||
function isRequest(stream) {
|
||||
return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function'
|
||||
}
|
||||
function emitCloseLegacy(stream) {
|
||||
stream.emit('close')
|
||||
}
|
||||
function emitErrorCloseLegacy(stream, err) {
|
||||
stream.emit('error', err)
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
|
||||
// Normalize destroy for legacy.
|
||||
function destroyer(stream, err) {
|
||||
if (!stream || isDestroyed(stream)) {
|
||||
return
|
||||
}
|
||||
if (!err && !isFinished(stream)) {
|
||||
err = new AbortError()
|
||||
}
|
||||
|
||||
// TODO: Remove isRequest branches.
|
||||
if (isServerRequest(stream)) {
|
||||
stream.socket = null
|
||||
stream.destroy(err)
|
||||
} else if (isRequest(stream)) {
|
||||
stream.abort()
|
||||
} else if (isRequest(stream.req)) {
|
||||
stream.req.abort()
|
||||
} else if (typeof stream.destroy === 'function') {
|
||||
stream.destroy(err)
|
||||
} else if (typeof stream.close === 'function') {
|
||||
// TODO: Don't lose err?
|
||||
stream.close()
|
||||
} else if (err) {
|
||||
process.nextTick(emitErrorCloseLegacy, stream, err)
|
||||
} else {
|
||||
process.nextTick(emitCloseLegacy, stream)
|
||||
}
|
||||
if (!stream.destroyed) {
|
||||
stream[kIsDestroyed] = true
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
construct,
|
||||
destroyer,
|
||||
destroy,
|
||||
undestroy,
|
||||
errorOrDestroy
|
||||
}
|
||||
143
node_modules/bl/node_modules/readable-stream/lib/internal/streams/duplex.js
generated
vendored
Normal file
143
node_modules/bl/node_modules/readable-stream/lib/internal/streams/duplex.js
generated
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototype inheritance, this class
|
||||
// prototypically inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ObjectDefineProperties,
|
||||
ObjectGetOwnPropertyDescriptor,
|
||||
ObjectKeys,
|
||||
ObjectSetPrototypeOf
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Duplex
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype)
|
||||
ObjectSetPrototypeOf(Duplex, Readable)
|
||||
{
|
||||
const keys = ObjectKeys(Writable.prototype)
|
||||
// Allow the keys array to be GC'ed.
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const method = keys[i]
|
||||
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]
|
||||
}
|
||||
}
|
||||
// A Duplex stream is both readable and writable; Writable's methods are
// mixed into the prototype above, Readable is the primary parent.
function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options)
  Readable.call(this, options)
  Writable.call(this, options)

  if (!options) {
    // No options given: keep both sides open until each ends on its own.
    this.allowHalfOpen = true
    return
  }

  this.allowHalfOpen = options.allowHalfOpen !== false

  if (options.readable === false) {
    // Caller asked for a write-only duplex: mark the readable side as
    // already ended.
    this._readableState.readable = false
    this._readableState.ended = true
    this._readableState.endEmitted = true
  }

  if (options.writable === false) {
    // Caller asked for a read-only duplex: mark the writable side as
    // already finished.
    this._writableState.writable = false
    this._writableState.ending = true
    this._writableState.ended = true
    this._writableState.finished = true
  }
}
|
||||
// Copy the writable-side accessors from Writable.prototype onto
// Duplex.prototype (the prototype chain only reaches Readable).
// `__proto__: null` keeps each descriptor object prototype-less.
ObjectDefineProperties(Duplex.prototype, {
  writable: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable')
  },
  writableHighWaterMark: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark')
  },
  writableObjectMode: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode')
  },
  writableBuffer: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer')
  },
  writableLength: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength')
  },
  writableFinished: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished')
  },
  writableCorked: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked')
  },
  writableEnded: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded')
  },
  writableNeedDrain: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain')
  },
  destroyed: {
    __proto__: null,
    // A Duplex counts as destroyed only when BOTH of its sides are
    // destroyed; before construction completes (states undefined) it
    // reports false.
    get() {
      if (this._readableState === undefined || this._writableState === undefined) {
        return false
      }
      return this._readableState.destroyed && this._writableState.destroyed
    },
    set(value) {
      // Backward compatibility, the user is explicitly
      // managing destroyed.
      if (this._readableState && this._writableState) {
        this._readableState.destroyed = value
        this._writableState.destroyed = value
      }
    }
  }
})
|
||||
let webStreamsAdapters

// Lazy to avoid circular references
// NOTE(review): this initializes the adapters to a plain empty object, so
// the adapter methods used by fromWeb/toWeb below do not exist here and
// those calls would throw a TypeError. Presumably the real adapter
// require was stripped during vendoring — verify before relying on
// Duplex.fromWeb/Duplex.toWeb from this bundle.
function lazyWebStreams() {
  if (webStreamsAdapters === undefined) webStreamsAdapters = {}
  return webStreamsAdapters
}

// Build a Duplex from a web { readable, writable } pair (see NOTE above).
Duplex.fromWeb = function (pair, options) {
  return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options)
}

// Build a web { readable, writable } pair from a Duplex (see NOTE above).
Duplex.toWeb = function (duplex) {
  return lazyWebStreams().newReadableWritablePairFromDuplex(duplex)
}

// Coerce an arbitrary "body" (stream, iterable, async function, thenable,
// …) into a Duplex. ./duplexify is required lazily to avoid a circular
// dependency with this module.
let duplexify
Duplex.from = function (body) {
  if (!duplexify) {
    duplexify = require('./duplexify')
  }
  return duplexify(body, 'body')
}
|
||||
378
node_modules/bl/node_modules/readable-stream/lib/internal/streams/duplexify.js
generated
vendored
Normal file
378
node_modules/bl/node_modules/readable-stream/lib/internal/streams/duplexify.js
generated
vendored
Normal file
@ -0,0 +1,378 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
;('use strict')
|
||||
const bufferModule = require('buffer')
|
||||
const {
|
||||
isReadable,
|
||||
isWritable,
|
||||
isIterable,
|
||||
isNodeStream,
|
||||
isReadableNodeStream,
|
||||
isWritableNodeStream,
|
||||
isDuplexNodeStream,
|
||||
isReadableStream,
|
||||
isWritableStream
|
||||
} = require('./utils')
|
||||
const eos = require('./end-of-stream')
|
||||
const {
|
||||
AbortError,
|
||||
codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE }
|
||||
} = require('../../ours/errors')
|
||||
const { destroyer } = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const Readable = require('./readable')
|
||||
const Writable = require('./writable')
|
||||
const { createDeferredPromise } = require('../../ours/util')
|
||||
const from = require('./from')
|
||||
const Blob = globalThis.Blob || bufferModule.Blob
|
||||
const isBlob =
|
||||
typeof Blob !== 'undefined'
|
||||
? function isBlob(b) {
|
||||
return b instanceof Blob
|
||||
}
|
||||
: function isBlob(b) {
|
||||
return false
|
||||
}
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const { FunctionPrototypeCall } = require('../../ours/primordials')
|
||||
|
||||
// This is needed for pre node 17.
|
||||
// Duplex subclass that re-applies the one-sided (readable/writable: false)
// state flags in its own constructor; needed for pre node 17.
class Duplexify extends Duplex {
  constructor(options) {
    super(options)

    // https://github.com/nodejs/node/pull/34385
    const wantsReadable = options === null || options === undefined ? undefined : options.readable
    const wantsWritable = options === null || options === undefined ? undefined : options.writable

    if (wantsReadable === false) {
      this._readableState.readable = false
      this._readableState.ended = true
      this._readableState.endEmitted = true
    }

    if (wantsWritable === false) {
      this._writableState.writable = false
      this._writableState.ending = true
      this._writableState.ended = true
      this._writableState.finished = true
    }
  }
}
|
||||
// Convert an arbitrary `body` into a Duplex stream. `name` is only used in
// error messages. Inputs are matched in order: Node streams, web streams,
// (async) functions, Blobs, iterables, { readable, writable } pairs, and
// thenables; anything else throws ERR_INVALID_ARG_TYPE.
module.exports = function duplexify(body, name) {
  // Already a Node duplex: pass through untouched.
  if (isDuplexNodeStream(body)) {
    return body
  }
  // One-sided Node streams get the missing side stubbed out by _duplexify.
  if (isReadableNodeStream(body)) {
    return _duplexify({
      readable: body
    })
  }
  if (isWritableNodeStream(body)) {
    return _duplexify({
      writable: body
    })
  }
  // A Node stream that is neither readable nor writable: both sides closed.
  if (isNodeStream(body)) {
    return _duplexify({
      writable: false,
      readable: false
    })
  }
  // Web streams are first adapted to Node streams.
  if (isReadableStream(body)) {
    return _duplexify({
      readable: Readable.fromWeb(body)
    })
  }
  if (isWritableStream(body)) {
    return _duplexify({
      writable: Writable.fromWeb(body)
    })
  }
  if (typeof body === 'function') {
    // The function receives an async iterable of written chunks plus an
    // abort signal; what it returns determines the readable side.
    const { value, write, final, destroy } = fromAsyncGen(body)
    if (isIterable(value)) {
      return from(Duplexify, value, {
        // TODO (ronag): highWaterMark?
        objectMode: true,
        write,
        final,
        destroy
      })
    }
    const then = value === null || value === undefined ? undefined : value.then
    if (typeof then === 'function') {
      // The function returned a promise: the duplex is write-only and
      // _final waits for that promise before completing.
      let d
      const promise = FunctionPrototypeCall(
        then,
        value,
        (val) => {
          // The promise must resolve to a nully value.
          if (val != null) {
            throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val)
          }
        },
        (err) => {
          destroyer(d, err)
        }
      )
      return (d = new Duplexify({
        // TODO (ronag): highWaterMark?
        objectMode: true,
        readable: false,
        write,
        final(cb) {
          final(async () => {
            try {
              await promise
              process.nextTick(cb, null)
            } catch (err) {
              process.nextTick(cb, err)
            }
          })
        },
        destroy
      }))
    }
    throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value)
  }
  if (isBlob(body)) {
    // Recurse with the Blob's ArrayBuffer promise (picked up by the
    // thenable branch below).
    return duplexify(body.arrayBuffer())
  }
  // A bare iterable becomes a read-only duplex.
  if (isIterable(body)) {
    return from(Duplexify, body, {
      // TODO (ronag): highWaterMark?
      objectMode: true,
      writable: false
    })
  }
  // A { readable, writable } pair of WEB streams.
  if (
    isReadableStream(body === null || body === undefined ? undefined : body.readable) &&
    isWritableStream(body === null || body === undefined ? undefined : body.writable)
  ) {
    return Duplexify.fromWeb(body)
  }
  // A { readable, writable } pair of (possibly non-Node) stream objects;
  // non-Node sides are recursively duplexified.
  if (
    typeof (body === null || body === undefined ? undefined : body.writable) === 'object' ||
    typeof (body === null || body === undefined ? undefined : body.readable) === 'object'
  ) {
    const readable =
      body !== null && body !== undefined && body.readable
        ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable)
          ? body === null || body === undefined
            ? undefined
            : body.readable
          : duplexify(body.readable)
        : undefined
    const writable =
      body !== null && body !== undefined && body.writable
        ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable)
          ? body === null || body === undefined
            ? undefined
            : body.writable
          : duplexify(body.writable)
        : undefined
    return _duplexify({
      readable,
      writable
    })
  }
  // A plain thenable: read-only duplex that emits the resolved value (if
  // non-nully) and then EOF; rejection destroys the duplex.
  const then = body === null || body === undefined ? undefined : body.then
  if (typeof then === 'function') {
    let d
    FunctionPrototypeCall(
      then,
      body,
      (val) => {
        if (val != null) {
          d.push(val)
        }
        d.push(null)
      },
      (err) => {
        destroyer(d, err)
      }
    )
    return (d = new Duplexify({
      objectMode: true,
      writable: false,
      read() {}
    }))
  }
  throw new ERR_INVALID_ARG_TYPE(
    name,
    [
      'Blob',
      'ReadableStream',
      'WritableStream',
      'Stream',
      'Iterable',
      'AsyncIterable',
      'Function',
      '{ readable, writable } pair',
      'Promise'
    ],
    body
  )
}
|
||||
// Adapt a user-supplied async function `fn` into the pieces of a Duplex:
// `fn` is invoked with an async iterable of written chunks plus an abort
// signal. write/final feed that iterable through a rolling deferred
// promise — exactly one pending { chunk, done, cb } at a time.
function fromAsyncGen(fn) {
  let { promise, resolve } = createDeferredPromise()
  const ac = new AbortController()
  const signal = ac.signal
  const value = fn(
    (async function* () {
      while (true) {
        const _promise = promise
        promise = null
        const { chunk, done, cb } = await _promise
        // Acknowledge the pending write/final on the next tick.
        process.nextTick(cb)
        if (done) return
        if (signal.aborted)
          throw new AbortError(undefined, {
            cause: signal.reason
          })
        // Re-arm the deferred BEFORE yielding so the next write has a
        // promise to resolve.
        ;({ promise, resolve } = createDeferredPromise())
        yield chunk
      }
    })(),
    {
      signal
    }
  )
  return {
    // Whatever fn returned (iterable, promise, …); interpreted by the caller.
    value,
    write(chunk, encoding, cb) {
      const _resolve = resolve
      resolve = null
      _resolve({
        chunk,
        done: false,
        cb
      })
    },
    final(cb) {
      const _resolve = resolve
      resolve = null
      _resolve({
        done: true,
        cb
      })
    },
    destroy(err, cb) {
      // Signal abortion to the generator body, then report completion.
      ac.abort()
      cb(err)
    }
  }
}
|
||||
// Wrap a { readable, writable } pair of Node streams into one Duplex.
function _duplexify(pair) {
  // Old-style readables without read() are upgraded via Readable.wrap().
  const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable
  const w = pair.writable
  let readable = !!isReadable(r)
  let writable = !!isWritable(w)
  // Pending callbacks bridging the inner streams to the outer Duplex.
  let ondrain
  let onfinish
  let onreadable
  let onclose
  let d
  // Called when an inner stream reaches end-of-stream: routes the result
  // to a pending _destroy callback, or destroys the outer Duplex on error.
  function onfinished(err) {
    const cb = onclose
    onclose = null
    if (cb) {
      cb(err)
    } else if (err) {
      d.destroy(err)
    }
  }

  // TODO(ronag): Avoid double buffering.
  // Implement Writable/Readable/Duplex traits.
  // See, https://github.com/nodejs/node/pull/33515.
  d = new Duplexify({
    // TODO (ronag): highWaterMark?
    readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode),
    writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode),
    readable,
    writable
  })
  if (writable) {
    eos(w, (err) => {
      writable = false
      if (err) {
        destroyer(r, err)
      }
      onfinished(err)
    })
    // Forward writes to the inner writable; when its buffer is full the
    // callback is parked until 'drain'.
    d._write = function (chunk, encoding, callback) {
      if (w.write(chunk, encoding)) {
        callback()
      } else {
        ondrain = callback
      }
    }
    d._final = function (callback) {
      w.end()
      onfinish = callback
    }
    w.on('drain', function () {
      if (ondrain) {
        const cb = ondrain
        ondrain = null
        cb()
      }
    })
    w.on('finish', function () {
      if (onfinish) {
        const cb = onfinish
        onfinish = null
        cb()
      }
    })
  }
  if (readable) {
    eos(r, (err) => {
      readable = false
      if (err) {
        destroyer(r, err)
      }
      onfinished(err)
    })
    r.on('readable', function () {
      if (onreadable) {
        const cb = onreadable
        onreadable = null
        cb()
      }
    })
    r.on('end', function () {
      d.push(null)
    })
    // Pull from the inner readable until it runs dry (re-arm via
    // 'readable') or the outer Duplex applies backpressure.
    d._read = function () {
      while (true) {
        const buf = r.read()
        if (buf === null) {
          onreadable = d._read
          return
        }
        if (!d.push(buf)) {
          return
        }
      }
    }
  }
  d._destroy = function (err, callback) {
    // onclose is set to null by onfinished(); while it is not null the
    // inner streams have not both finished, so an errorless destroy is
    // treated as an abort.
    if (!err && onclose !== null) {
      err = new AbortError()
    }
    onreadable = null
    ondrain = null
    onfinish = null
    if (onclose === null) {
      callback(err)
    } else {
      // Defer the destroy callback until onfinished() fires, and tear
      // down both inner streams.
      onclose = callback
      destroyer(w, err)
      destroyer(r, err)
    }
  }
  return d
}
|
||||
284
node_modules/bl/node_modules/readable-stream/lib/internal/streams/end-of-stream.js
generated
vendored
Normal file
284
node_modules/bl/node_modules/readable-stream/lib/internal/streams/end-of-stream.js
generated
vendored
Normal file
@ -0,0 +1,284 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Ported from https://github.com/mafintosh/end-of-stream with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
;('use strict')
|
||||
const { AbortError, codes } = require('../../ours/errors')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes
|
||||
const { kEmptyObject, once } = require('../../ours/util')
|
||||
const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require('../validators')
|
||||
const { Promise, PromisePrototypeThen, SymbolDispose } = require('../../ours/primordials')
|
||||
const {
|
||||
isClosed,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isReadableStream,
|
||||
isReadableFinished,
|
||||
isReadableErrored,
|
||||
isWritable,
|
||||
isWritableNodeStream,
|
||||
isWritableStream,
|
||||
isWritableFinished,
|
||||
isWritableErrored,
|
||||
isNodeStream,
|
||||
willEmitClose: _willEmitClose,
|
||||
kIsClosedPromise
|
||||
} = require('./utils')
|
||||
let addAbortListener
|
||||
// Duck-types http.ClientRequest-like objects: they expose setHeader and a
// callable abort(). Mirrors the original `a && b` short-circuit, so a
// falsy `setHeader` is returned as-is (e.g. undefined rather than false).
function isRequest(stream) {
  return !stream.setHeader ? stream.setHeader : typeof stream.abort === 'function'
}

// Shared no-op used wherever a callback slot must remain callable.
const nop = () => {}
|
||||
// Core end-of-stream detection. Invokes `callback` exactly once (it is
// wrapped with once()) when `stream` has ended/finished, errored, or
// closed prematurely. Accepts Node streams and web streams (delegated to
// eosWeb). Returns a cleanup function that detaches every listener this
// call attached.
function eos(stream, options, callback) {
  var _options$readable, _options$writable
  // eos(stream, callback) shorthand.
  if (arguments.length === 2) {
    callback = options
    options = kEmptyObject
  } else if (options == null) {
    options = kEmptyObject
  } else {
    validateObject(options, 'options')
  }
  validateFunction(callback, 'callback')
  validateAbortSignal(options.signal, 'options.signal')
  callback = once(callback)
  if (isReadableStream(stream) || isWritableStream(stream)) {
    return eosWeb(stream, options, callback)
  }
  if (!isNodeStream(stream)) {
    throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream)
  }
  // Which sides to wait for: explicit option wins, else detect from stream.
  const readable =
    (_options$readable = options.readable) !== null && _options$readable !== undefined
      ? _options$readable
      : isReadableNodeStream(stream)
  const writable =
    (_options$writable = options.writable) !== null && _options$writable !== undefined
      ? _options$writable
      : isWritableNodeStream(stream)
  const wState = stream._writableState
  const rState = stream._readableState
  const onlegacyfinish = () => {
    if (!stream.writable) {
      onfinish()
    }
  }

  // TODO (ronag): Improve soft detection to include core modules and
  // common ecosystem modules that do properly emit 'close' but fail
  // this generic check.
  let willEmitClose =
    _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable
  let writableFinished = isWritableFinished(stream, false)
  const onfinish = () => {
    writableFinished = true
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false
    }
    // If 'close' is still coming, wait for it instead of calling back now.
    if (willEmitClose && (!stream.readable || readable)) {
      return
    }
    if (!readable || readableFinished) {
      callback.call(stream)
    }
  }
  let readableFinished = isReadableFinished(stream, false)
  const onend = () => {
    readableFinished = true
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false
    }
    if (willEmitClose && (!stream.writable || writable)) {
      return
    }
    if (!writable || writableFinished) {
      callback.call(stream)
    }
  }
  const onerror = (err) => {
    callback.call(stream, err)
  }
  let closed = isClosed(stream)
  const onclose = () => {
    closed = true
    const errored = isWritableErrored(stream) || isReadableErrored(stream)
    if (errored && typeof errored !== 'boolean') {
      return callback.call(stream, errored)
    }
    // 'close' before the watched side finished => premature close.
    if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
      if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
    }
    if (writable && !writableFinished) {
      if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE())
    }
    callback.call(stream)
  }
  // Like onclose, but for streams already known to be done (scheduled via
  // nextTick below) — no premature-close checks.
  const onclosed = () => {
    closed = true
    const errored = isWritableErrored(stream) || isReadableErrored(stream)
    if (errored && typeof errored !== 'boolean') {
      return callback.call(stream, errored)
    }
    callback.call(stream)
  }
  const onrequest = () => {
    stream.req.on('finish', onfinish)
  }
  if (isRequest(stream)) {
    stream.on('complete', onfinish)
    if (!willEmitClose) {
      stream.on('abort', onclose)
    }
    if (stream.req) {
      onrequest()
    } else {
      stream.on('request', onrequest)
    }
  } else if (writable && !wState) {
    // legacy streams
    stream.on('end', onlegacyfinish)
    stream.on('close', onlegacyfinish)
  }

  // Not all streams will emit 'close' after 'aborted'.
  if (!willEmitClose && typeof stream.aborted === 'boolean') {
    stream.on('aborted', onclose)
  }
  stream.on('end', onend)
  stream.on('finish', onfinish)
  if (options.error !== false) {
    stream.on('error', onerror)
  }
  stream.on('close', onclose)
  // Streams that are already done never re-emit their terminal events, so
  // schedule the callback manually on the next tick.
  if (closed) {
    process.nextTick(onclose)
  } else if (
    (wState !== null && wState !== undefined && wState.errorEmitted) ||
    (rState !== null && rState !== undefined && rState.errorEmitted)
  ) {
    if (!willEmitClose) {
      process.nextTick(onclosed)
    }
  } else if (
    !readable &&
    (!willEmitClose || isReadable(stream)) &&
    (writableFinished || isWritable(stream) === false)
  ) {
    process.nextTick(onclosed)
  } else if (
    !writable &&
    (!willEmitClose || isWritable(stream)) &&
    (readableFinished || isReadable(stream) === false)
  ) {
    process.nextTick(onclosed)
  } else if (rState && stream.req && stream.aborted) {
    process.nextTick(onclosed)
  }
  // Detach everything and neutralize the callback.
  const cleanup = () => {
    callback = nop
    stream.removeListener('aborted', onclose)
    stream.removeListener('complete', onfinish)
    stream.removeListener('abort', onclose)
    stream.removeListener('request', onrequest)
    if (stream.req) stream.req.removeListener('finish', onfinish)
    stream.removeListener('end', onlegacyfinish)
    stream.removeListener('close', onlegacyfinish)
    stream.removeListener('finish', onfinish)
    stream.removeListener('end', onend)
    stream.removeListener('error', onerror)
    stream.removeListener('close', onclose)
  }
  if (options.signal && !closed) {
    const abort = () => {
      // Keep it because cleanup removes it.
      const endCallback = callback
      cleanup()
      endCallback.call(
        stream,
        new AbortError(undefined, {
          cause: options.signal.reason
        })
      )
    }
    if (options.signal.aborted) {
      process.nextTick(abort)
    } else {
      addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
      const disposable = addAbortListener(options.signal, abort)
      const originalCallback = callback
      // Dispose the abort listener once the callback fires for any reason.
      callback = once((...args) => {
        disposable[SymbolDispose]()
        originalCallback.apply(stream, args)
      })
    }
  }
  return cleanup
}
|
||||
// End-of-stream for web streams: resolves off the stream's internal
// kIsClosedPromise rather than Node events. Returns nop because there are
// no listeners to clean up.
function eosWeb(stream, options, callback) {
  let isAborted = false
  let abort = nop
  if (options.signal) {
    abort = () => {
      isAborted = true
      callback.call(
        stream,
        new AbortError(undefined, {
          cause: options.signal.reason
        })
      )
    }
    if (options.signal.aborted) {
      process.nextTick(abort)
    } else {
      addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
      const disposable = addAbortListener(options.signal, abort)
      const originalCallback = callback
      // Dispose the abort listener once the callback fires for any reason.
      callback = once((...args) => {
        disposable[SymbolDispose]()
        originalCallback.apply(stream, args)
      })
    }
  }
  // Same resolver for fulfillment and rejection; the abort path already
  // reported, so skip when aborted.
  const resolverFn = (...args) => {
    if (!isAborted) {
      process.nextTick(() => callback.apply(stream, args))
    }
  }
  PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn)
  return nop
}
|
||||
// Promise-based variant of eos(): resolves when the stream finishes,
// rejects with the failure otherwise. With { cleanup: true } the listeners
// attached by eos() are removed as soon as the outcome is known.
function finished(stream, opts) {
  let autoCleanup = false
  if (opts === null) {
    opts = kEmptyObject
  }
  const cleanupOpt = opts === null || opts === undefined ? undefined : opts.cleanup
  if (cleanupOpt) {
    validateBoolean(opts.cleanup, 'cleanup')
    autoCleanup = opts.cleanup
  }
  return new Promise((resolve, reject) => {
    const cleanup = eos(stream, opts, (err) => {
      if (autoCleanup) {
        cleanup()
      }
      err ? reject(err) : resolve()
    })
  })
}
module.exports = eos
module.exports.finished = finished
|
||||
98
node_modules/bl/node_modules/readable-stream/lib/internal/streams/from.js
generated
vendored
Normal file
98
node_modules/bl/node_modules/readable-stream/lib/internal/streams/from.js
generated
vendored
Normal file
@ -0,0 +1,98 @@
|
||||
'use strict'
|
||||
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
|
||||
const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require('../../ours/primordials')
|
||||
const { Buffer } = require('buffer')
|
||||
const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = require('../../ours/errors').codes
|
||||
// Build a Readable from an iterable/async iterable (or from a single
// string/Buffer, which becomes a one-chunk stream). `Readable` is passed
// in so callers can substitute a subclass (e.g. Duplexify).
function from(Readable, iterable, opts) {
  let iterator
  if (typeof iterable === 'string' || iterable instanceof Buffer) {
    return new Readable({
      objectMode: true,
      ...opts,
      read() {
        this.push(iterable)
        this.push(null)
      }
    })
  }
  let isAsync
  // Prefer the async iteration protocol when both are present.
  if (iterable && iterable[SymbolAsyncIterator]) {
    isAsync = true
    iterator = iterable[SymbolAsyncIterator]()
  } else if (iterable && iterable[SymbolIterator]) {
    isAsync = false
    iterator = iterable[SymbolIterator]()
  } else {
    throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable)
  }
  const readable = new Readable({
    objectMode: true,
    highWaterMark: 1,
    // TODO(ronag): What options should be allowed?
    ...opts
  })

  // Flag to protect against _read
  // being called before last iteration completion.
  let reading = false
  readable._read = function () {
    if (!reading) {
      reading = true
      next()
    }
  }
  readable._destroy = function (error, cb) {
    PromisePrototypeThen(
      close(error),
      () => process.nextTick(cb, error),
      // nextTick is here in case cb throws
      (e) => process.nextTick(cb, e || error)
    )
  }
  // Propagate destruction into the iterator: throw() on error when
  // supported, then return() to release resources.
  async function close(error) {
    const hadError = error !== undefined && error !== null
    const hasThrow = typeof iterator.throw === 'function'
    if (hadError && hasThrow) {
      const { value, done } = await iterator.throw(error)
      await value
      if (done) {
        return
      }
    }
    if (typeof iterator.return === 'function') {
      const { value } = await iterator.return()
      await value
    }
  }
  // Pump: pull one value per loop; keep looping while push() accepts more.
  async function next() {
    for (;;) {
      try {
        const { value, done } = isAsync ? await iterator.next() : iterator.next()
        if (done) {
          readable.push(null)
        } else {
          // A sync iterator may still yield promises; await them.
          const res = value && typeof value.then === 'function' ? await value : value
          if (res === null) {
            reading = false
            throw new ERR_STREAM_NULL_VALUES()
          } else if (readable.push(res)) {
            continue
          } else {
            // Backpressure: stop until _read re-arms us.
            reading = false
          }
        }
      } catch (err) {
        readable.destroy(err)
      }
      break
    }
  }
  return readable
}
module.exports = from
|
||||
51
node_modules/bl/node_modules/readable-stream/lib/internal/streams/lazy_transform.js
generated
vendored
Normal file
51
node_modules/bl/node_modules/readable-stream/lib/internal/streams/lazy_transform.js
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
// LazyTransform is a special type of Transform stream that is lazily loaded.
|
||||
// This is used for performance with bi-API-ship: when two APIs are available
|
||||
// for the stream, one conventional and one non-conventional.
|
||||
'use strict'
|
||||
|
||||
const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const stream = require('../../stream')
|
||||
const { getDefaultEncoding } = require('../crypto/util')
|
||||
module.exports = LazyTransform
|
||||
// Stores the constructor options only; the real stream.Transform
// initialization is deferred until _readableState/_writableState is first
// accessed (see makeGetter below).
function LazyTransform(options) {
  this._options = options
}

// Inherit both the instance and static side from stream.Transform.
ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype)
ObjectSetPrototypeOf(LazyTransform, stream.Transform)
|
||||
// Returns a lazy getter for `name`: the first access performs the deferred
// stream.Transform initialization with the stored options, then re-reads
// the property (by then installed as an own data property via makeSetter).
function makeGetter(name) {
  return function () {
    stream.Transform.call(this, this._options)
    this._writableState.decodeStrings = false
    if (!this._options || !this._options.defaultEncoding) {
      this._writableState.defaultEncoding = getDefaultEncoding()
    }
    return this[name]
  }
}
|
||||
// Returns a setter for `name` that shadows the prototype accessor with a
// plain own data property on the instance, ending the lazy phase.
function makeSetter(name) {
  return function (val) {
    const descriptor = {
      __proto__: null,
      value: val,
      writable: true,
      configurable: true,
      enumerable: true
    }
    ObjectDefineProperty(this, name, descriptor)
  }
}
|
||||
// Install the lazy accessors: until first access, _readableState and
// _writableState trigger the deferred Transform initialization
// (makeGetter); assignment replaces them with own data properties
// (makeSetter).
ObjectDefineProperties(LazyTransform.prototype, {
  _readableState: {
    __proto__: null,
    get: makeGetter('_readableState'),
    set: makeSetter('_readableState'),
    configurable: true,
    enumerable: true
  },
  _writableState: {
    __proto__: null,
    get: makeGetter('_writableState'),
    set: makeSetter('_writableState'),
    configurable: true,
    enumerable: true
  }
})
|
||||
89
node_modules/bl/node_modules/readable-stream/lib/internal/streams/legacy.js
generated
vendored
Normal file
89
node_modules/bl/node_modules/readable-stream/lib/internal/streams/legacy.js
generated
vendored
Normal file
@ -0,0 +1,89 @@
|
||||
'use strict'
|
||||
|
||||
const { ArrayIsArray, ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
const { EventEmitter: EE } = require('events')
|
||||
// Legacy base Stream: simply an EventEmitter with a pipe() method
// (attached below).
function Stream(opts) {
  EE.call(this, opts)
}

// Inherit both the instance and static side from EventEmitter.
ObjectSetPrototypeOf(Stream.prototype, EE.prototype)
ObjectSetPrototypeOf(Stream, EE)
|
||||
// Legacy pipe implementation: forwards 'data' to dest.write with
// pause/resume backpressure, propagates end/close, and tears all listeners
// down again on completion or error. Returns dest for chaining.
Stream.prototype.pipe = function (dest, options) {
  const source = this
  function ondata(chunk) {
    // write() returning false signals backpressure; pause if we can.
    if (dest.writable && dest.write(chunk) === false && source.pause) {
      source.pause()
    }
  }
  source.on('data', ondata)
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume()
    }
  }
  dest.on('drain', ondrain)

  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend)
    source.on('close', onclose)
  }
  // Guards so end/destroy happens at most once between onend/onclose.
  let didOnEnd = false
  function onend() {
    if (didOnEnd) return
    didOnEnd = true
    dest.end()
  }
  function onclose() {
    if (didOnEnd) return
    didOnEnd = true
    if (typeof dest.destroy === 'function') dest.destroy()
  }

  // Don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup()
    // Re-throw only when nobody else is listening for 'error'.
    if (EE.listenerCount(this, 'error') === 0) {
      this.emit('error', er)
    }
  }
  prependListener(source, 'error', onerror)
  prependListener(dest, 'error', onerror)

  // Remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata)
    dest.removeListener('drain', ondrain)
    source.removeListener('end', onend)
    source.removeListener('close', onclose)
    source.removeListener('error', onerror)
    dest.removeListener('error', onerror)
    source.removeListener('end', cleanup)
    source.removeListener('close', cleanup)
    dest.removeListener('close', cleanup)
  }
  source.on('end', cleanup)
  source.on('close', cleanup)
  dest.on('close', cleanup)
  dest.emit('pipe', source)

  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest
}
|
||||
function prependListener(emitter, event, fn) {
|
||||
// Sadly this is not cacheable as some libraries bundle their own
|
||||
// event emitter implementation with them.
|
||||
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn)
|
||||
|
||||
// This is a hack to make sure that our error handler is attached before any
|
||||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||||
// to continue to work with older versions of Node.js that do not include
|
||||
// the prependListener() method. The goal is to eventually remove this hack.
|
||||
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn)
|
||||
else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn)
|
||||
else emitter._events[event] = [fn, emitter._events[event]]
|
||||
}
|
||||
module.exports = {
|
||||
Stream,
|
||||
prependListener
|
||||
}
|
||||
457
node_modules/bl/node_modules/readable-stream/lib/internal/streams/operators.js
generated
vendored
Normal file
457
node_modules/bl/node_modules/readable-stream/lib/internal/streams/operators.js
generated
vendored
Normal file
@ -0,0 +1,457 @@
|
||||
'use strict'
|
||||
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
const {
|
||||
codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE },
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateAbortSignal, validateInteger, validateObject } = require('../validators')
|
||||
const kWeakHandler = require('../../ours/primordials').Symbol('kWeak')
|
||||
const kResistStopPropagation = require('../../ours/primordials').Symbol('kResistStopPropagation')
|
||||
const { finished } = require('./end-of-stream')
|
||||
const staticCompose = require('./compose')
|
||||
const { addAbortSignalNoValidate } = require('./add-abort-signal')
|
||||
const { isWritable, isNodeStream } = require('./utils')
|
||||
const { deprecate } = require('../../ours/util')
|
||||
const {
|
||||
ArrayPrototypePush,
|
||||
Boolean,
|
||||
MathFloor,
|
||||
Number,
|
||||
NumberIsNaN,
|
||||
Promise,
|
||||
PromiseReject,
|
||||
PromiseResolve,
|
||||
PromisePrototypeThen,
|
||||
Symbol
|
||||
} = require('../../ours/primordials')
|
||||
const kEmpty = Symbol('kEmpty')
|
||||
const kEof = Symbol('kEof')
|
||||
function compose(stream, options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
if (isNodeStream(stream) && !isWritable(stream)) {
|
||||
throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable')
|
||||
}
|
||||
const composedStream = staticCompose(this, stream)
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
// Not validating as we already validated before
|
||||
addAbortSignalNoValidate(options.signal, composedStream)
|
||||
}
|
||||
return composedStream
|
||||
}
|
||||
function map(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let concurrency = 1
|
||||
if ((options === null || options === undefined ? undefined : options.concurrency) != null) {
|
||||
concurrency = MathFloor(options.concurrency)
|
||||
}
|
||||
let highWaterMark = concurrency - 1
|
||||
if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) {
|
||||
highWaterMark = MathFloor(options.highWaterMark)
|
||||
}
|
||||
validateInteger(concurrency, 'options.concurrency', 1)
|
||||
validateInteger(highWaterMark, 'options.highWaterMark', 0)
|
||||
highWaterMark += concurrency
|
||||
return async function* map() {
|
||||
const signal = require('../../ours/util').AbortSignalAny(
|
||||
[options === null || options === undefined ? undefined : options.signal].filter(Boolean)
|
||||
)
|
||||
const stream = this
|
||||
const queue = []
|
||||
const signalOpt = {
|
||||
signal
|
||||
}
|
||||
let next
|
||||
let resume
|
||||
let done = false
|
||||
let cnt = 0
|
||||
function onCatch() {
|
||||
done = true
|
||||
afterItemProcessed()
|
||||
}
|
||||
function afterItemProcessed() {
|
||||
cnt -= 1
|
||||
maybeResume()
|
||||
}
|
||||
function maybeResume() {
|
||||
if (resume && !done && cnt < concurrency && queue.length < highWaterMark) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
async function pump() {
|
||||
try {
|
||||
for await (let val of stream) {
|
||||
if (done) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
try {
|
||||
val = fn(val, signalOpt)
|
||||
if (val === kEmpty) {
|
||||
continue
|
||||
}
|
||||
val = PromiseResolve(val)
|
||||
} catch (err) {
|
||||
val = PromiseReject(err)
|
||||
}
|
||||
cnt += 1
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {
|
||||
await new Promise((resolve) => {
|
||||
resume = resolve
|
||||
})
|
||||
}
|
||||
}
|
||||
queue.push(kEof)
|
||||
} catch (err) {
|
||||
const val = PromiseReject(err)
|
||||
PromisePrototypeThen(val, afterItemProcessed, onCatch)
|
||||
queue.push(val)
|
||||
} finally {
|
||||
done = true
|
||||
if (next) {
|
||||
next()
|
||||
next = null
|
||||
}
|
||||
}
|
||||
}
|
||||
pump()
|
||||
try {
|
||||
while (true) {
|
||||
while (queue.length > 0) {
|
||||
const val = await queue[0]
|
||||
if (val === kEof) {
|
||||
return
|
||||
}
|
||||
if (signal.aborted) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (val !== kEmpty) {
|
||||
yield val
|
||||
}
|
||||
queue.shift()
|
||||
maybeResume()
|
||||
}
|
||||
await new Promise((resolve) => {
|
||||
next = resolve
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
done = true
|
||||
if (resume) {
|
||||
resume()
|
||||
resume = null
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function asIndexedPairs(options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
return async function* asIndexedPairs() {
|
||||
let index = 0
|
||||
for await (const val of this) {
|
||||
var _options$signal
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal = options.signal) !== null &&
|
||||
_options$signal !== undefined &&
|
||||
_options$signal.aborted
|
||||
) {
|
||||
throw new AbortError({
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
yield [index++, val]
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
async function some(fn, options = undefined) {
|
||||
for await (const unused of filter.call(this, fn, options)) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
async function every(fn, options = undefined) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
// https://en.wikipedia.org/wiki/De_Morgan%27s_laws
|
||||
return !(await some.call(
|
||||
this,
|
||||
async (...args) => {
|
||||
return !(await fn(...args))
|
||||
},
|
||||
options
|
||||
))
|
||||
}
|
||||
async function find(fn, options) {
|
||||
for await (const result of filter.call(this, fn, options)) {
|
||||
return result
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
async function forEach(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function forEachFn(value, options) {
|
||||
await fn(value, options)
|
||||
return kEmpty
|
||||
}
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const unused of map.call(this, forEachFn, options));
|
||||
}
|
||||
function filter(fn, options) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn)
|
||||
}
|
||||
async function filterFn(value, options) {
|
||||
if (await fn(value, options)) {
|
||||
return value
|
||||
}
|
||||
return kEmpty
|
||||
}
|
||||
return map.call(this, filterFn, options)
|
||||
}
|
||||
|
||||
// Specific to provide better error to reduce since the argument is only
|
||||
// missing if the stream has no items in it - but the code is still appropriate
|
||||
class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
|
||||
constructor() {
|
||||
super('reduce')
|
||||
this.message = 'Reduce of an empty stream requires an initial value'
|
||||
}
|
||||
}
|
||||
async function reduce(reducer, initialValue, options) {
|
||||
var _options$signal2
|
||||
if (typeof reducer !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer)
|
||||
}
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
let hasInitialValue = arguments.length > 1
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal2 = options.signal) !== null &&
|
||||
_options$signal2 !== undefined &&
|
||||
_options$signal2.aborted
|
||||
) {
|
||||
const err = new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
this.once('error', () => {}) // The error is already propagated
|
||||
await finished(this.destroy(err))
|
||||
throw err
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
if (options !== null && options !== undefined && options.signal) {
|
||||
const opts = {
|
||||
once: true,
|
||||
[kWeakHandler]: this,
|
||||
[kResistStopPropagation]: true
|
||||
}
|
||||
options.signal.addEventListener('abort', () => ac.abort(), opts)
|
||||
}
|
||||
let gotAnyItemFromStream = false
|
||||
try {
|
||||
for await (const value of this) {
|
||||
var _options$signal3
|
||||
gotAnyItemFromStream = true
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal3 = options.signal) !== null &&
|
||||
_options$signal3 !== undefined &&
|
||||
_options$signal3.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (!hasInitialValue) {
|
||||
initialValue = value
|
||||
hasInitialValue = true
|
||||
} else {
|
||||
initialValue = await reducer(initialValue, value, {
|
||||
signal
|
||||
})
|
||||
}
|
||||
}
|
||||
if (!gotAnyItemFromStream && !hasInitialValue) {
|
||||
throw new ReduceAwareErrMissingArgs()
|
||||
}
|
||||
} finally {
|
||||
ac.abort()
|
||||
}
|
||||
return initialValue
|
||||
}
|
||||
async function toArray(options) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
const result = []
|
||||
for await (const val of this) {
|
||||
var _options$signal4
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal4 = options.signal) !== null &&
|
||||
_options$signal4 !== undefined &&
|
||||
_options$signal4.aborted
|
||||
) {
|
||||
throw new AbortError(undefined, {
|
||||
cause: options.signal.reason
|
||||
})
|
||||
}
|
||||
ArrayPrototypePush(result, val)
|
||||
}
|
||||
return result
|
||||
}
|
||||
function flatMap(fn, options) {
|
||||
const values = map.call(this, fn, options)
|
||||
return async function* flatMap() {
|
||||
for await (const val of values) {
|
||||
yield* val
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function toIntegerOrInfinity(number) {
|
||||
// We coerce here to align with the spec
|
||||
// https://github.com/tc39/proposal-iterator-helpers/issues/169
|
||||
number = Number(number)
|
||||
if (NumberIsNaN(number)) {
|
||||
return 0
|
||||
}
|
||||
if (number < 0) {
|
||||
throw new ERR_OUT_OF_RANGE('number', '>= 0', number)
|
||||
}
|
||||
return number
|
||||
}
|
||||
function drop(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* drop() {
|
||||
var _options$signal5
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal5 = options.signal) !== null &&
|
||||
_options$signal5 !== undefined &&
|
||||
_options$signal5.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal6
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal6 = options.signal) !== null &&
|
||||
_options$signal6 !== undefined &&
|
||||
_options$signal6.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- <= 0) {
|
||||
yield val
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
function take(number, options = undefined) {
|
||||
if (options != null) {
|
||||
validateObject(options, 'options')
|
||||
}
|
||||
if ((options === null || options === undefined ? undefined : options.signal) != null) {
|
||||
validateAbortSignal(options.signal, 'options.signal')
|
||||
}
|
||||
number = toIntegerOrInfinity(number)
|
||||
return async function* take() {
|
||||
var _options$signal7
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal7 = options.signal) !== null &&
|
||||
_options$signal7 !== undefined &&
|
||||
_options$signal7.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
for await (const val of this) {
|
||||
var _options$signal8
|
||||
if (
|
||||
options !== null &&
|
||||
options !== undefined &&
|
||||
(_options$signal8 = options.signal) !== null &&
|
||||
_options$signal8 !== undefined &&
|
||||
_options$signal8.aborted
|
||||
) {
|
||||
throw new AbortError()
|
||||
}
|
||||
if (number-- > 0) {
|
||||
yield val
|
||||
}
|
||||
|
||||
// Don't get another item from iterator in case we reached the end
|
||||
if (number <= 0) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}.call(this)
|
||||
}
|
||||
module.exports.streamReturningOperators = {
|
||||
asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'),
|
||||
drop,
|
||||
filter,
|
||||
flatMap,
|
||||
map,
|
||||
take,
|
||||
compose
|
||||
}
|
||||
module.exports.promiseReturningOperators = {
|
||||
every,
|
||||
forEach,
|
||||
reduce,
|
||||
toArray,
|
||||
some,
|
||||
find
|
||||
}
|
||||
39
node_modules/bl/node_modules/readable-stream/lib/internal/streams/passthrough.js
generated
vendored
Normal file
39
node_modules/bl/node_modules/readable-stream/lib/internal/streams/passthrough.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a passthrough stream.
|
||||
// basically just the most minimal sort of Transform stream.
|
||||
// Every written chunk gets output as-is.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf } = require('../../ours/primordials')
|
||||
module.exports = PassThrough
|
||||
const Transform = require('./transform')
|
||||
ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype)
|
||||
ObjectSetPrototypeOf(PassThrough, Transform)
|
||||
function PassThrough(options) {
|
||||
if (!(this instanceof PassThrough)) return new PassThrough(options)
|
||||
Transform.call(this, options)
|
||||
}
|
||||
PassThrough.prototype._transform = function (chunk, encoding, cb) {
|
||||
cb(null, chunk)
|
||||
}
|
||||
471
node_modules/bl/node_modules/readable-stream/lib/internal/streams/pipeline.js
generated
vendored
Normal file
471
node_modules/bl/node_modules/readable-stream/lib/internal/streams/pipeline.js
generated
vendored
Normal file
@ -0,0 +1,471 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Ported from https://github.com/mafintosh/pump with
|
||||
// permission from the author, Mathias Buus (@mafintosh).
|
||||
|
||||
;('use strict')
|
||||
const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = require('../../ours/primordials')
|
||||
const eos = require('./end-of-stream')
|
||||
const { once } = require('../../ours/util')
|
||||
const destroyImpl = require('./destroy')
|
||||
const Duplex = require('./duplex')
|
||||
const {
|
||||
aggregateTwoErrors,
|
||||
codes: {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_INVALID_RETURN_VALUE,
|
||||
ERR_MISSING_ARGS,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_PREMATURE_CLOSE
|
||||
},
|
||||
AbortError
|
||||
} = require('../../ours/errors')
|
||||
const { validateFunction, validateAbortSignal } = require('../validators')
|
||||
const {
|
||||
isIterable,
|
||||
isReadable,
|
||||
isReadableNodeStream,
|
||||
isNodeStream,
|
||||
isTransformStream,
|
||||
isWebStream,
|
||||
isReadableStream,
|
||||
isReadableFinished
|
||||
} = require('./utils')
|
||||
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
|
||||
let PassThrough
|
||||
let Readable
|
||||
let addAbortListener
|
||||
function destroyer(stream, reading, writing) {
|
||||
let finished = false
|
||||
stream.on('close', () => {
|
||||
finished = true
|
||||
})
|
||||
const cleanup = eos(
|
||||
stream,
|
||||
{
|
||||
readable: reading,
|
||||
writable: writing
|
||||
},
|
||||
(err) => {
|
||||
finished = !err
|
||||
}
|
||||
)
|
||||
return {
|
||||
destroy: (err) => {
|
||||
if (finished) return
|
||||
finished = true
|
||||
destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'))
|
||||
},
|
||||
cleanup
|
||||
}
|
||||
}
|
||||
function popCallback(streams) {
|
||||
// Streams should never be an empty array. It should always contain at least
|
||||
// a single stream. Therefore optimize for the average case instead of
|
||||
// checking for length === 0 as well.
|
||||
validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]')
|
||||
return streams.pop()
|
||||
}
|
||||
function makeAsyncIterable(val) {
|
||||
if (isIterable(val)) {
|
||||
return val
|
||||
} else if (isReadableNodeStream(val)) {
|
||||
// Legacy streams are not Iterable.
|
||||
return fromReadable(val)
|
||||
}
|
||||
throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val)
|
||||
}
|
||||
async function* fromReadable(val) {
|
||||
if (!Readable) {
|
||||
Readable = require('./readable')
|
||||
}
|
||||
yield* Readable.prototype[SymbolAsyncIterator].call(val)
|
||||
}
|
||||
async function pumpToNode(iterable, writable, finish, { end }) {
|
||||
let error
|
||||
let onresolve = null
|
||||
const resume = (err) => {
|
||||
if (err) {
|
||||
error = err
|
||||
}
|
||||
if (onresolve) {
|
||||
const callback = onresolve
|
||||
onresolve = null
|
||||
callback()
|
||||
}
|
||||
}
|
||||
const wait = () =>
|
||||
new Promise((resolve, reject) => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
onresolve = () => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
writable.on('drain', resume)
|
||||
const cleanup = eos(
|
||||
writable,
|
||||
{
|
||||
readable: false
|
||||
},
|
||||
resume
|
||||
)
|
||||
try {
|
||||
if (writable.writableNeedDrain) {
|
||||
await wait()
|
||||
}
|
||||
for await (const chunk of iterable) {
|
||||
if (!writable.write(chunk)) {
|
||||
await wait()
|
||||
}
|
||||
}
|
||||
if (end) {
|
||||
writable.end()
|
||||
await wait()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
finish(error !== err ? aggregateTwoErrors(error, err) : err)
|
||||
} finally {
|
||||
cleanup()
|
||||
writable.off('drain', resume)
|
||||
}
|
||||
}
|
||||
async function pumpToWeb(readable, writable, finish, { end }) {
|
||||
if (isTransformStream(writable)) {
|
||||
writable = writable.writable
|
||||
}
|
||||
// https://streams.spec.whatwg.org/#example-manual-write-with-backpressure
|
||||
const writer = writable.getWriter()
|
||||
try {
|
||||
for await (const chunk of readable) {
|
||||
await writer.ready
|
||||
writer.write(chunk).catch(() => {})
|
||||
}
|
||||
await writer.ready
|
||||
if (end) {
|
||||
await writer.close()
|
||||
}
|
||||
finish()
|
||||
} catch (err) {
|
||||
try {
|
||||
await writer.abort(err)
|
||||
finish(err)
|
||||
} catch (err) {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
function pipeline(...streams) {
|
||||
return pipelineImpl(streams, once(popCallback(streams)))
|
||||
}
|
||||
function pipelineImpl(streams, callback, opts) {
|
||||
if (streams.length === 1 && ArrayIsArray(streams[0])) {
|
||||
streams = streams[0]
|
||||
}
|
||||
if (streams.length < 2) {
|
||||
throw new ERR_MISSING_ARGS('streams')
|
||||
}
|
||||
const ac = new AbortController()
|
||||
const signal = ac.signal
|
||||
const outerSignal = opts === null || opts === undefined ? undefined : opts.signal
|
||||
|
||||
// Need to cleanup event listeners if last stream is readable
|
||||
// https://github.com/nodejs/node/issues/35452
|
||||
const lastStreamCleanup = []
|
||||
validateAbortSignal(outerSignal, 'options.signal')
|
||||
function abort() {
|
||||
finishImpl(new AbortError())
|
||||
}
|
||||
addAbortListener = addAbortListener || require('../../ours/util').addAbortListener
|
||||
let disposable
|
||||
if (outerSignal) {
|
||||
disposable = addAbortListener(outerSignal, abort)
|
||||
}
|
||||
let error
|
||||
let value
|
||||
const destroys = []
|
||||
let finishCount = 0
|
||||
function finish(err) {
|
||||
finishImpl(err, --finishCount === 0)
|
||||
}
|
||||
function finishImpl(err, final) {
|
||||
var _disposable
|
||||
if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) {
|
||||
error = err
|
||||
}
|
||||
if (!error && !final) {
|
||||
return
|
||||
}
|
||||
while (destroys.length) {
|
||||
destroys.shift()(error)
|
||||
}
|
||||
;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]()
|
||||
ac.abort()
|
||||
if (final) {
|
||||
if (!error) {
|
||||
lastStreamCleanup.forEach((fn) => fn())
|
||||
}
|
||||
process.nextTick(callback, error, value)
|
||||
}
|
||||
}
|
||||
let ret
|
||||
for (let i = 0; i < streams.length; i++) {
|
||||
const stream = streams[i]
|
||||
const reading = i < streams.length - 1
|
||||
const writing = i > 0
|
||||
const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false
|
||||
const isLastStream = i === streams.length - 1
|
||||
if (isNodeStream(stream)) {
|
||||
if (end) {
|
||||
const { destroy, cleanup } = destroyer(stream, reading, writing)
|
||||
destroys.push(destroy)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
|
||||
// Catch stream errors that occur after pipe/pump has completed.
|
||||
function onError(err) {
|
||||
if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
stream.on('error', onError)
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(() => {
|
||||
stream.removeListener('error', onError)
|
||||
})
|
||||
}
|
||||
}
|
||||
if (i === 0) {
|
||||
if (typeof stream === 'function') {
|
||||
ret = stream({
|
||||
signal
|
||||
})
|
||||
if (!isIterable(ret)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret)
|
||||
}
|
||||
} else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) {
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
} else if (typeof stream === 'function') {
|
||||
if (isTransformStream(ret)) {
|
||||
var _ret
|
||||
ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable)
|
||||
} else {
|
||||
ret = makeAsyncIterable(ret)
|
||||
}
|
||||
ret = stream(ret, {
|
||||
signal
|
||||
})
|
||||
if (reading) {
|
||||
if (!isIterable(ret, true)) {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret)
|
||||
}
|
||||
} else {
|
||||
var _ret2
|
||||
if (!PassThrough) {
|
||||
PassThrough = require('./passthrough')
|
||||
}
|
||||
|
||||
// If the last argument to pipeline is not a stream
|
||||
// we must create a proxy stream so that pipeline(...)
|
||||
// always returns a stream which can be further
|
||||
// composed through `.pipe(stream)`.
|
||||
|
||||
const pt = new PassThrough({
|
||||
objectMode: true
|
||||
})
|
||||
|
||||
// Handle Promises/A+ spec, `then` could be a getter that throws on
|
||||
// second use.
|
||||
const then = (_ret2 = ret) === null || _ret2 === undefined ? undefined : _ret2.then
|
||||
if (typeof then === 'function') {
|
||||
finishCount++
|
||||
then.call(
|
||||
ret,
|
||||
(val) => {
|
||||
value = val
|
||||
if (val != null) {
|
||||
pt.write(val)
|
||||
}
|
||||
if (end) {
|
||||
pt.end()
|
||||
}
|
||||
process.nextTick(finish)
|
||||
},
|
||||
(err) => {
|
||||
pt.destroy(err)
|
||||
process.nextTick(finish, err)
|
||||
}
|
||||
)
|
||||
} else if (isIterable(ret, true)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isTransformStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, pt, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret)
|
||||
}
|
||||
ret = pt
|
||||
const { destroy, cleanup } = destroyer(ret, false, true)
|
||||
destroys.push(destroy)
|
||||
if (isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
}
|
||||
} else if (isNodeStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount += 2
|
||||
const cleanup = pipe(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
if (isReadable(stream) && isLastStream) {
|
||||
lastStreamCleanup.push(cleanup)
|
||||
}
|
||||
} else if (isTransformStream(ret) || isReadableStream(ret)) {
|
||||
const toRead = ret.readable || ret
|
||||
finishCount++
|
||||
pumpToNode(toRead, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToNode(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else if (isWebStream(stream)) {
|
||||
if (isReadableNodeStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(makeAsyncIterable(ret), stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isReadableStream(ret) || isIterable(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else if (isTransformStream(ret)) {
|
||||
finishCount++
|
||||
pumpToWeb(ret.readable, stream, finish, {
|
||||
end
|
||||
})
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE(
|
||||
'val',
|
||||
['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'],
|
||||
ret
|
||||
)
|
||||
}
|
||||
ret = stream
|
||||
} else {
|
||||
ret = Duplex.from(stream)
|
||||
}
|
||||
}
|
||||
if (
|
||||
(signal !== null && signal !== undefined && signal.aborted) ||
|
||||
(outerSignal !== null && outerSignal !== undefined && outerSignal.aborted)
|
||||
) {
|
||||
process.nextTick(abort)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
function pipe(src, dst, finish, { end }) {
|
||||
let ended = false
|
||||
dst.on('close', () => {
|
||||
if (!ended) {
|
||||
// Finish if the destination closes before the source has completed.
|
||||
finish(new ERR_STREAM_PREMATURE_CLOSE())
|
||||
}
|
||||
})
|
||||
src.pipe(dst, {
|
||||
end: false
|
||||
}) // If end is true we already will have a listener to end dst.
|
||||
|
||||
if (end) {
|
||||
// Compat. Before node v10.12.0 stdio used to throw an error so
|
||||
// pipe() did/does not end() stdio destinations.
|
||||
// Now they allow it but "secretly" don't close the underlying fd.
|
||||
|
||||
function endFn() {
|
||||
ended = true
|
||||
dst.end()
|
||||
}
|
||||
if (isReadableFinished(src)) {
|
||||
// End the destination if the source has already ended.
|
||||
process.nextTick(endFn)
|
||||
} else {
|
||||
src.once('end', endFn)
|
||||
}
|
||||
} else {
|
||||
finish()
|
||||
}
|
||||
eos(
|
||||
src,
|
||||
{
|
||||
readable: true,
|
||||
writable: false
|
||||
},
|
||||
(err) => {
|
||||
const rState = src._readableState
|
||||
if (
|
||||
err &&
|
||||
err.code === 'ERR_STREAM_PREMATURE_CLOSE' &&
|
||||
rState &&
|
||||
rState.ended &&
|
||||
!rState.errored &&
|
||||
!rState.errorEmitted
|
||||
) {
|
||||
// Some readable streams will emit 'close' before 'end'. However, since
|
||||
// this is on the readable side 'end' should still be emitted if the
|
||||
// stream has been ended and no error emitted. This should be allowed in
|
||||
// favor of backwards compatibility. Since the stream is piped to a
|
||||
// destination this should not result in any observable difference.
|
||||
// We don't need to check if this is a writable premature close since
|
||||
// eos will only fail with premature close on the reading side for
|
||||
// duplex streams.
|
||||
src.once('end', finish).once('error', finish)
|
||||
} else {
|
||||
finish(err)
|
||||
}
|
||||
}
|
||||
)
|
||||
return eos(
|
||||
dst,
|
||||
{
|
||||
readable: false,
|
||||
writable: true
|
||||
},
|
||||
finish
|
||||
)
|
||||
}
|
||||
module.exports = {
|
||||
pipelineImpl,
|
||||
pipeline
|
||||
}
|
||||
1288
node_modules/bl/node_modules/readable-stream/lib/internal/streams/readable.js
generated
vendored
Normal file
1288
node_modules/bl/node_modules/readable-stream/lib/internal/streams/readable.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
39
node_modules/bl/node_modules/readable-stream/lib/internal/streams/state.js
generated
vendored
Normal file
39
node_modules/bl/node_modules/readable-stream/lib/internal/streams/state.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
'use strict'
|
||||
|
||||
const { MathFloor, NumberIsInteger } = require('../../ours/primordials')
|
||||
const { validateInteger } = require('../validators')
|
||||
const { ERR_INVALID_ARG_VALUE } = require('../../ours/errors').codes
|
||||
let defaultHighWaterMarkBytes = 16 * 1024
|
||||
let defaultHighWaterMarkObjectMode = 16
|
||||
function highWaterMarkFrom(options, isDuplex, duplexKey) {
|
||||
return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null
|
||||
}
|
||||
function getDefaultHighWaterMark(objectMode) {
|
||||
return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes
|
||||
}
|
||||
function setDefaultHighWaterMark(objectMode, value) {
|
||||
validateInteger(value, 'value', 0)
|
||||
if (objectMode) {
|
||||
defaultHighWaterMarkObjectMode = value
|
||||
} else {
|
||||
defaultHighWaterMarkBytes = value
|
||||
}
|
||||
}
|
||||
function getHighWaterMark(state, options, duplexKey, isDuplex) {
|
||||
const hwm = highWaterMarkFrom(options, isDuplex, duplexKey)
|
||||
if (hwm != null) {
|
||||
if (!NumberIsInteger(hwm) || hwm < 0) {
|
||||
const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark'
|
||||
throw new ERR_INVALID_ARG_VALUE(name, hwm)
|
||||
}
|
||||
return MathFloor(hwm)
|
||||
}
|
||||
|
||||
// Default value
|
||||
return getDefaultHighWaterMark(state.objectMode)
|
||||
}
|
||||
module.exports = {
|
||||
getHighWaterMark,
|
||||
getDefaultHighWaterMark,
|
||||
setDefaultHighWaterMark
|
||||
}
|
||||
180
node_modules/bl/node_modules/readable-stream/lib/internal/streams/transform.js
generated
vendored
Normal file
180
node_modules/bl/node_modules/readable-stream/lib/internal/streams/transform.js
generated
vendored
Normal file
@ -0,0 +1,180 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// a transform stream is a readable/writable stream where you do
|
||||
// something with the data. Sometimes it's called a "filter",
|
||||
// but that's not a great name for it, since that implies a thing where
|
||||
// some bits pass through, and others are simply ignored. (That would
|
||||
// be a valid example of a transform, of course.)
|
||||
//
|
||||
// While the output is causally related to the input, it's not a
|
||||
// necessarily symmetric or synchronous transformation. For example,
|
||||
// a zlib stream might take multiple plain-text writes(), and then
|
||||
// emit a single compressed chunk some time in the future.
|
||||
//
|
||||
// Here's how this works:
|
||||
//
|
||||
// The Transform stream has all the aspects of the readable and writable
|
||||
// stream classes. When you write(chunk), that calls _write(chunk,cb)
|
||||
// internally, and returns false if there's a lot of pending writes
|
||||
// buffered up. When you call read(), that calls _read(n) until
|
||||
// there's enough pending readable data buffered up.
|
||||
//
|
||||
// In a transform stream, the written data is placed in a buffer. When
|
||||
// _read(n) is called, it transforms the queued up data, calling the
|
||||
// buffered _write cb's as it consumes chunks. If consuming a single
|
||||
// written chunk would result in multiple output chunks, then the first
|
||||
// outputted bit calls the readcb, and subsequent chunks just go into
|
||||
// the read buffer, and will cause it to emit 'readable' if necessary.
|
||||
//
|
||||
// This way, back-pressure is actually determined by the reading side,
|
||||
// since _read has to be called to start processing a new chunk. However,
|
||||
// a pathological inflate type of transform can cause excessive buffering
|
||||
// here. For example, imagine a stream where every byte of input is
|
||||
// interpreted as an integer from 0-255, and then results in that many
|
||||
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
|
||||
// 1kb of data being output. In this case, you could write a very small
|
||||
// amount of input, and end up with a very large amount of output. In
|
||||
// such a pathological inflating mechanism, there'd be no way to tell
|
||||
// the system to stop doing the transform. A single 4MB write could
|
||||
// cause the system to run out of memory.
|
||||
//
|
||||
// However, even in such a pathological case, only a single written chunk
|
||||
// would be consumed, and then the rest would wait (un-transformed) until
|
||||
// the results of the previous transformed chunk were consumed.
|
||||
|
||||
'use strict'
|
||||
|
||||
const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
|
||||
module.exports = Transform
|
||||
const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
|
||||
const Duplex = require('./duplex')
|
||||
const { getHighWaterMark } = require('./state')
|
||||
ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
|
||||
ObjectSetPrototypeOf(Transform, Duplex)
|
||||
const kCallback = Symbol('kCallback')
|
||||
function Transform(options) {
|
||||
if (!(this instanceof Transform)) return new Transform(options)
|
||||
|
||||
// TODO (ronag): This should preferably always be
|
||||
// applied but would be semver-major. Or even better;
|
||||
// make Transform a Readable with the Writable interface.
|
||||
const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null
|
||||
if (readableHighWaterMark === 0) {
|
||||
// A Duplex will buffer both on the writable and readable side while
|
||||
// a Transform just wants to buffer hwm number of elements. To avoid
|
||||
// buffering twice we disable buffering on the writable side.
|
||||
options = {
|
||||
...options,
|
||||
highWaterMark: null,
|
||||
readableHighWaterMark,
|
||||
// TODO (ronag): 0 is not optimal since we have
|
||||
// a "bug" where we check needDrain before calling _write and not after.
|
||||
// Refs: https://github.com/nodejs/node/pull/32887
|
||||
// Refs: https://github.com/nodejs/node/pull/35941
|
||||
writableHighWaterMark: options.writableHighWaterMark || 0
|
||||
}
|
||||
}
|
||||
Duplex.call(this, options)
|
||||
|
||||
// We have implemented the _read method, and done the other things
|
||||
// that Readable wants before the first _read call, so unset the
|
||||
// sync guard flag.
|
||||
this._readableState.sync = false
|
||||
this[kCallback] = null
|
||||
if (options) {
|
||||
if (typeof options.transform === 'function') this._transform = options.transform
|
||||
if (typeof options.flush === 'function') this._flush = options.flush
|
||||
}
|
||||
|
||||
// When the writable side finishes, then flush out anything remaining.
|
||||
// Backwards compat. Some Transform streams incorrectly implement _final
|
||||
// instead of or in addition to _flush. By using 'prefinish' instead of
|
||||
// implementing _final we continue supporting this unfortunate use case.
|
||||
this.on('prefinish', prefinish)
|
||||
}
|
||||
function final(cb) {
|
||||
if (typeof this._flush === 'function' && !this.destroyed) {
|
||||
this._flush((er, data) => {
|
||||
if (er) {
|
||||
if (cb) {
|
||||
cb(er)
|
||||
} else {
|
||||
this.destroy(er)
|
||||
}
|
||||
return
|
||||
}
|
||||
if (data != null) {
|
||||
this.push(data)
|
||||
}
|
||||
this.push(null)
|
||||
if (cb) {
|
||||
cb()
|
||||
}
|
||||
})
|
||||
} else {
|
||||
this.push(null)
|
||||
if (cb) {
|
||||
cb()
|
||||
}
|
||||
}
|
||||
}
|
||||
function prefinish() {
|
||||
if (this._final !== final) {
|
||||
final.call(this)
|
||||
}
|
||||
}
|
||||
Transform.prototype._final = final
|
||||
Transform.prototype._transform = function (chunk, encoding, callback) {
|
||||
throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
|
||||
}
|
||||
Transform.prototype._write = function (chunk, encoding, callback) {
|
||||
const rState = this._readableState
|
||||
const wState = this._writableState
|
||||
const length = rState.length
|
||||
this._transform(chunk, encoding, (err, val) => {
|
||||
if (err) {
|
||||
callback(err)
|
||||
return
|
||||
}
|
||||
if (val != null) {
|
||||
this.push(val)
|
||||
}
|
||||
if (
|
||||
wState.ended ||
|
||||
// Backwards compat.
|
||||
length === rState.length ||
|
||||
// Backwards compat.
|
||||
rState.length < rState.highWaterMark
|
||||
) {
|
||||
callback()
|
||||
} else {
|
||||
this[kCallback] = callback
|
||||
}
|
||||
})
|
||||
}
|
||||
Transform.prototype._read = function () {
|
||||
if (this[kCallback]) {
|
||||
const callback = this[kCallback]
|
||||
this[kCallback] = null
|
||||
callback()
|
||||
}
|
||||
}
|
||||
329
node_modules/bl/node_modules/readable-stream/lib/internal/streams/utils.js
generated
vendored
Normal file
329
node_modules/bl/node_modules/readable-stream/lib/internal/streams/utils.js
generated
vendored
Normal file
@ -0,0 +1,329 @@
|
||||
'use strict'
|
||||
|
||||
const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = require('../../ours/primordials')
|
||||
|
||||
// We need to use SymbolFor to make these globally available
|
||||
// for interopt with readable-stream, i.e. readable-stream
|
||||
// and node core needs to be able to read/write private state
|
||||
// from each other for proper interoperability.
|
||||
const kIsDestroyed = SymbolFor('nodejs.stream.destroyed')
|
||||
const kIsErrored = SymbolFor('nodejs.stream.errored')
|
||||
const kIsReadable = SymbolFor('nodejs.stream.readable')
|
||||
const kIsWritable = SymbolFor('nodejs.stream.writable')
|
||||
const kIsDisturbed = SymbolFor('nodejs.stream.disturbed')
|
||||
const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise')
|
||||
const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction')
|
||||
function isReadableNodeStream(obj, strict = false) {
|
||||
var _obj$_readableState
|
||||
return !!(
|
||||
(
|
||||
obj &&
|
||||
typeof obj.pipe === 'function' &&
|
||||
typeof obj.on === 'function' &&
|
||||
(!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) &&
|
||||
(!obj._writableState ||
|
||||
((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined
|
||||
? undefined
|
||||
: _obj$_readableState.readable) !== false) &&
|
||||
// Duplex
|
||||
(!obj._writableState || obj._readableState)
|
||||
) // Writable has .pipe.
|
||||
)
|
||||
}
|
||||
|
||||
function isWritableNodeStream(obj) {
|
||||
var _obj$_writableState
|
||||
return !!(
|
||||
(
|
||||
obj &&
|
||||
typeof obj.write === 'function' &&
|
||||
typeof obj.on === 'function' &&
|
||||
(!obj._readableState ||
|
||||
((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined
|
||||
? undefined
|
||||
: _obj$_writableState.writable) !== false)
|
||||
) // Duplex
|
||||
)
|
||||
}
|
||||
|
||||
function isDuplexNodeStream(obj) {
|
||||
return !!(
|
||||
obj &&
|
||||
typeof obj.pipe === 'function' &&
|
||||
obj._readableState &&
|
||||
typeof obj.on === 'function' &&
|
||||
typeof obj.write === 'function'
|
||||
)
|
||||
}
|
||||
function isNodeStream(obj) {
|
||||
return (
|
||||
obj &&
|
||||
(obj._readableState ||
|
||||
obj._writableState ||
|
||||
(typeof obj.write === 'function' && typeof obj.on === 'function') ||
|
||||
(typeof obj.pipe === 'function' && typeof obj.on === 'function'))
|
||||
)
|
||||
}
|
||||
function isReadableStream(obj) {
|
||||
return !!(
|
||||
obj &&
|
||||
!isNodeStream(obj) &&
|
||||
typeof obj.pipeThrough === 'function' &&
|
||||
typeof obj.getReader === 'function' &&
|
||||
typeof obj.cancel === 'function'
|
||||
)
|
||||
}
|
||||
function isWritableStream(obj) {
|
||||
return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function')
|
||||
}
|
||||
function isTransformStream(obj) {
|
||||
return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object')
|
||||
}
|
||||
function isWebStream(obj) {
|
||||
return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj)
|
||||
}
|
||||
function isIterable(obj, isAsync) {
|
||||
if (obj == null) return false
|
||||
if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'
|
||||
if (isAsync === false) return typeof obj[SymbolIterator] === 'function'
|
||||
return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function'
|
||||
}
|
||||
function isDestroyed(stream) {
|
||||
if (!isNodeStream(stream)) return null
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
const state = wState || rState
|
||||
return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed))
|
||||
}
|
||||
|
||||
// Have been end():d.
|
||||
function isWritableEnded(stream) {
|
||||
if (!isWritableNodeStream(stream)) return null
|
||||
if (stream.writableEnded === true) return true
|
||||
const wState = stream._writableState
|
||||
if (wState !== null && wState !== undefined && wState.errored) return false
|
||||
if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null
|
||||
return wState.ended
|
||||
}
|
||||
|
||||
// Have emitted 'finish'.
|
||||
function isWritableFinished(stream, strict) {
|
||||
if (!isWritableNodeStream(stream)) return null
|
||||
if (stream.writableFinished === true) return true
|
||||
const wState = stream._writableState
|
||||
if (wState !== null && wState !== undefined && wState.errored) return false
|
||||
if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null
|
||||
return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0))
|
||||
}
|
||||
|
||||
// Have been push(null):d.
|
||||
function isReadableEnded(stream) {
|
||||
if (!isReadableNodeStream(stream)) return null
|
||||
if (stream.readableEnded === true) return true
|
||||
const rState = stream._readableState
|
||||
if (!rState || rState.errored) return false
|
||||
if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null
|
||||
return rState.ended
|
||||
}
|
||||
|
||||
// Have emitted 'end'.
|
||||
function isReadableFinished(stream, strict) {
|
||||
if (!isReadableNodeStream(stream)) return null
|
||||
const rState = stream._readableState
|
||||
if (rState !== null && rState !== undefined && rState.errored) return false
|
||||
if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null
|
||||
return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0))
|
||||
}
|
||||
function isReadable(stream) {
|
||||
if (stream && stream[kIsReadable] != null) return stream[kIsReadable]
|
||||
if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null
|
||||
if (isDestroyed(stream)) return false
|
||||
return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream)
|
||||
}
|
||||
function isWritable(stream) {
|
||||
if (stream && stream[kIsWritable] != null) return stream[kIsWritable]
|
||||
if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null
|
||||
if (isDestroyed(stream)) return false
|
||||
return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream)
|
||||
}
|
||||
function isFinished(stream, opts) {
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (isDestroyed(stream)) {
|
||||
return true
|
||||
}
|
||||
if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) {
|
||||
return false
|
||||
}
|
||||
if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
function isWritableErrored(stream) {
|
||||
var _stream$_writableStat, _stream$_writableStat2
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (stream.writableErrored) {
|
||||
return stream.writableErrored
|
||||
}
|
||||
return (_stream$_writableStat =
|
||||
(_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined
|
||||
? _stream$_writableStat
|
||||
: null
|
||||
}
|
||||
function isReadableErrored(stream) {
|
||||
var _stream$_readableStat, _stream$_readableStat2
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (stream.readableErrored) {
|
||||
return stream.readableErrored
|
||||
}
|
||||
return (_stream$_readableStat =
|
||||
(_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined
|
||||
? _stream$_readableStat
|
||||
: null
|
||||
}
|
||||
function isClosed(stream) {
|
||||
if (!isNodeStream(stream)) {
|
||||
return null
|
||||
}
|
||||
if (typeof stream.closed === 'boolean') {
|
||||
return stream.closed
|
||||
}
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
if (
|
||||
typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' ||
|
||||
typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean'
|
||||
) {
|
||||
return (
|
||||
(wState === null || wState === undefined ? undefined : wState.closed) ||
|
||||
(rState === null || rState === undefined ? undefined : rState.closed)
|
||||
)
|
||||
}
|
||||
if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) {
|
||||
return stream._closed
|
||||
}
|
||||
return null
|
||||
}
|
||||
function isOutgoingMessage(stream) {
|
||||
return (
|
||||
typeof stream._closed === 'boolean' &&
|
||||
typeof stream._defaultKeepAlive === 'boolean' &&
|
||||
typeof stream._removedConnection === 'boolean' &&
|
||||
typeof stream._removedContLen === 'boolean'
|
||||
)
|
||||
}
|
||||
function isServerResponse(stream) {
|
||||
return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream)
|
||||
}
|
||||
function isServerRequest(stream) {
|
||||
var _stream$req
|
||||
return (
|
||||
typeof stream._consuming === 'boolean' &&
|
||||
typeof stream._dumped === 'boolean' &&
|
||||
((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) ===
|
||||
undefined
|
||||
)
|
||||
}
|
||||
function willEmitClose(stream) {
|
||||
if (!isNodeStream(stream)) return null
|
||||
const wState = stream._writableState
|
||||
const rState = stream._readableState
|
||||
const state = wState || rState
|
||||
return (
|
||||
(!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false)
|
||||
)
|
||||
}
|
||||
function isDisturbed(stream) {
|
||||
var _stream$kIsDisturbed
|
||||
return !!(
|
||||
stream &&
|
||||
((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined
|
||||
? _stream$kIsDisturbed
|
||||
: stream.readableDidRead || stream.readableAborted)
|
||||
)
|
||||
}
|
||||
function isErrored(stream) {
|
||||
var _ref,
|
||||
_ref2,
|
||||
_ref3,
|
||||
_ref4,
|
||||
_ref5,
|
||||
_stream$kIsErrored,
|
||||
_stream$_readableStat3,
|
||||
_stream$_writableStat3,
|
||||
_stream$_readableStat4,
|
||||
_stream$_writableStat4
|
||||
return !!(
|
||||
stream &&
|
||||
((_ref =
|
||||
(_ref2 =
|
||||
(_ref3 =
|
||||
(_ref4 =
|
||||
(_ref5 =
|
||||
(_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined
|
||||
? _stream$kIsErrored
|
||||
: stream.readableErrored) !== null && _ref5 !== undefined
|
||||
? _ref5
|
||||
: stream.writableErrored) !== null && _ref4 !== undefined
|
||||
? _ref4
|
||||
: (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined
|
||||
? _ref3
|
||||
: (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined
|
||||
? _ref2
|
||||
: (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined
|
||||
? undefined
|
||||
: _stream$_readableStat4.errored) !== null && _ref !== undefined
|
||||
? _ref
|
||||
: (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined
|
||||
? undefined
|
||||
: _stream$_writableStat4.errored)
|
||||
)
|
||||
}
|
||||
module.exports = {
|
||||
isDestroyed,
|
||||
kIsDestroyed,
|
||||
isDisturbed,
|
||||
kIsDisturbed,
|
||||
isErrored,
|
||||
kIsErrored,
|
||||
isReadable,
|
||||
kIsReadable,
|
||||
kIsClosedPromise,
|
||||
kControllerErrorFunction,
|
||||
kIsWritable,
|
||||
isClosed,
|
||||
isDuplexNodeStream,
|
||||
isFinished,
|
||||
isIterable,
|
||||
isReadableNodeStream,
|
||||
isReadableStream,
|
||||
isReadableEnded,
|
||||
isReadableFinished,
|
||||
isReadableErrored,
|
||||
isNodeStream,
|
||||
isWebStream,
|
||||
isWritable,
|
||||
isWritableNodeStream,
|
||||
isWritableStream,
|
||||
isWritableEnded,
|
||||
isWritableFinished,
|
||||
isWritableErrored,
|
||||
isServerRequest,
|
||||
isServerResponse,
|
||||
willEmitClose,
|
||||
isTransformStream
|
||||
}
|
||||
817
node_modules/bl/node_modules/readable-stream/lib/internal/streams/writable.js
generated
vendored
Normal file
817
node_modules/bl/node_modules/readable-stream/lib/internal/streams/writable.js
generated
vendored
Normal file
@ -0,0 +1,817 @@
|
||||
/* replacement start */
|
||||
|
||||
const process = require('process/')
|
||||
|
||||
/* replacement end */
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
;('use strict')
|
||||
const {
|
||||
ArrayPrototypeSlice,
|
||||
Error,
|
||||
FunctionPrototypeSymbolHasInstance,
|
||||
ObjectDefineProperty,
|
||||
ObjectDefineProperties,
|
||||
ObjectSetPrototypeOf,
|
||||
StringPrototypeToLowerCase,
|
||||
Symbol,
|
||||
SymbolHasInstance
|
||||
} = require('../../ours/primordials')
|
||||
module.exports = Writable
|
||||
Writable.WritableState = WritableState
|
||||
const { EventEmitter: EE } = require('events')
|
||||
const Stream = require('./legacy').Stream
|
||||
const { Buffer } = require('buffer')
|
||||
const destroyImpl = require('./destroy')
|
||||
const { addAbortSignal } = require('./add-abort-signal')
|
||||
const { getHighWaterMark, getDefaultHighWaterMark } = require('./state')
|
||||
const {
|
||||
ERR_INVALID_ARG_TYPE,
|
||||
ERR_METHOD_NOT_IMPLEMENTED,
|
||||
ERR_MULTIPLE_CALLBACK,
|
||||
ERR_STREAM_CANNOT_PIPE,
|
||||
ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_ALREADY_FINISHED,
|
||||
ERR_STREAM_NULL_VALUES,
|
||||
ERR_STREAM_WRITE_AFTER_END,
|
||||
ERR_UNKNOWN_ENCODING
|
||||
} = require('../../ours/errors').codes
|
||||
const { errorOrDestroy } = destroyImpl
|
||||
ObjectSetPrototypeOf(Writable.prototype, Stream.prototype)
|
||||
ObjectSetPrototypeOf(Writable, Stream)
|
||||
function nop() {}
|
||||
const kOnFinished = Symbol('kOnFinished')
|
||||
function WritableState(options, stream, isDuplex) {
|
||||
// Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream,
|
||||
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
|
||||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof require('./duplex')
|
||||
|
||||
// Object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!(options && options.objectMode)
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode)
|
||||
|
||||
// The point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write().
|
||||
this.highWaterMark = options
|
||||
? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex)
|
||||
: getDefaultHighWaterMark(false)
|
||||
|
||||
// if _final has been called.
|
||||
this.finalCalled = false
|
||||
|
||||
// drain event flag.
|
||||
this.needDrain = false
|
||||
// At the start of calling end()
|
||||
this.ending = false
|
||||
// When end() has been called, and returned.
|
||||
this.ended = false
|
||||
// When 'finish' is emitted.
|
||||
this.finished = false
|
||||
|
||||
// Has it been destroyed
|
||||
this.destroyed = false
|
||||
|
||||
// Should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
const noDecode = !!(options && options.decodeStrings === false)
|
||||
this.decodeStrings = !noDecode
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = (options && options.defaultEncoding) || 'utf8'
|
||||
|
||||
// Not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0
|
||||
|
||||
// A flag to see when we're in the middle of a write.
|
||||
this.writing = false
|
||||
|
||||
// When true all writes will be buffered until .uncork() call.
|
||||
this.corked = 0
|
||||
|
||||
// A flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true
|
||||
|
||||
// A flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false
|
||||
|
||||
// The callback that's passed to _write(chunk, cb).
|
||||
this.onwrite = onwrite.bind(undefined, stream)
|
||||
|
||||
// The callback that the user supplies to write(chunk, encoding, cb).
|
||||
this.writecb = null
|
||||
|
||||
// The amount that is being written when _write is called.
|
||||
this.writelen = 0
|
||||
|
||||
// Storage for data passed to the afterWrite() callback in case of
|
||||
// synchronous _write() completion.
|
||||
this.afterWriteTickInfo = null
|
||||
resetBuffer(this)
|
||||
|
||||
// Number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted.
|
||||
this.pendingcb = 0
|
||||
|
||||
// Stream is still being constructed and cannot be
|
||||
// destroyed until construction finished or failed.
|
||||
// Async construction is opt in, therefore we start as
|
||||
// constructed.
|
||||
this.constructed = true
|
||||
|
||||
// Emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams.
|
||||
this.prefinished = false
|
||||
|
||||
// True if the error was already emitted and should not be thrown again.
|
||||
this.errorEmitted = false
|
||||
|
||||
// Should close be emitted on destroy. Defaults to true.
|
||||
this.emitClose = !options || options.emitClose !== false
|
||||
|
||||
// Should .destroy() be called after 'finish' (and potentially 'end').
|
||||
this.autoDestroy = !options || options.autoDestroy !== false
|
||||
|
||||
// Indicates whether the stream has errored. When true all write() calls
|
||||
// should return false. This is needed since when autoDestroy
|
||||
// is disabled we need a way to tell whether the stream has failed.
|
||||
this.errored = null
|
||||
|
||||
// Indicates whether the stream has finished destroying.
|
||||
this.closed = false
|
||||
|
||||
// True if close has been emitted or would have been emitted
|
||||
// depending on emitClose.
|
||||
this.closeEmitted = false
|
||||
this[kOnFinished] = []
|
||||
}
|
||||
function resetBuffer(state) {
|
||||
state.buffered = []
|
||||
state.bufferedIndex = 0
|
||||
state.allBuffers = true
|
||||
state.allNoop = true
|
||||
}
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
return ArrayPrototypeSlice(this.buffered, this.bufferedIndex)
|
||||
}
|
||||
ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this.buffered.length - this.bufferedIndex
|
||||
}
|
||||
})
|
||||
function Writable(options) {
|
||||
// Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
|
||||
// Checking for a Stream.Duplex instance is faster here instead of inside
|
||||
// the WritableState constructor, at least with V8 6.5.
|
||||
const isDuplex = this instanceof require('./duplex')
|
||||
if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options)
|
||||
this._writableState = new WritableState(options, this, isDuplex)
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write
|
||||
if (typeof options.writev === 'function') this._writev = options.writev
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy
|
||||
if (typeof options.final === 'function') this._final = options.final
|
||||
if (typeof options.construct === 'function') this._construct = options.construct
|
||||
if (options.signal) addAbortSignal(options.signal, this)
|
||||
}
|
||||
Stream.call(this, options)
|
||||
destroyImpl.construct(this, () => {
|
||||
const state = this._writableState
|
||||
if (!state.writing) {
|
||||
clearBuffer(this, state)
|
||||
}
|
||||
finishMaybe(this, state)
|
||||
})
|
||||
}
|
||||
ObjectDefineProperty(Writable, SymbolHasInstance, {
|
||||
__proto__: null,
|
||||
value: function (object) {
|
||||
if (FunctionPrototypeSymbolHasInstance(this, object)) return true
|
||||
if (this !== Writable) return false
|
||||
return object && object._writableState instanceof WritableState
|
||||
}
|
||||
})
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function () {
|
||||
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE())
|
||||
}
|
||||
function _write(stream, chunk, encoding, cb) {
|
||||
const state = stream._writableState
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
encoding = state.defaultEncoding
|
||||
} else {
|
||||
if (!encoding) encoding = state.defaultEncoding
|
||||
else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
|
||||
if (typeof cb !== 'function') cb = nop
|
||||
}
|
||||
if (chunk === null) {
|
||||
throw new ERR_STREAM_NULL_VALUES()
|
||||
} else if (!state.objectMode) {
|
||||
if (typeof chunk === 'string') {
|
||||
if (state.decodeStrings !== false) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
encoding = 'buffer'
|
||||
}
|
||||
} else if (chunk instanceof Buffer) {
|
||||
encoding = 'buffer'
|
||||
} else if (Stream._isUint8Array(chunk)) {
|
||||
chunk = Stream._uint8ArrayToBuffer(chunk)
|
||||
encoding = 'buffer'
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk)
|
||||
}
|
||||
}
|
||||
let err
|
||||
if (state.ending) {
|
||||
err = new ERR_STREAM_WRITE_AFTER_END()
|
||||
} else if (state.destroyed) {
|
||||
err = new ERR_STREAM_DESTROYED('write')
|
||||
}
|
||||
if (err) {
|
||||
process.nextTick(cb, err)
|
||||
errorOrDestroy(stream, err, true)
|
||||
return err
|
||||
}
|
||||
state.pendingcb++
|
||||
return writeOrBuffer(stream, state, chunk, encoding, cb)
|
||||
}
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
return _write(this, chunk, encoding, cb) === true
|
||||
}
|
||||
Writable.prototype.cork = function () {
|
||||
this._writableState.corked++
|
||||
}
|
||||
Writable.prototype.uncork = function () {
|
||||
const state = this._writableState
|
||||
if (state.corked) {
|
||||
state.corked--
|
||||
if (!state.writing) clearBuffer(this, state)
|
||||
}
|
||||
}
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding)
|
||||
if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding)
|
||||
this._writableState.defaultEncoding = encoding
|
||||
return this
|
||||
}
|
||||
|
||||
// If we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
function writeOrBuffer(stream, state, chunk, encoding, callback) {
|
||||
const len = state.objectMode ? 1 : chunk.length
|
||||
state.length += len
|
||||
|
||||
// stream._write resets state.length
|
||||
const ret = state.length < state.highWaterMark
|
||||
// We must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret) state.needDrain = true
|
||||
if (state.writing || state.corked || state.errored || !state.constructed) {
|
||||
state.buffered.push({
|
||||
chunk,
|
||||
encoding,
|
||||
callback
|
||||
})
|
||||
if (state.allBuffers && encoding !== 'buffer') {
|
||||
state.allBuffers = false
|
||||
}
|
||||
if (state.allNoop && callback !== nop) {
|
||||
state.allNoop = false
|
||||
}
|
||||
} else {
|
||||
state.writelen = len
|
||||
state.writecb = callback
|
||||
state.writing = true
|
||||
state.sync = true
|
||||
stream._write(chunk, encoding, state.onwrite)
|
||||
state.sync = false
|
||||
}
|
||||
|
||||
// Return false if errored or destroyed in order to break
|
||||
// any synchronous while(stream.write(data)) loops.
|
||||
return ret && !state.errored && !state.destroyed
|
||||
}
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len
|
||||
state.writecb = cb
|
||||
state.writing = true
|
||||
state.sync = true
|
||||
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'))
|
||||
else if (writev) stream._writev(chunk, state.onwrite)
|
||||
else stream._write(chunk, encoding, state.onwrite)
|
||||
state.sync = false
|
||||
}
|
||||
function onwriteError(stream, state, er, cb) {
|
||||
--state.pendingcb
|
||||
cb(er)
|
||||
// Ensure callbacks are invoked even when autoDestroy is
|
||||
// not enabled. Passing `er` here doesn't make sense since
|
||||
// it's related to one specific write, not to the buffered
|
||||
// writes.
|
||||
errorBuffer(state)
|
||||
// This can emit error, but error must always follow cb.
|
||||
errorOrDestroy(stream, er)
|
||||
}
|
||||
function onwrite(stream, er) {
|
||||
const state = stream._writableState
|
||||
const sync = state.sync
|
||||
const cb = state.writecb
|
||||
if (typeof cb !== 'function') {
|
||||
errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
state.writing = false
|
||||
state.writecb = null
|
||||
state.length -= state.writelen
|
||||
state.writelen = 0
|
||||
if (er) {
|
||||
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
|
||||
er.stack // eslint-disable-line no-unused-expressions
|
||||
|
||||
if (!state.errored) {
|
||||
state.errored = er
|
||||
}
|
||||
|
||||
// In case of duplex streams we need to notify the readable side of the
|
||||
// error.
|
||||
if (stream._readableState && !stream._readableState.errored) {
|
||||
stream._readableState.errored = er
|
||||
}
|
||||
if (sync) {
|
||||
process.nextTick(onwriteError, stream, state, er, cb)
|
||||
} else {
|
||||
onwriteError(stream, state, er, cb)
|
||||
}
|
||||
} else {
|
||||
if (state.buffered.length > state.bufferedIndex) {
|
||||
clearBuffer(stream, state)
|
||||
}
|
||||
if (sync) {
|
||||
// It is a common case that the callback passed to .write() is always
|
||||
// the same. In that case, we do not schedule a new nextTick(), but
|
||||
// rather just increase a counter, to improve performance and avoid
|
||||
// memory allocations.
|
||||
if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) {
|
||||
state.afterWriteTickInfo.count++
|
||||
} else {
|
||||
state.afterWriteTickInfo = {
|
||||
count: 1,
|
||||
cb,
|
||||
stream,
|
||||
state
|
||||
}
|
||||
process.nextTick(afterWriteTick, state.afterWriteTickInfo)
|
||||
}
|
||||
} else {
|
||||
afterWrite(stream, state, 1, cb)
|
||||
}
|
||||
}
|
||||
}
|
||||
function afterWriteTick({ stream, state, count, cb }) {
|
||||
state.afterWriteTickInfo = null
|
||||
return afterWrite(stream, state, count, cb)
|
||||
}
|
||||
function afterWrite(stream, state, count, cb) {
|
||||
const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain
|
||||
if (needDrain) {
|
||||
state.needDrain = false
|
||||
stream.emit('drain')
|
||||
}
|
||||
while (count-- > 0) {
|
||||
state.pendingcb--
|
||||
cb()
|
||||
}
|
||||
if (state.destroyed) {
|
||||
errorBuffer(state)
|
||||
}
|
||||
finishMaybe(stream, state)
|
||||
}
|
||||
|
||||
// If there's something in the buffer waiting, then invoke callbacks.
|
||||
function errorBuffer(state) {
|
||||
if (state.writing) {
|
||||
return
|
||||
}
|
||||
for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {
|
||||
var _state$errored
|
||||
const { chunk, callback } = state.buffered[n]
|
||||
const len = state.objectMode ? 1 : chunk.length
|
||||
state.length -= len
|
||||
callback(
|
||||
(_state$errored = state.errored) !== null && _state$errored !== undefined
|
||||
? _state$errored
|
||||
: new ERR_STREAM_DESTROYED('write')
|
||||
)
|
||||
}
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
var _state$errored2
|
||||
onfinishCallbacks[i](
|
||||
(_state$errored2 = state.errored) !== null && _state$errored2 !== undefined
|
||||
? _state$errored2
|
||||
: new ERR_STREAM_DESTROYED('end')
|
||||
)
|
||||
}
|
||||
resetBuffer(state)
|
||||
}
|
||||
|
||||
// If there's something in the buffer waiting, then process it.
|
||||
function clearBuffer(stream, state) {
|
||||
if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) {
|
||||
return
|
||||
}
|
||||
const { buffered, bufferedIndex, objectMode } = state
|
||||
const bufferedLength = buffered.length - bufferedIndex
|
||||
if (!bufferedLength) {
|
||||
return
|
||||
}
|
||||
let i = bufferedIndex
|
||||
state.bufferProcessing = true
|
||||
if (bufferedLength > 1 && stream._writev) {
|
||||
state.pendingcb -= bufferedLength - 1
|
||||
const callback = state.allNoop
|
||||
? nop
|
||||
: (err) => {
|
||||
for (let n = i; n < buffered.length; ++n) {
|
||||
buffered[n].callback(err)
|
||||
}
|
||||
}
|
||||
// Make a copy of `buffered` if it's going to be used by `callback` above,
|
||||
// since `doWrite` will mutate the array.
|
||||
const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i)
|
||||
chunks.allBuffers = state.allBuffers
|
||||
doWrite(stream, state, true, state.length, chunks, '', callback)
|
||||
resetBuffer(state)
|
||||
} else {
|
||||
do {
|
||||
const { chunk, encoding, callback } = buffered[i]
|
||||
buffered[i++] = null
|
||||
const len = objectMode ? 1 : chunk.length
|
||||
doWrite(stream, state, false, len, chunk, encoding, callback)
|
||||
} while (i < buffered.length && !state.writing)
|
||||
if (i === buffered.length) {
|
||||
resetBuffer(state)
|
||||
} else if (i > 256) {
|
||||
buffered.splice(0, i)
|
||||
state.bufferedIndex = 0
|
||||
} else {
|
||||
state.bufferedIndex = i
|
||||
}
|
||||
}
|
||||
state.bufferProcessing = false
|
||||
}
|
||||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
if (this._writev) {
|
||||
this._writev(
|
||||
[
|
||||
{
|
||||
chunk,
|
||||
encoding
|
||||
}
|
||||
],
|
||||
cb
|
||||
)
|
||||
} else {
|
||||
throw new ERR_METHOD_NOT_IMPLEMENTED('_write()')
|
||||
}
|
||||
}
|
||||
Writable.prototype._writev = null
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
const state = this._writableState
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk
|
||||
chunk = null
|
||||
encoding = null
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
encoding = null
|
||||
}
|
||||
let err
|
||||
if (chunk !== null && chunk !== undefined) {
|
||||
const ret = _write(this, chunk, encoding)
|
||||
if (ret instanceof Error) {
|
||||
err = ret
|
||||
}
|
||||
}
|
||||
|
||||
// .end() fully uncorks.
|
||||
if (state.corked) {
|
||||
state.corked = 1
|
||||
this.uncork()
|
||||
}
|
||||
if (err) {
|
||||
// Do nothing...
|
||||
} else if (!state.errored && !state.ending) {
|
||||
// This is forgiving in terms of unnecessary calls to end() and can hide
|
||||
// logic errors. However, usually such errors are harmless and causing a
|
||||
// hard error can be disproportionately destructive. It is not always
|
||||
// trivial for the user to determine whether end() needs to be called
|
||||
// or not.
|
||||
|
||||
state.ending = true
|
||||
finishMaybe(this, state, true)
|
||||
state.ended = true
|
||||
} else if (state.finished) {
|
||||
err = new ERR_STREAM_ALREADY_FINISHED('end')
|
||||
} else if (state.destroyed) {
|
||||
err = new ERR_STREAM_DESTROYED('end')
|
||||
}
|
||||
if (typeof cb === 'function') {
|
||||
if (err || state.finished) {
|
||||
process.nextTick(cb, err)
|
||||
} else {
|
||||
state[kOnFinished].push(cb)
|
||||
}
|
||||
}
|
||||
return this
|
||||
}
|
||||
function needFinish(state) {
|
||||
return (
|
||||
state.ending &&
|
||||
!state.destroyed &&
|
||||
state.constructed &&
|
||||
state.length === 0 &&
|
||||
!state.errored &&
|
||||
state.buffered.length === 0 &&
|
||||
!state.finished &&
|
||||
!state.writing &&
|
||||
!state.errorEmitted &&
|
||||
!state.closeEmitted
|
||||
)
|
||||
}
|
||||
function callFinal(stream, state) {
|
||||
let called = false
|
||||
function onFinish(err) {
|
||||
if (called) {
|
||||
errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK())
|
||||
return
|
||||
}
|
||||
called = true
|
||||
state.pendingcb--
|
||||
if (err) {
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
onfinishCallbacks[i](err)
|
||||
}
|
||||
errorOrDestroy(stream, err, state.sync)
|
||||
} else if (needFinish(state)) {
|
||||
state.prefinished = true
|
||||
stream.emit('prefinish')
|
||||
// Backwards compat. Don't check state.sync here.
|
||||
// Some streams assume 'finish' will be emitted
|
||||
// asynchronously relative to _final callback.
|
||||
state.pendingcb++
|
||||
process.nextTick(finish, stream, state)
|
||||
}
|
||||
}
|
||||
state.sync = true
|
||||
state.pendingcb++
|
||||
try {
|
||||
stream._final(onFinish)
|
||||
} catch (err) {
|
||||
onFinish(err)
|
||||
}
|
||||
state.sync = false
|
||||
}
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function' && !state.destroyed) {
|
||||
state.finalCalled = true
|
||||
callFinal(stream, state)
|
||||
} else {
|
||||
state.prefinished = true
|
||||
stream.emit('prefinish')
|
||||
}
|
||||
}
|
||||
}
|
||||
function finishMaybe(stream, state, sync) {
|
||||
if (needFinish(state)) {
|
||||
prefinish(stream, state)
|
||||
if (state.pendingcb === 0) {
|
||||
if (sync) {
|
||||
state.pendingcb++
|
||||
process.nextTick(
|
||||
(stream, state) => {
|
||||
if (needFinish(state)) {
|
||||
finish(stream, state)
|
||||
} else {
|
||||
state.pendingcb--
|
||||
}
|
||||
},
|
||||
stream,
|
||||
state
|
||||
)
|
||||
} else if (needFinish(state)) {
|
||||
state.pendingcb++
|
||||
finish(stream, state)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function finish(stream, state) {
|
||||
state.pendingcb--
|
||||
state.finished = true
|
||||
const onfinishCallbacks = state[kOnFinished].splice(0)
|
||||
for (let i = 0; i < onfinishCallbacks.length; i++) {
|
||||
onfinishCallbacks[i]()
|
||||
}
|
||||
stream.emit('finish')
|
||||
if (state.autoDestroy) {
|
||||
// In case of duplex streams we need a way to detect
|
||||
// if the readable side is ready for autoDestroy as well.
|
||||
const rState = stream._readableState
|
||||
const autoDestroy =
|
||||
!rState ||
|
||||
(rState.autoDestroy &&
|
||||
// We don't expect the readable to ever 'end'
|
||||
// if readable is explicitly set to false.
|
||||
(rState.endEmitted || rState.readable === false))
|
||||
if (autoDestroy) {
|
||||
stream.destroy()
|
||||
}
|
||||
}
|
||||
}
|
||||
ObjectDefineProperties(Writable.prototype, {
|
||||
closed: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.closed : false
|
||||
}
|
||||
},
|
||||
destroyed: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.destroyed : false
|
||||
},
|
||||
set(value) {
|
||||
// Backward compatibility, the user is explicitly managing destroyed.
|
||||
if (this._writableState) {
|
||||
this._writableState.destroyed = value
|
||||
}
|
||||
}
|
||||
},
|
||||
writable: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
const w = this._writableState
|
||||
// w.writable === false means that this is part of a Duplex stream
|
||||
// where the writable side was disabled upon construction.
|
||||
// Compat. The user might manually disable writable side through
|
||||
// deprecated setter.
|
||||
return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended
|
||||
},
|
||||
set(val) {
|
||||
// Backwards compatible.
|
||||
if (this._writableState) {
|
||||
this._writableState.writable = !!val
|
||||
}
|
||||
}
|
||||
},
|
||||
writableFinished: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.finished : false
|
||||
}
|
||||
},
|
||||
writableObjectMode: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.objectMode : false
|
||||
}
|
||||
},
|
||||
writableBuffer: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.getBuffer()
|
||||
}
|
||||
},
|
||||
writableEnded: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.ending : false
|
||||
}
|
||||
},
|
||||
writableNeedDrain: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
const wState = this._writableState
|
||||
if (!wState) return false
|
||||
return !wState.destroyed && !wState.ending && wState.needDrain
|
||||
}
|
||||
},
|
||||
writableHighWaterMark: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.highWaterMark
|
||||
}
|
||||
},
|
||||
writableCorked: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.corked : 0
|
||||
}
|
||||
},
|
||||
writableLength: {
|
||||
__proto__: null,
|
||||
get() {
|
||||
return this._writableState && this._writableState.length
|
||||
}
|
||||
},
|
||||
errored: {
|
||||
__proto__: null,
|
||||
enumerable: false,
|
||||
get() {
|
||||
return this._writableState ? this._writableState.errored : null
|
||||
}
|
||||
},
|
||||
writableAborted: {
|
||||
__proto__: null,
|
||||
enumerable: false,
|
||||
get: function () {
|
||||
return !!(
|
||||
this._writableState.writable !== false &&
|
||||
(this._writableState.destroyed || this._writableState.errored) &&
|
||||
!this._writableState.finished
|
||||
)
|
||||
}
|
||||
}
|
||||
})
|
||||
const destroy = destroyImpl.destroy
|
||||
Writable.prototype.destroy = function (err, cb) {
|
||||
const state = this._writableState
|
||||
|
||||
// Invoke pending callbacks.
|
||||
if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) {
|
||||
process.nextTick(errorBuffer, state)
|
||||
}
|
||||
destroy.call(this, err, cb)
|
||||
return this
|
||||
}
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
cb(err)
|
||||
}
|
||||
Writable.prototype[EE.captureRejectionSymbol] = function (err) {
|
||||
this.destroy(err)
|
||||
}
|
||||
let webStreamsAdapters
|
||||
|
||||
// Lazy to avoid circular references
|
||||
function lazyWebStreams() {
|
||||
if (webStreamsAdapters === undefined) webStreamsAdapters = {}
|
||||
return webStreamsAdapters
|
||||
}
|
||||
Writable.fromWeb = function (writableStream, options) {
|
||||
return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options)
|
||||
}
|
||||
Writable.toWeb = function (streamWritable) {
|
||||
return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable)
|
||||
}
|
||||
530
node_modules/bl/node_modules/readable-stream/lib/internal/validators.js
generated
vendored
Normal file
530
node_modules/bl/node_modules/readable-stream/lib/internal/validators.js
generated
vendored
Normal file
@ -0,0 +1,530 @@
|
||||
/* eslint jsdoc/require-jsdoc: "error" */
|
||||
|
||||
'use strict'
|
||||
|
||||
const {
|
||||
ArrayIsArray,
|
||||
ArrayPrototypeIncludes,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypeMap,
|
||||
NumberIsInteger,
|
||||
NumberIsNaN,
|
||||
NumberMAX_SAFE_INTEGER,
|
||||
NumberMIN_SAFE_INTEGER,
|
||||
NumberParseInt,
|
||||
ObjectPrototypeHasOwnProperty,
|
||||
RegExpPrototypeExec,
|
||||
String,
|
||||
StringPrototypeToUpperCase,
|
||||
StringPrototypeTrim
|
||||
} = require('../ours/primordials')
|
||||
const {
|
||||
hideStackFrames,
|
||||
codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL }
|
||||
} = require('../ours/errors')
|
||||
const { normalizeEncoding } = require('../ours/util')
|
||||
const { isAsyncFunction, isArrayBufferView } = require('../ours/util').types
|
||||
const signals = {}
|
||||
|
||||
/**
|
||||
* @param {*} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isInt32(value) {
|
||||
return value === (value | 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} value
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isUint32(value) {
|
||||
return value === value >>> 0
|
||||
}
|
||||
const octalReg = /^[0-7]+$/
|
||||
const modeDesc = 'must be a 32-bit unsigned integer or an octal string'
|
||||
|
||||
/**
|
||||
* Parse and validate values that will be converted into mode_t (the S_*
|
||||
* constants). Only valid numbers and octal strings are allowed. They could be
|
||||
* converted to 32-bit unsigned integers or non-negative signed integers in the
|
||||
* C++ land, but any value higher than 0o777 will result in platform-specific
|
||||
* behaviors.
|
||||
* @param {*} value Values to be validated
|
||||
* @param {string} name Name of the argument
|
||||
* @param {number} [def] If specified, will be returned for invalid values
|
||||
* @returns {number}
|
||||
*/
|
||||
function parseFileMode(value, name, def) {
|
||||
if (typeof value === 'undefined') {
|
||||
value = def
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
if (RegExpPrototypeExec(octalReg, value) === null) {
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc)
|
||||
}
|
||||
value = NumberParseInt(value, 8)
|
||||
}
|
||||
validateUint32(value, name)
|
||||
return value
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateInteger
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateInteger} */
|
||||
const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => {
|
||||
if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateInt32
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateInt32} */
|
||||
const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => {
|
||||
// The defaults for min and max correspond to the limits of 32-bit integers.
|
||||
if (typeof value !== 'number') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
}
|
||||
if (!NumberIsInteger(value)) {
|
||||
throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
}
|
||||
if (value < min || value > max) {
|
||||
throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateUint32
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number|boolean} [positive=false]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateUint32} */
|
||||
const validateUint32 = hideStackFrames((value, name, positive = false) => {
|
||||
if (typeof value !== 'number') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
}
|
||||
if (!NumberIsInteger(value)) {
|
||||
throw new ERR_OUT_OF_RANGE(name, 'an integer', value)
|
||||
}
|
||||
const min = positive ? 1 : 0
|
||||
// 2 ** 32 === 4294967296
|
||||
const max = 4294967295
|
||||
if (value < min || value > max) {
|
||||
throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateString
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is string}
|
||||
*/
|
||||
|
||||
/** @type {validateString} */
|
||||
function validateString(value, name) {
|
||||
if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateNumber
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [min]
|
||||
* @param {number} [max]
|
||||
* @returns {asserts value is number}
|
||||
*/
|
||||
|
||||
/** @type {validateNumber} */
|
||||
function validateNumber(value, name, min = undefined, max) {
|
||||
if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value)
|
||||
if (
|
||||
(min != null && value < min) ||
|
||||
(max != null && value > max) ||
|
||||
((min != null || max != null) && NumberIsNaN(value))
|
||||
) {
|
||||
throw new ERR_OUT_OF_RANGE(
|
||||
name,
|
||||
`${min != null ? `>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`,
|
||||
value
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateOneOf
|
||||
* @template T
|
||||
* @param {T} value
|
||||
* @param {string} name
|
||||
* @param {T[]} oneOf
|
||||
*/
|
||||
|
||||
/** @type {validateOneOf} */
|
||||
const validateOneOf = hideStackFrames((value, name, oneOf) => {
|
||||
if (!ArrayPrototypeIncludes(oneOf, value)) {
|
||||
const allowed = ArrayPrototypeJoin(
|
||||
ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? `'${v}'` : String(v))),
|
||||
', '
|
||||
)
|
||||
const reason = 'must be one of: ' + allowed
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, reason)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateBoolean
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is boolean}
|
||||
*/
|
||||
|
||||
/** @type {validateBoolean} */
|
||||
function validateBoolean(value, name) {
|
||||
if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} options
|
||||
* @param {string} key
|
||||
* @param {boolean} defaultValue
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function getOwnPropertyValueOrDefault(options, key, defaultValue) {
|
||||
return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? defaultValue : options[key]
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateObject
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {{
|
||||
* allowArray?: boolean,
|
||||
* allowFunction?: boolean,
|
||||
* nullable?: boolean
|
||||
* }} [options]
|
||||
*/
|
||||
|
||||
/** @type {validateObject} */
|
||||
const validateObject = hideStackFrames((value, name, options = null) => {
|
||||
const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false)
|
||||
const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false)
|
||||
const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false)
|
||||
if (
|
||||
(!nullable && value === null) ||
|
||||
(!allowArray && ArrayIsArray(value)) ||
|
||||
(typeof value !== 'object' && (!allowFunction || typeof value !== 'function'))
|
||||
) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'Object', value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateDictionary - We are using the Web IDL Standard definition
|
||||
* of "dictionary" here, which means any value
|
||||
* whose Type is either Undefined, Null, or
|
||||
* Object (which includes functions).
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @see https://webidl.spec.whatwg.org/#es-dictionary
|
||||
* @see https://tc39.es/ecma262/#table-typeof-operator-results
|
||||
*/
|
||||
|
||||
/** @type {validateDictionary} */
|
||||
const validateDictionary = hideStackFrames((value, name) => {
|
||||
if (value != null && typeof value !== 'object' && typeof value !== 'function') {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @param {number} [minLength]
|
||||
* @returns {asserts value is any[]}
|
||||
*/
|
||||
|
||||
/** @type {validateArray} */
|
||||
const validateArray = hideStackFrames((value, name, minLength = 0) => {
|
||||
if (!ArrayIsArray(value)) {
|
||||
throw new ERR_INVALID_ARG_TYPE(name, 'Array', value)
|
||||
}
|
||||
if (value.length < minLength) {
|
||||
const reason = `must be longer than ${minLength}`
|
||||
throw new ERR_INVALID_ARG_VALUE(name, value, reason)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @callback validateStringArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is string[]}
|
||||
*/
|
||||
|
||||
/** @type {validateStringArray} */
|
||||
function validateStringArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
validateString(value[i], `${name}[${i}]`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateBooleanArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is boolean[]}
|
||||
*/
|
||||
|
||||
/** @type {validateBooleanArray} */
|
||||
function validateBooleanArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
validateBoolean(value[i], `${name}[${i}]`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateAbortSignalArray
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is AbortSignal[]}
|
||||
*/
|
||||
|
||||
/** @type {validateAbortSignalArray} */
|
||||
function validateAbortSignalArray(value, name) {
|
||||
validateArray(value, name)
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
const signal = value[i]
|
||||
const indexedName = `${name}[${i}]`
|
||||
if (signal == null) {
|
||||
throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal)
|
||||
}
|
||||
validateAbortSignal(signal, indexedName)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} signal
|
||||
* @param {string} [name='signal']
|
||||
* @returns {asserts signal is keyof signals}
|
||||
*/
|
||||
function validateSignalName(signal, name = 'signal') {
|
||||
validateString(signal, name)
|
||||
if (signals[signal] === undefined) {
|
||||
if (signals[StringPrototypeToUpperCase(signal)] !== undefined) {
|
||||
throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)')
|
||||
}
|
||||
throw new ERR_UNKNOWN_SIGNAL(signal)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @callback validateBuffer
|
||||
* @param {*} buffer
|
||||
* @param {string} [name='buffer']
|
||||
* @returns {asserts buffer is ArrayBufferView}
|
||||
*/
|
||||
|
||||
/** @type {validateBuffer} */
|
||||
/**
 * Asserts that `buffer` is an ArrayBufferView (Buffer, TypedArray or DataView).
 */
const validateBuffer = hideStackFrames((buffer, name = 'buffer') => {
  if (isArrayBufferView(buffer)) {
    return
  }
  throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer)
})
|
||||
|
||||
/**
|
||||
* @param {string} data
|
||||
* @param {string} encoding
|
||||
*/
|
||||
/**
 * Asserts that `data` is decodable with `encoding`. The only actual check
 * is that hex-encoded data has an even length.
 * @param {string} data
 * @param {string} encoding
 */
function validateEncoding(data, encoding) {
  const normalized = normalizeEncoding(encoding)
  const { length } = data
  if (normalized !== 'hex') {
    return
  }
  if (length % 2 !== 0) {
    throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`)
  }
}
|
||||
|
||||
/**
|
||||
* Check that the port number is not NaN when coerced to a number,
|
||||
* is an integer and that it falls within the legal range of port numbers.
|
||||
* @param {*} port
|
||||
* @param {string} [name='Port']
|
||||
* @param {boolean} [allowZero=true]
|
||||
* @returns {number}
|
||||
*/
|
||||
/**
 * Checks that `port` coerces to an integer in [0, 65535] (zero optionally
 * forbidden) and returns it as a number. Accepts numbers and non-blank
 * strings. The guard order preserves short-circuiting: `+port` is only
 * evaluated for numbers/strings, which never throw on coercion.
 * @param {*} port
 * @param {string} [name='Port']
 * @param {boolean} [allowZero=true]
 * @returns {number}
 */
function validatePort(port, name = 'Port', allowZero = true) {
  if (typeof port !== 'number' && typeof port !== 'string') {
    throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
  }
  if (typeof port === 'string' && StringPrototypeTrim(port).length === 0) {
    throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
  }
  // `+port !== (+port >>> 0)` rejects non-integers, negatives and NaN.
  if (+port !== +port >>> 0 || port > 0xffff || (port === 0 && !allowZero)) {
    throw new ERR_SOCKET_BAD_PORT(name, port, allowZero)
  }
  return port | 0
}
|
||||
|
||||
/**
|
||||
* @callback validateAbortSignal
|
||||
* @param {*} signal
|
||||
* @param {string} name
|
||||
*/
|
||||
|
||||
/** @type {validateAbortSignal} */
|
||||
/**
 * Asserts that `signal` is either undefined (absent) or AbortSignal-like —
 * a non-null object exposing an `aborted` property.
 */
const validateAbortSignal = hideStackFrames((signal, name) => {
  if (signal === undefined) {
    return
  }
  const isSignalLike = signal !== null && typeof signal === 'object' && 'aborted' in signal
  if (!isSignalLike) {
    throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
  }
})
|
||||
|
||||
/**
|
||||
* @callback validateFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validateFunction} */
|
||||
/** Asserts that `value` is callable. */
const validateFunction = hideStackFrames((value, name) => {
  if (typeof value === 'function') {
    return
  }
  throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
})
|
||||
|
||||
/**
|
||||
* @callback validatePlainFunction
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is Function}
|
||||
*/
|
||||
|
||||
/** @type {validatePlainFunction} */
|
||||
/** Asserts that `value` is a plain (non-async) function. */
const validatePlainFunction = hideStackFrames((value, name) => {
  const isCallable = typeof value === 'function'
  // isAsyncFunction is only consulted for callables, as in the original.
  if (!isCallable || isAsyncFunction(value)) {
    throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
  }
})
|
||||
|
||||
/**
|
||||
* @callback validateUndefined
|
||||
* @param {*} value
|
||||
* @param {string} name
|
||||
* @returns {asserts value is undefined}
|
||||
*/
|
||||
|
||||
/** @type {validateUndefined} */
|
||||
/** Asserts that `value` was not provided (is exactly undefined). */
const validateUndefined = hideStackFrames((value, name) => {
  if (value === undefined) {
    return
  }
  throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value)
})
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {T} value
|
||||
* @param {string} name
|
||||
* @param {T[]} union
|
||||
*/
|
||||
/**
 * Asserts that `value` is one of the allowed `union` members.
 * @template T
 * @param {T} value
 * @param {string} name
 * @param {T[]} union
 */
function validateUnion(value, name, union) {
  if (ArrayPrototypeIncludes(union, value)) {
    return
  }
  const rendered = `('${ArrayPrototypeJoin(union, '|')}')`
  throw new ERR_INVALID_ARG_TYPE(name, rendered, value)
}
|
||||
|
||||
/*
|
||||
The rules for the Link header field are described here:
|
||||
https://www.rfc-editor.org/rfc/rfc8288.html#section-3
|
||||
|
||||
This regex validates any string surrounded by angle brackets
|
||||
(not necessarily a valid URI reference) followed by zero or more
|
||||
link-params separated by semicolons.
|
||||
*/
|
||||
const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/
|
||||
|
||||
/**
|
||||
* @param {any} value
|
||||
* @param {string} name
|
||||
*/
|
||||
/**
 * Asserts that `value` is a single RFC 8288 Link field value:
 * `<uri>` optionally followed by `; param[=value]` pairs.
 * @param {any} value
 * @param {string} name
 */
function validateLinkHeaderFormat(value, name) {
  const isWellFormed = typeof value !== 'undefined' && RegExpPrototypeExec(linkValueRegExp, value)
  if (isWellFormed) {
    return
  }
  throw new ERR_INVALID_ARG_VALUE(
    name,
    value,
    'must be an array or string of format "</styles.css>; rel=preload; as=style"'
  )
}
|
||||
|
||||
/**
|
||||
* @param {any} hints
|
||||
* @return {string}
|
||||
*/
|
||||
/**
 * Normalizes early-hints input into a single Link header string.
 * Accepts one link string or an array of link strings; each entry is
 * format-checked before being joined with ', '.
 * @param {any} hints
 * @return {string}
 */
function validateLinkHeaderValue(hints) {
  if (typeof hints === 'string') {
    validateLinkHeaderFormat(hints, 'hints')
    return hints
  }
  if (ArrayIsArray(hints)) {
    const parts = []
    for (const link of hints) {
      validateLinkHeaderFormat(link, 'hints')
      parts.push(link)
    }
    return parts.join(', ')
  }
  throw new ERR_INVALID_ARG_VALUE(
    'hints',
    hints,
    'must be an array or string of format "</styles.css>; rel=preload; as=style"'
  )
}
|
||||
// Public surface of this validators module (a reduced port of Node.js
// lib/internal/validators). All symbols are defined earlier in this file.
module.exports = {
  isInt32,
  isUint32,
  parseFileMode,
  validateArray,
  validateStringArray,
  validateBooleanArray,
  validateAbortSignalArray,
  validateBoolean,
  validateBuffer,
  validateDictionary,
  validateEncoding,
  validateFunction,
  validateInt32,
  validateInteger,
  validateNumber,
  validateObject,
  validateOneOf,
  validatePlainFunction,
  validatePort,
  validateSignalName,
  validateString,
  validateUint32,
  validateUndefined,
  validateUnion,
  validateAbortSignal,
  validateLinkHeaderValue
}
|
||||
35
node_modules/bl/node_modules/readable-stream/lib/ours/browser.js
generated
vendored
Normal file
35
node_modules/bl/node_modules/readable-stream/lib/ours/browser.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
'use strict'

// Browser entry point: always re-export the bundled stream port (the Node.js
// core 'stream' module is unavailable in browsers).
const CustomStream = require('../stream')
const promises = require('../stream/promises')
// Captured before any reassignment below so the original wins.
const originalDestroy = CustomStream.Readable.destroy
module.exports = CustomStream.Readable

// Explicit export naming is needed for ESM
module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
module.exports._isUint8Array = CustomStream._isUint8Array
module.exports.isDisturbed = CustomStream.isDisturbed
module.exports.isErrored = CustomStream.isErrored
module.exports.isReadable = CustomStream.isReadable
module.exports.Readable = CustomStream.Readable
module.exports.Writable = CustomStream.Writable
module.exports.Duplex = CustomStream.Duplex
module.exports.Transform = CustomStream.Transform
module.exports.PassThrough = CustomStream.PassThrough
module.exports.addAbortSignal = CustomStream.addAbortSignal
module.exports.finished = CustomStream.finished
module.exports.destroy = CustomStream.destroy
// NOTE(review): the assignment above is immediately overwritten here;
// presumably intentional so `Readable.destroy` (saved above) is exported
// rather than the namespace-level helper — confirm against upstream.
module.exports.destroy = originalDestroy
module.exports.pipeline = CustomStream.pipeline
module.exports.compose = CustomStream.compose
// Expose the promises API lazily on the stream namespace.
Object.defineProperty(CustomStream, 'promises', {
  configurable: true,
  enumerable: true,
  get() {
    return promises
  }
})
module.exports.Stream = CustomStream.Stream

// Allow default importing
module.exports.default = module.exports
|
||||
341
node_modules/bl/node_modules/readable-stream/lib/ours/errors.js
generated
vendored
Normal file
341
node_modules/bl/node_modules/readable-stream/lib/ours/errors.js
generated
vendored
Normal file
@ -0,0 +1,341 @@
|
||||
'use strict'
|
||||
|
||||
const { format, inspect, AggregateError: CustomAggregateError } = require('./util')
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/errors.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/master/lib/internal/errors.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date (starting from E(...) definitions)
|
||||
with the upstream file.
|
||||
*/
|
||||
|
||||
// Prefer the platform AggregateError; fall back to the bundled shim.
const AggregateError = globalThis.AggregateError || CustomAggregateError
// Brand applied to every error class produced by E().
const kIsNodeError = Symbol('kIsNodeError')
// Primitive type names ERR_INVALID_ARG_TYPE renders as "of type ...".
const kTypes = [
  'string',
  'function',
  'number',
  'object',
  // Accept 'Function' and 'Object' as alternative to the lower cased version.
  'Function',
  'Object',
  'boolean',
  'bigint',
  'symbol'
]
// PascalCase names are treated as constructors ("an instance of ...").
const classRegExp = /^([A-Z][a-z0-9]*)+$/
// Function-name prefix used by hideStackFrames() to trim stack traces.
const nodeInternalPrefix = '__node_internal_'
// Registry of error classes created by E(); exported at the bottom of the file.
const codes = {}
|
||||
// Internal invariant check used by the message formatters below.
// NOTE(review): ERR_INTERNAL_ASSERTION is never registered via E() in this
// file, so a failing assertion would itself throw a TypeError ("not a
// constructor") — confirm against upstream readable-stream.
function assert(value, message) {
  if (value) {
    return
  }
  throw new codes.ERR_INTERNAL_ASSERTION(message)
}
|
||||
|
||||
// Only use this for integers! Decimal numbers do not work with this function.
|
||||
// Inserts '_' thousands separators into a stringified integer, e.g.
// '1234567' -> '1_234_567'. Only use this for integers! Decimal numbers
// do not work with this function. A leading '-' is preserved.
function addNumericalSeparator(val) {
  const start = val[0] === '-' ? 1 : 0
  let cursor = val.length
  let grouped = ''
  // Peel off three digits at a time from the right, stopping while at
  // least one digit remains on the left of the sign.
  while (cursor >= start + 4) {
    grouped = `_${val.slice(cursor - 3, cursor)}${grouped}`
    cursor -= 3
  }
  return `${val.slice(0, cursor)}${grouped}`
}
|
||||
// Resolves an error code's message template against its arguments.
// `msg` is either a formatter function or a printf-style template; in both
// cases the argument count must match exactly (functions may declare fewer
// parameters because default options do not count).
function getMessage(key, msg, args) {
  if (typeof msg === 'function') {
    assert(
      msg.length <= args.length,
      `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).`
    )
    return msg(...args)
  }
  const placeholders = msg.match(/%[dfijoOs]/g)
  const expectedLength = placeholders === null ? 0 : placeholders.length
  assert(
    expectedLength === args.length,
    `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).`
  )
  return args.length === 0 ? msg : format(msg, ...args)
}
|
||||
// Defines an error class for `code` (extending `Base`, default Error) whose
// message is produced by getMessage(), and registers it in `codes`.
function E(code, message, Base) {
  const Parent = Base || Error
  class NodeError extends Parent {
    constructor(...args) {
      super(getMessage(code, message, args))
    }
    toString() {
      return `${this.name} [${code}]: ${this.message}`
    }
  }
  // Re-define name/toString as non-enumerable data properties, mirroring
  // Node.js internals (the class-level toString above is superseded here).
  Object.defineProperties(NodeError.prototype, {
    name: {
      value: Parent.name,
      writable: true,
      enumerable: false,
      configurable: true
    },
    toString: {
      value() {
        return `${this.name} [${code}]: ${this.message}`
      },
      writable: true,
      enumerable: false,
      configurable: true
    }
  })
  NodeError.prototype.code = code
  NodeError.prototype[kIsNodeError] = true
  codes[code] = NodeError
}
|
||||
// Renames `fn` with the internal prefix so stack traces can be cut off at
// the outermost hidden frame, then returns the same function.
function hideStackFrames(fn) {
  const hiddenName = nodeInternalPrefix + fn.name
  Object.defineProperty(fn, 'name', { value: hiddenName })
  return fn
}
|
||||
// Combines two errors into one. With only one (or identical) errors, that
// error is returned as-is; if the outer error already aggregates, the inner
// one is appended; otherwise a new AggregateError wraps both, inheriting the
// outer error's message and code.
function aggregateTwoErrors(innerError, outerError) {
  if (!innerError || !outerError || innerError === outerError) {
    return innerError || outerError
  }
  if (Array.isArray(outerError.errors)) {
    // `outerError` is already an AggregateError: just append.
    outerError.errors.push(innerError)
    return outerError
  }
  const combined = new AggregateError([outerError, innerError], outerError.message)
  combined.code = outerError.code
  return combined
}
|
||||
/**
 * Error signalling that an operation was cancelled (code 'ABORT_ERR'),
 * mirroring Node.js's internal AbortError. `options` is forwarded to the
 * Error constructor (e.g. { cause }) and must be an object when present.
 */
class AbortError extends Error {
  constructor(message = 'The operation was aborted', options = undefined) {
    const hasOptions = options !== undefined
    if (hasOptions && typeof options !== 'object') {
      throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options)
    }
    super(message, options)
    this.name = 'AbortError'
    this.code = 'ABORT_ERR'
  }
}
|
||||
E('ERR_ASSERTION', '%s', Error)
// Builds messages like: The "x" argument must be of type string or an
// instance of Buffer. Received type number (42)
E(
  'ERR_INVALID_ARG_TYPE',
  (name, expected, actual) => {
    assert(typeof name === 'string', "'name' must be a string")
    if (!Array.isArray(expected)) {
      expected = [expected]
    }
    let msg = 'The '
    if (name.endsWith(' argument')) {
      // For cases like 'first argument'
      msg += `${name} `
    } else {
      msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} `
    }
    msg += 'must be '
    // Partition the expected entries: primitive type names, constructor
    // names (PascalCase), and anything else verbatim.
    const types = []
    const instances = []
    const other = []
    for (const value of expected) {
      assert(typeof value === 'string', 'All expected entries have to be of type string')
      if (kTypes.includes(value)) {
        types.push(value.toLowerCase())
      } else if (classRegExp.test(value)) {
        instances.push(value)
      } else {
        assert(value !== 'object', 'The value "object" should be written as "Object"')
        other.push(value)
      }
    }

    // Special handle `object` in case other instances are allowed to outline
    // the differences between each other.
    if (instances.length > 0) {
      const pos = types.indexOf('object')
      if (pos !== -1) {
        // BUG FIX: was `types.splice(types, pos, 1)`, which coerces the array
        // to NaN (-> start index 0), deletes `pos` elements and inserts the
        // number 1 into `types`. The intent (per Node.js lib/internal/errors)
        // is to remove the single 'object' entry at index `pos`.
        types.splice(pos, 1)
        instances.push('Object')
      }
    }
    if (types.length > 0) {
      switch (types.length) {
        case 1:
          msg += `of type ${types[0]}`
          break
        case 2:
          msg += `one of type ${types[0]} or ${types[1]}`
          break
        default: {
          const last = types.pop()
          msg += `one of type ${types.join(', ')}, or ${last}`
        }
      }
      if (instances.length > 0 || other.length > 0) {
        msg += ' or '
      }
    }
    if (instances.length > 0) {
      switch (instances.length) {
        case 1:
          msg += `an instance of ${instances[0]}`
          break
        case 2:
          msg += `an instance of ${instances[0]} or ${instances[1]}`
          break
        default: {
          const last = instances.pop()
          msg += `an instance of ${instances.join(', ')}, or ${last}`
        }
      }
      if (other.length > 0) {
        msg += ' or '
      }
    }
    switch (other.length) {
      case 0:
        break
      case 1:
        if (other[0].toLowerCase() !== other[0]) {
          msg += 'an '
        }
        msg += `${other[0]}`
        break
      case 2:
        msg += `one of ${other[0]} or ${other[1]}`
        break
      default: {
        const last = other.pop()
        msg += `one of ${other.join(', ')}, or ${last}`
      }
    }
    // Describe what was actually received.
    if (actual == null) {
      msg += `. Received ${actual}`
    } else if (typeof actual === 'function' && actual.name) {
      msg += `. Received function ${actual.name}`
    } else if (typeof actual === 'object') {
      const ctor = actual.constructor
      if (ctor !== null && ctor !== undefined && ctor.name) {
        msg += `. Received an instance of ${ctor.name}`
      } else {
        const inspected = inspect(actual, {
          depth: -1
        })
        msg += `. Received ${inspected}`
      }
    } else {
      let inspected = inspect(actual, {
        colors: false
      })
      if (inspected.length > 25) {
        inspected = `${inspected.slice(0, 25)}...`
      }
      msg += `. Received type ${typeof actual} (${inspected})`
    }
    return msg
  },
  TypeError
)
|
||||
// "The argument 'x' is invalid. Received <inspected value>". Dotted names
// are reported as properties, bare names as arguments.
E(
  'ERR_INVALID_ARG_VALUE',
  (name, value, reason = 'is invalid') => {
    // Truncate huge inspected values so messages stay readable.
    let inspected = inspect(value)
    if (inspected.length > 128) {
      inspected = inspected.slice(0, 128) + '...'
    }
    const type = name.includes('.') ? 'property' : 'argument'
    return `The ${type} '${name}' ${reason}. Received ${inspected}`
  },
  TypeError
)
// Raised when a callback/function returns the wrong kind of value.
E(
  'ERR_INVALID_RETURN_VALUE',
  (input, name, value) => {
    // Transpiled optional-chaining residue: describe the value by its
    // constructor name when available, otherwise by its typeof.
    var _value$constructor
    const type =
      value !== null &&
      value !== undefined &&
      (_value$constructor = value.constructor) !== null &&
      _value$constructor !== undefined &&
      _value$constructor.name
        ? `instance of ${value.constructor.name}`
        : `type ${typeof value}`
    return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`
  },
  TypeError
)
|
||||
// Builds messages like: The "a" and "b" arguments must be specified.
// Each argument may itself be an array of alternative names, rendered as
// `"x" or "y"` (matching Node.js lib/internal/errors).
E(
  'ERR_MISSING_ARGS',
  (...args) => {
    assert(args.length > 0, 'At least one arg needs to be specified')
    // BUG FIX: `msg` was declared without an initializer and then built with
    // `+=`, producing messages prefixed with "undefined". Additionally, the
    // arguments were joined into one string and then indexed (`args[0]`
    // yielded the first character, `"`), mangling every message.
    let msg = 'The '
    const len = args.length
    const wrap = (a) => `"${a}"`
    args = args.map((a) => (Array.isArray(a) ? a.map(wrap).join(' or ') : wrap(a)))
    switch (len) {
      case 1:
        msg += `${args[0]} argument`
        break
      case 2:
        msg += `${args[0]} and ${args[1]} arguments`
        break
      default:
        {
          const last = args.pop()
          msg += `${args.join(', ')}, and ${last} arguments`
        }
        break
    }
    return `${msg} must be specified`
  },
  TypeError
)
|
||||
// Range errors render large integers/bigints with '_' separators for
// readability; everything else is inspect()ed.
E(
  'ERR_OUT_OF_RANGE',
  (str, range, input) => {
    assert(range, 'Missing "range" argument')
    let received
    if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) {
      received = addNumericalSeparator(String(input))
    } else if (typeof input === 'bigint') {
      received = String(input)
      if (input > 2n ** 32n || input < -(2n ** 32n)) {
        received = addNumericalSeparator(received)
      }
      received += 'n'
    } else {
      received = inspect(input)
    }
    return `The value of "${str}" is out of range. It must be ${range}. Received ${received}`
  },
  RangeError
)
// Fixed-message stream error codes ('%s' templates are filled by getMessage).
E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error)
E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error)
E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error)
E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error)
E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error)
E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError)
E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error)
E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error)
E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error)
E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error)
E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError)
// Public surface: the shared AbortError, helpers, and the code registry.
module.exports = {
  AbortError,
  aggregateTwoErrors: hideStackFrames(aggregateTwoErrors),
  hideStackFrames,
  codes
}
|
||||
65
node_modules/bl/node_modules/readable-stream/lib/ours/index.js
generated
vendored
Normal file
65
node_modules/bl/node_modules/readable-stream/lib/ours/index.js
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
'use strict'

// Entry point: prefer the Node.js core 'stream' module; the bundled port is
// used as a fallback, or when the user opts out via READABLE_STREAM=disable
// NOTE(review): the env-var name suggests "disable readable-stream", i.e.
// this branch re-exports core streams — confirm against package docs.
const Stream = require('stream')
if (Stream && process.env.READABLE_STREAM === 'disable') {
  const promises = Stream.promises

  // Explicit export naming is needed for ESM
  module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer
  module.exports._isUint8Array = Stream._isUint8Array
  module.exports.isDisturbed = Stream.isDisturbed
  module.exports.isErrored = Stream.isErrored
  module.exports.isReadable = Stream.isReadable
  module.exports.Readable = Stream.Readable
  module.exports.Writable = Stream.Writable
  module.exports.Duplex = Stream.Duplex
  module.exports.Transform = Stream.Transform
  module.exports.PassThrough = Stream.PassThrough
  module.exports.addAbortSignal = Stream.addAbortSignal
  module.exports.finished = Stream.finished
  module.exports.destroy = Stream.destroy
  module.exports.pipeline = Stream.pipeline
  module.exports.compose = Stream.compose
  // Expose the promises API lazily on the stream namespace.
  Object.defineProperty(Stream, 'promises', {
    configurable: true,
    enumerable: true,
    get() {
      return promises
    }
  })
  module.exports.Stream = Stream.Stream
} else {
  const CustomStream = require('../stream')
  const promises = require('../stream/promises')
  // Captured before any reassignment so the original implementation wins
  // in the duplicate `destroy` assignment below.
  const originalDestroy = CustomStream.Readable.destroy
  module.exports = CustomStream.Readable

  // Explicit export naming is needed for ESM
  module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer
  module.exports._isUint8Array = CustomStream._isUint8Array
  module.exports.isDisturbed = CustomStream.isDisturbed
  module.exports.isErrored = CustomStream.isErrored
  module.exports.isReadable = CustomStream.isReadable
  module.exports.Readable = CustomStream.Readable
  module.exports.Writable = CustomStream.Writable
  module.exports.Duplex = CustomStream.Duplex
  module.exports.Transform = CustomStream.Transform
  module.exports.PassThrough = CustomStream.PassThrough
  module.exports.addAbortSignal = CustomStream.addAbortSignal
  module.exports.finished = CustomStream.finished
  module.exports.destroy = CustomStream.destroy
  module.exports.destroy = originalDestroy
  module.exports.pipeline = CustomStream.pipeline
  module.exports.compose = CustomStream.compose
  Object.defineProperty(CustomStream, 'promises', {
    configurable: true,
    enumerable: true,
    get() {
      return promises
    }
  })
  module.exports.Stream = CustomStream.Stream
}

// Allow default importing
module.exports.default = module.exports
|
||||
107
node_modules/bl/node_modules/readable-stream/lib/ours/primordials.js
generated
vendored
Normal file
107
node_modules/bl/node_modules/readable-stream/lib/ours/primordials.js
generated
vendored
Normal file
@ -0,0 +1,107 @@
|
||||
'use strict'
|
||||
|
||||
/*
|
||||
This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at
|
||||
|
||||
https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js
|
||||
|
||||
Don't try to replace with the original file and keep it up to date with the upstream file.
|
||||
*/
|
||||
// Plain-JS stand-ins for Node's primordials: each wrapper calls the
// corresponding prototype/static method on `self`. Unlike real primordials
// these are NOT tamper-proof against prototype patching.
module.exports = {
  ArrayIsArray(self) {
    return Array.isArray(self)
  },
  ArrayPrototypeIncludes(self, el) {
    return self.includes(el)
  },
  ArrayPrototypeIndexOf(self, el) {
    return self.indexOf(el)
  },
  ArrayPrototypeJoin(self, sep) {
    return self.join(sep)
  },
  ArrayPrototypeMap(self, fn) {
    return self.map(fn)
  },
  // NOTE(review): Array.prototype.pop takes no arguments; `el` is ignored.
  ArrayPrototypePop(self, el) {
    return self.pop(el)
  },
  ArrayPrototypePush(self, el) {
    return self.push(el)
  },
  ArrayPrototypeSlice(self, start, end) {
    return self.slice(start, end)
  },
  Error,
  FunctionPrototypeCall(fn, thisArgs, ...args) {
    return fn.call(thisArgs, ...args)
  },
  FunctionPrototypeSymbolHasInstance(self, instance) {
    return Function.prototype[Symbol.hasInstance].call(self, instance)
  },
  MathFloor: Math.floor,
  Number,
  NumberIsInteger: Number.isInteger,
  NumberIsNaN: Number.isNaN,
  NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER,
  NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER,
  NumberParseInt: Number.parseInt,
  ObjectDefineProperties(self, props) {
    return Object.defineProperties(self, props)
  },
  ObjectDefineProperty(self, name, prop) {
    return Object.defineProperty(self, name, prop)
  },
  ObjectGetOwnPropertyDescriptor(self, name) {
    return Object.getOwnPropertyDescriptor(self, name)
  },
  ObjectKeys(obj) {
    return Object.keys(obj)
  },
  ObjectSetPrototypeOf(target, proto) {
    return Object.setPrototypeOf(target, proto)
  },
  Promise,
  PromisePrototypeCatch(self, fn) {
    return self.catch(fn)
  },
  PromisePrototypeThen(self, thenFn, catchFn) {
    return self.then(thenFn, catchFn)
  },
  PromiseReject(err) {
    return Promise.reject(err)
  },
  PromiseResolve(val) {
    return Promise.resolve(val)
  },
  ReflectApply: Reflect.apply,
  RegExpPrototypeTest(self, value) {
    return self.test(value)
  },
  SafeSet: Set,
  String,
  StringPrototypeSlice(self, start, end) {
    return self.slice(start, end)
  },
  StringPrototypeToLowerCase(self) {
    return self.toLowerCase()
  },
  StringPrototypeToUpperCase(self) {
    return self.toUpperCase()
  },
  StringPrototypeTrim(self) {
    return self.trim()
  },
  Symbol,
  SymbolFor: Symbol.for,
  SymbolAsyncIterator: Symbol.asyncIterator,
  SymbolHasInstance: Symbol.hasInstance,
  SymbolIterator: Symbol.iterator,
  // Fall back to unique local symbols on runtimes without explicit
  // resource management (Symbol.dispose / Symbol.asyncDispose).
  SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'),
  SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'),
  TypedArrayPrototypeSet(self, buf, len) {
    return self.set(buf, len)
  },
  Boolean: Boolean,
  Uint8Array
}
|
||||
200
node_modules/bl/node_modules/readable-stream/lib/ours/util.js
generated
vendored
Normal file
200
node_modules/bl/node_modules/readable-stream/lib/ours/util.js
generated
vendored
Normal file
@ -0,0 +1,200 @@
|
||||
'use strict'
|
||||
|
||||
const bufferModule = require('buffer')
// NOTE(review): kResistStopPropagation is destructured here but does not
// appear in ./primordials in this package, so it presumably resolves to
// undefined (making the computed listener-option key harmless) — confirm.
const { kResistStopPropagation, SymbolDispose } = require('./primordials')
// Prefer platform implementations; fall back to the abort-controller polyfill.
const AbortSignal = globalThis.AbortSignal || require('abort-controller').AbortSignal
const AbortController = globalThis.AbortController || require('abort-controller').AbortController
// Constructor shared by all async functions; used by types.isAsyncFunction.
const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor
const Blob = globalThis.Blob || bufferModule.Blob
/* eslint-disable indent */
// When no Blob implementation exists at all, isBlob is always false.
const isBlob =
  typeof Blob !== 'undefined'
    ? function isBlob(b) {
        // eslint-disable-next-line indent
        return b instanceof Blob
      }
    : function isBlob(b) {
        return false
      }
/* eslint-enable indent */
|
||||
|
||||
// Duck-type check: undefined is allowed (signal optional); anything else
// must be a non-null object exposing an `aborted` property.
const validateAbortSignal = (signal, name) => {
  if (signal === undefined) {
    return
  }
  const isSignalLike = signal !== null && typeof signal === 'object' && 'aborted' in signal
  if (!isSignalLike) {
    throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal)
  }
}
|
||||
// Asserts that `value` is callable.
const validateFunction = (value, name) => {
  if (typeof value === 'function') {
    return
  }
  throw new ERR_INVALID_ARG_TYPE(name, 'Function', value)
}
|
||||
|
||||
// This is a simplified version of AggregateError
|
||||
// This is a simplified version of AggregateError for platforms without the
// global: the message is the concatenation of each wrapped error's stack,
// and the original errors are kept on `.errors`.
class AggregateError extends Error {
  constructor(errors) {
    if (!Array.isArray(errors)) {
      throw new TypeError(`Expected input to be an Array, got ${typeof errors}`)
    }
    const stackLines = []
    for (const error of errors) {
      stackLines.push(` ${error.stack}\n`)
    }
    super(stackLines.join(''))
    this.name = 'AggregateError'
    this.errors = errors
  }
}
|
||||
// Minimal replacements for the node:util / node:events helpers the stream
// port needs, so the bundle has no hard dependency on Node internals.
module.exports = {
  AggregateError,
  kEmptyObject: Object.freeze({}),
  // Wraps `callback` so only the first invocation runs (later calls no-op).
  once(callback) {
    let called = false
    return function (...args) {
      if (called) {
        return
      }
      called = true
      callback.apply(this, args)
    }
  },
  // Returns { promise, resolve, reject } with the settlers exposed.
  createDeferredPromise: function () {
    let resolve
    let reject

    // eslint-disable-next-line promise/param-names
    const promise = new Promise((res, rej) => {
      resolve = res
      reject = rej
    })
    return {
      promise,
      resolve,
      reject
    }
  },
  // NOTE(review): unlike util.promisify this takes a ready-to-run thunk, not
  // a function factory — it calls `fn` immediately with a node-style callback.
  promisify(fn) {
    return new Promise((resolve, reject) => {
      fn((err, ...args) => {
        if (err) {
          return reject(err)
        }
        return resolve(...args)
      })
    })
  },
  debuglog() {
    return function () {}
  },
  format(format, ...args) {
    // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args
    // NOTE(review): handles %s %d %i %f %j only, while errors.js getMessage
    // counts %o/%O placeholders too — confirm no template uses %o/%O.
    return format.replace(/%([sdifj])/g, function (...[_unused, type]) {
      const replacement = args.shift()
      if (type === 'f') {
        return replacement.toFixed(6)
      } else if (type === 'j') {
        return JSON.stringify(replacement)
      } else if (type === 's' && typeof replacement === 'object') {
        const ctor = replacement.constructor !== Object ? replacement.constructor.name : ''
        return `${ctor} {}`.trim()
      } else {
        return replacement.toString()
      }
    })
  },
  inspect(value) {
    // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options
    switch (typeof value) {
      case 'string':
        // Pick a quote style that avoids escaping when possible.
        if (value.includes("'")) {
          if (!value.includes('"')) {
            return `"${value}"`
          } else if (!value.includes('`') && !value.includes('${')) {
            return `\`${value}\``
          }
        }
        return `'${value}'`
      case 'number':
        if (isNaN(value)) {
          return 'NaN'
        } else if (Object.is(value, -0)) {
          return String(value)
        }
        return value
      case 'bigint':
        return `${String(value)}n`
      case 'boolean':
      case 'undefined':
        return String(value)
      case 'object':
        return '{}'
    }
  },
  types: {
    isAsyncFunction(fn) {
      return fn instanceof AsyncFunction
    },
    isArrayBufferView(arr) {
      return ArrayBuffer.isView(arr)
    }
  },
  isBlob,
  // No-op deprecation shim: returns `fn` unchanged, message is ignored.
  deprecate(fn, message) {
    return fn
  },
  // Prefer events.addAbortListener when available; otherwise a fallback that
  // mirrors its disposable-return contract.
  // NOTE(review): ERR_INVALID_ARG_TYPE is not imported in this file, so the
  // undefined-signal path would throw a ReferenceError instead — confirm.
  addAbortListener:
    require('events').addAbortListener ||
    function addAbortListener(signal, listener) {
      if (signal === undefined) {
        throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal)
      }
      validateAbortSignal(signal, 'signal')
      validateFunction(listener, 'listener')
      let removeEventListener
      if (signal.aborted) {
        // Already aborted: fire asynchronously, consistent with the event path.
        queueMicrotask(() => listener())
      } else {
        signal.addEventListener('abort', listener, {
          __proto__: null,
          once: true,
          [kResistStopPropagation]: true
        })
        removeEventListener = () => {
          signal.removeEventListener('abort', listener)
        }
      }
      return {
        __proto__: null,
        [SymbolDispose]() {
          var _removeEventListener
          ;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined
            ? undefined
            : _removeEventListener()
        }
      }
    },
  // Combines several AbortSignals into one that aborts when any of them does.
  AbortSignalAny:
    AbortSignal.any ||
    function AbortSignalAny(signals) {
      // Fast path if there is only one signal.
      if (signals.length === 1) {
        return signals[0]
      }
      const ac = new AbortController()
      const abort = () => ac.abort()
      signals.forEach((signal) => {
        validateAbortSignal(signal, 'signals')
        signal.addEventListener('abort', abort, {
          once: true
        })
      })
      // Detach from the sources once aborted so they can be GC'd.
      ac.signal.addEventListener(
        'abort',
        () => {
          signals.forEach((signal) => signal.removeEventListener('abort', abort))
        },
        {
          once: true
        }
      )
      return ac.signal
    }
}
module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom')
|
||||
141
node_modules/bl/node_modules/readable-stream/lib/stream.js
generated
vendored
Normal file
141
node_modules/bl/node_modules/readable-stream/lib/stream.js
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
/* replacement start */
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
/* replacement end */
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
;('use strict')
|
||||
const { ObjectDefineProperty, ObjectKeys, ReflectApply } = require('./ours/primordials')
|
||||
const {
|
||||
promisify: { custom: customPromisify }
|
||||
} = require('./ours/util')
|
||||
const { streamReturningOperators, promiseReturningOperators } = require('./internal/streams/operators')
|
||||
const {
|
||||
codes: { ERR_ILLEGAL_CONSTRUCTOR }
|
||||
} = require('./ours/errors')
|
||||
const compose = require('./internal/streams/compose')
|
||||
const { setDefaultHighWaterMark, getDefaultHighWaterMark } = require('./internal/streams/state')
|
||||
const { pipeline } = require('./internal/streams/pipeline')
|
||||
const { destroyer } = require('./internal/streams/destroy')
|
||||
const eos = require('./internal/streams/end-of-stream')
|
||||
const internalBuffer = {}
|
||||
const promises = require('./stream/promises')
|
||||
const utils = require('./internal/streams/utils')
|
||||
const Stream = (module.exports = require('./internal/streams/legacy').Stream)
|
||||
Stream.isDestroyed = utils.isDestroyed
|
||||
Stream.isDisturbed = utils.isDisturbed
|
||||
Stream.isErrored = utils.isErrored
|
||||
Stream.isReadable = utils.isReadable
|
||||
Stream.isWritable = utils.isWritable
|
||||
Stream.Readable = require('./internal/streams/readable')
|
||||
for (const key of ObjectKeys(streamReturningOperators)) {
|
||||
const op = streamReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return Stream.Readable.from(ReflectApply(op, this, args))
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
for (const key of ObjectKeys(promiseReturningOperators)) {
|
||||
const op = promiseReturningOperators[key]
|
||||
function fn(...args) {
|
||||
if (new.target) {
|
||||
throw ERR_ILLEGAL_CONSTRUCTOR()
|
||||
}
|
||||
return ReflectApply(op, this, args)
|
||||
}
|
||||
ObjectDefineProperty(fn, 'name', {
|
||||
__proto__: null,
|
||||
value: op.name
|
||||
})
|
||||
ObjectDefineProperty(fn, 'length', {
|
||||
__proto__: null,
|
||||
value: op.length
|
||||
})
|
||||
ObjectDefineProperty(Stream.Readable.prototype, key, {
|
||||
__proto__: null,
|
||||
value: fn,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
Stream.Writable = require('./internal/streams/writable')
|
||||
Stream.Duplex = require('./internal/streams/duplex')
|
||||
Stream.Transform = require('./internal/streams/transform')
|
||||
Stream.PassThrough = require('./internal/streams/passthrough')
|
||||
Stream.pipeline = pipeline
|
||||
const { addAbortSignal } = require('./internal/streams/add-abort-signal')
|
||||
Stream.addAbortSignal = addAbortSignal
|
||||
Stream.finished = eos
|
||||
Stream.destroy = destroyer
|
||||
Stream.compose = compose
|
||||
Stream.setDefaultHighWaterMark = setDefaultHighWaterMark
|
||||
Stream.getDefaultHighWaterMark = getDefaultHighWaterMark
|
||||
ObjectDefineProperty(Stream, 'promises', {
|
||||
__proto__: null,
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(pipeline, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.pipeline
|
||||
}
|
||||
})
|
||||
ObjectDefineProperty(eos, customPromisify, {
|
||||
__proto__: null,
|
||||
enumerable: true,
|
||||
get() {
|
||||
return promises.finished
|
||||
}
|
||||
})
|
||||
|
||||
// Backwards-compat with node 0.4.x
|
||||
Stream.Stream = Stream
|
||||
Stream._isUint8Array = function isUint8Array(value) {
|
||||
return value instanceof Uint8Array
|
||||
}
|
||||
Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
}
|
||||
43
node_modules/bl/node_modules/readable-stream/lib/stream/promises.js
generated
vendored
Normal file
43
node_modules/bl/node_modules/readable-stream/lib/stream/promises.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
'use strict'
|
||||
|
||||
const { ArrayPrototypePop, Promise } = require('../ours/primordials')
|
||||
const { isIterable, isNodeStream, isWebStream } = require('../internal/streams/utils')
|
||||
const { pipelineImpl: pl } = require('../internal/streams/pipeline')
|
||||
const { finished } = require('../internal/streams/end-of-stream')
|
||||
require('../../lib/stream.js')
|
||||
function pipeline(...streams) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let signal
|
||||
let end
|
||||
const lastArg = streams[streams.length - 1]
|
||||
if (
|
||||
lastArg &&
|
||||
typeof lastArg === 'object' &&
|
||||
!isNodeStream(lastArg) &&
|
||||
!isIterable(lastArg) &&
|
||||
!isWebStream(lastArg)
|
||||
) {
|
||||
const options = ArrayPrototypePop(streams)
|
||||
signal = options.signal
|
||||
end = options.end
|
||||
}
|
||||
pl(
|
||||
streams,
|
||||
(err, value) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve(value)
|
||||
}
|
||||
},
|
||||
{
|
||||
signal,
|
||||
end
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
module.exports = {
|
||||
finished,
|
||||
pipeline
|
||||
}
|
||||
86
node_modules/bl/node_modules/readable-stream/package.json
generated
vendored
Normal file
86
node_modules/bl/node_modules/readable-stream/package.json
generated
vendored
Normal file
@ -0,0 +1,86 @@
|
||||
{
|
||||
"name": "readable-stream",
|
||||
"version": "4.5.2",
|
||||
"description": "Node.js Streams, a user-land copy of the stream library from Node.js",
|
||||
"homepage": "https://github.com/nodejs/readable-stream",
|
||||
"license": "MIT",
|
||||
"licenses": [
|
||||
{
|
||||
"type": "MIT",
|
||||
"url": "https://choosealicense.com/licenses/mit/"
|
||||
}
|
||||
],
|
||||
"keywords": [
|
||||
"readable",
|
||||
"stream",
|
||||
"pipe"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/nodejs/readable-stream"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/nodejs/readable-stream/issues"
|
||||
},
|
||||
"main": "lib/ours/index.js",
|
||||
"files": [
|
||||
"lib",
|
||||
"LICENSE",
|
||||
"README.md"
|
||||
],
|
||||
"browser": {
|
||||
"util": "./lib/ours/util.js",
|
||||
"./lib/ours/index.js": "./lib/ours/browser.js"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node build/build.mjs",
|
||||
"postbuild": "prettier -w lib test",
|
||||
"test": "tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
|
||||
"test:prepare": "node test/browser/runner-prepare.mjs",
|
||||
"test:browsers": "node test/browser/runner-browser.mjs",
|
||||
"test:bundlers": "node test/browser/runner-node.mjs",
|
||||
"test:readable-stream-only": "node readable-stream-test/runner-prepare.mjs",
|
||||
"coverage": "c8 -c ./c8.json tap --rcfile=./tap.yml test/parallel/test-*.js test/ours/test-*.js",
|
||||
"format": "prettier -w src lib test",
|
||||
"lint": "eslint src"
|
||||
},
|
||||
"dependencies": {
|
||||
"abort-controller": "^3.0.0",
|
||||
"buffer": "^6.0.3",
|
||||
"events": "^3.3.0",
|
||||
"process": "^0.11.10",
|
||||
"string_decoder": "^1.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.17.10",
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.16.7",
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.16.7",
|
||||
"@rollup/plugin-commonjs": "^22.0.0",
|
||||
"@rollup/plugin-inject": "^4.0.4",
|
||||
"@rollup/plugin-node-resolve": "^13.3.0",
|
||||
"@sinonjs/fake-timers": "^9.1.2",
|
||||
"browserify": "^17.0.0",
|
||||
"c8": "^7.11.2",
|
||||
"esbuild": "^0.19.9",
|
||||
"esbuild-plugin-alias": "^0.2.1",
|
||||
"eslint": "^8.15.0",
|
||||
"eslint-config-standard": "^17.0.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-n": "^15.2.0",
|
||||
"eslint-plugin-promise": "^6.0.0",
|
||||
"playwright": "^1.21.1",
|
||||
"prettier": "^2.6.2",
|
||||
"rollup": "^2.72.1",
|
||||
"rollup-plugin-polyfill-node": "^0.9.0",
|
||||
"tap": "^16.2.0",
|
||||
"tap-mocha-reporter": "^5.0.3",
|
||||
"tape": "^5.5.3",
|
||||
"tar": "^6.1.11",
|
||||
"undici": "^5.1.1",
|
||||
"webpack": "^5.72.1",
|
||||
"webpack-cli": "^4.9.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
}
|
||||
123
node_modules/bl/package.json
generated
vendored
Normal file
123
node_modules/bl/package.json
generated
vendored
Normal file
@ -0,0 +1,123 @@
|
||||
{
|
||||
"name": "bl",
|
||||
"version": "6.0.14",
|
||||
"description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
|
||||
"license": "MIT",
|
||||
"main": "bl.js",
|
||||
"scripts": {
|
||||
"lint": "standard *.js test/*.js",
|
||||
"test": "npm run lint && npm run test:types && node test/test.js | faucet",
|
||||
"test:ci": "npm run lint && node test/test.js && npm run test:types",
|
||||
"test:types": "tsc --target esnext --moduleResolution node --allowJs --noEmit test/test.js",
|
||||
"build": "true"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/rvagg/bl.git"
|
||||
},
|
||||
"homepage": "https://github.com/rvagg/bl",
|
||||
"authors": [
|
||||
"Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
|
||||
"Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
|
||||
"Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
|
||||
],
|
||||
"keywords": [
|
||||
"buffer",
|
||||
"buffers",
|
||||
"stream",
|
||||
"awesomesauce"
|
||||
],
|
||||
"dependencies": {
|
||||
"@types/readable-stream": "^4.0.0",
|
||||
"buffer": "^6.0.3",
|
||||
"inherits": "^2.0.4",
|
||||
"readable-stream": "^4.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"faucet": "~0.0.1",
|
||||
"standard": "^17.0.0",
|
||||
"tape": "^5.2.2",
|
||||
"typescript": "~5.5.2"
|
||||
},
|
||||
"release": {
|
||||
"branches": [
|
||||
"master"
|
||||
],
|
||||
"plugins": [
|
||||
[
|
||||
"@semantic-release/commit-analyzer",
|
||||
{
|
||||
"preset": "conventionalcommits",
|
||||
"releaseRules": [
|
||||
{
|
||||
"breaking": true,
|
||||
"release": "major"
|
||||
},
|
||||
{
|
||||
"revert": true,
|
||||
"release": "patch"
|
||||
},
|
||||
{
|
||||
"type": "feat",
|
||||
"release": "minor"
|
||||
},
|
||||
{
|
||||
"type": "fix",
|
||||
"release": "patch"
|
||||
},
|
||||
{
|
||||
"type": "chore",
|
||||
"release": "patch"
|
||||
},
|
||||
{
|
||||
"type": "docs",
|
||||
"release": "patch"
|
||||
},
|
||||
{
|
||||
"type": "test",
|
||||
"release": "patch"
|
||||
},
|
||||
{
|
||||
"scope": "no-release",
|
||||
"release": false
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
[
|
||||
"@semantic-release/release-notes-generator",
|
||||
{
|
||||
"preset": "conventionalcommits",
|
||||
"presetConfig": {
|
||||
"types": [
|
||||
{
|
||||
"type": "feat",
|
||||
"section": "Features"
|
||||
},
|
||||
{
|
||||
"type": "fix",
|
||||
"section": "Bug Fixes"
|
||||
},
|
||||
{
|
||||
"type": "chore",
|
||||
"section": "Trivial Changes"
|
||||
},
|
||||
{
|
||||
"type": "docs",
|
||||
"section": "Trivial Changes"
|
||||
},
|
||||
{
|
||||
"type": "test",
|
||||
"section": "Tests"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"@semantic-release/changelog",
|
||||
"@semantic-release/npm",
|
||||
"@semantic-release/github",
|
||||
"@semantic-release/git"
|
||||
]
|
||||
}
|
||||
}
|
||||
21
node_modules/bl/test/convert.js
generated
vendored
Normal file
21
node_modules/bl/test/convert.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
'use strict'
|
||||
|
||||
const tape = require('tape')
|
||||
const { BufferList, BufferListStream } = require('../')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
tape('convert from BufferList to BufferListStream', (t) => {
|
||||
const data = Buffer.from(`TEST-${Date.now()}`)
|
||||
const bl = new BufferList(data)
|
||||
const bls = new BufferListStream(bl)
|
||||
t.ok(bl.slice().equals(bls.slice()))
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('convert from BufferListStream to BufferList', (t) => {
|
||||
const data = Buffer.from(`TEST-${Date.now()}`)
|
||||
const bls = new BufferListStream(data)
|
||||
const bl = new BufferList(bls)
|
||||
t.ok(bl.slice().equals(bls.slice()))
|
||||
t.end()
|
||||
})
|
||||
492
node_modules/bl/test/indexOf.js
generated
vendored
Normal file
492
node_modules/bl/test/indexOf.js
generated
vendored
Normal file
@ -0,0 +1,492 @@
|
||||
'use strict'
|
||||
|
||||
const tape = require('tape')
|
||||
const BufferList = require('../')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
tape('indexOf single byte needle', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'abcdefg', '12345'])
|
||||
|
||||
t.equal(bl.indexOf('e'), 4)
|
||||
t.equal(bl.indexOf('e', 5), 11)
|
||||
t.equal(bl.indexOf('e', 12), -1)
|
||||
t.equal(bl.indexOf('5'), 18)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf multiple byte needle', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'abcdefg'])
|
||||
|
||||
t.equal(bl.indexOf('ef'), 4)
|
||||
t.equal(bl.indexOf('ef', 5), 11)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf multiple byte needles across buffer boundaries', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'abcdefg'])
|
||||
|
||||
t.equal(bl.indexOf('fgabc'), 5)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf takes a Uint8Array search', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'abcdefg'])
|
||||
const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc
|
||||
|
||||
t.equal(bl.indexOf(search), 5)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf takes a buffer list search', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'abcdefg'])
|
||||
const search = new BufferList('fgabc')
|
||||
|
||||
t.equal(bl.indexOf(search), 5)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf a zero byte needle', (t) => {
|
||||
const b = new BufferList('abcdef')
|
||||
const bufEmpty = Buffer.from('')
|
||||
|
||||
t.equal(b.indexOf(''), 0)
|
||||
t.equal(b.indexOf('', 1), 1)
|
||||
t.equal(b.indexOf('', b.length + 1), b.length)
|
||||
t.equal(b.indexOf('', Infinity), b.length)
|
||||
t.equal(b.indexOf(bufEmpty), 0)
|
||||
t.equal(b.indexOf(bufEmpty, 1), 1)
|
||||
t.equal(b.indexOf(bufEmpty, b.length + 1), b.length)
|
||||
t.equal(b.indexOf(bufEmpty, Infinity), b.length)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf buffers smaller and larger than the needle', (t) => {
|
||||
const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab'])
|
||||
|
||||
t.equal(bl.indexOf('fgabc'), 5)
|
||||
t.equal(bl.indexOf('fgabc', 6), 12)
|
||||
t.equal(bl.indexOf('fgabc', 13), -1)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
// only present in node 6+
|
||||
;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => {
|
||||
const b = new BufferList('abcdef')
|
||||
|
||||
// test latin1 encoding
|
||||
t.equal(
|
||||
new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
|
||||
.indexOf('d', 0, 'latin1'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from(b.toString('latin1'), 'latin1'))
|
||||
.indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('aa\u00e8aa', 'latin1'))
|
||||
.indexOf('\u00e8', 'latin1'),
|
||||
2
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('\u00e8', 'latin1'))
|
||||
.indexOf('\u00e8', 'latin1'),
|
||||
0
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('\u00e8', 'latin1'))
|
||||
.indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'),
|
||||
0
|
||||
)
|
||||
|
||||
// test binary encoding
|
||||
t.equal(
|
||||
new BufferList(Buffer.from(b.toString('binary'), 'binary'))
|
||||
.indexOf('d', 0, 'binary'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from(b.toString('binary'), 'binary'))
|
||||
.indexOf(Buffer.from('d', 'binary'), 0, 'binary'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('aa\u00e8aa', 'binary'))
|
||||
.indexOf('\u00e8', 'binary'),
|
||||
2
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('\u00e8', 'binary'))
|
||||
.indexOf('\u00e8', 'binary'),
|
||||
0
|
||||
)
|
||||
t.equal(
|
||||
new BufferList(Buffer.from('\u00e8', 'binary'))
|
||||
.indexOf(Buffer.from('\u00e8', 'binary'), 'binary'),
|
||||
0
|
||||
)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('indexOf the entire nodejs10 buffer test suite', (t) => {
|
||||
const b = new BufferList('abcdef')
|
||||
const bufA = Buffer.from('a')
|
||||
const bufBc = Buffer.from('bc')
|
||||
const bufF = Buffer.from('f')
|
||||
const bufZ = Buffer.from('z')
|
||||
|
||||
const stringComparison = 'abcdef'
|
||||
|
||||
t.equal(b.indexOf('a'), 0)
|
||||
t.equal(b.indexOf('a', 1), -1)
|
||||
t.equal(b.indexOf('a', -1), -1)
|
||||
t.equal(b.indexOf('a', -4), -1)
|
||||
t.equal(b.indexOf('a', -b.length), 0)
|
||||
t.equal(b.indexOf('a', NaN), 0)
|
||||
t.equal(b.indexOf('a', -Infinity), 0)
|
||||
t.equal(b.indexOf('a', Infinity), -1)
|
||||
t.equal(b.indexOf('bc'), 1)
|
||||
t.equal(b.indexOf('bc', 2), -1)
|
||||
t.equal(b.indexOf('bc', -1), -1)
|
||||
t.equal(b.indexOf('bc', -3), -1)
|
||||
t.equal(b.indexOf('bc', -5), 1)
|
||||
t.equal(b.indexOf('bc', NaN), 1)
|
||||
t.equal(b.indexOf('bc', -Infinity), 1)
|
||||
t.equal(b.indexOf('bc', Infinity), -1)
|
||||
t.equal(b.indexOf('f'), b.length - 1)
|
||||
t.equal(b.indexOf('z'), -1)
|
||||
|
||||
// empty search tests
|
||||
t.equal(b.indexOf(bufA), 0)
|
||||
t.equal(b.indexOf(bufA, 1), -1)
|
||||
t.equal(b.indexOf(bufA, -1), -1)
|
||||
t.equal(b.indexOf(bufA, -4), -1)
|
||||
t.equal(b.indexOf(bufA, -b.length), 0)
|
||||
t.equal(b.indexOf(bufA, NaN), 0)
|
||||
t.equal(b.indexOf(bufA, -Infinity), 0)
|
||||
t.equal(b.indexOf(bufA, Infinity), -1)
|
||||
t.equal(b.indexOf(bufBc), 1)
|
||||
t.equal(b.indexOf(bufBc, 2), -1)
|
||||
t.equal(b.indexOf(bufBc, -1), -1)
|
||||
t.equal(b.indexOf(bufBc, -3), -1)
|
||||
t.equal(b.indexOf(bufBc, -5), 1)
|
||||
t.equal(b.indexOf(bufBc, NaN), 1)
|
||||
t.equal(b.indexOf(bufBc, -Infinity), 1)
|
||||
t.equal(b.indexOf(bufBc, Infinity), -1)
|
||||
t.equal(b.indexOf(bufF), b.length - 1)
|
||||
t.equal(b.indexOf(bufZ), -1)
|
||||
t.equal(b.indexOf(0x61), 0)
|
||||
t.equal(b.indexOf(0x61, 1), -1)
|
||||
t.equal(b.indexOf(0x61, -1), -1)
|
||||
t.equal(b.indexOf(0x61, -4), -1)
|
||||
t.equal(b.indexOf(0x61, -b.length), 0)
|
||||
t.equal(b.indexOf(0x61, NaN), 0)
|
||||
t.equal(b.indexOf(0x61, -Infinity), 0)
|
||||
t.equal(b.indexOf(0x61, Infinity), -1)
|
||||
t.equal(b.indexOf(0x0), -1)
|
||||
|
||||
// test offsets
|
||||
t.equal(b.indexOf('d', 2), 3)
|
||||
t.equal(b.indexOf('f', 5), 5)
|
||||
t.equal(b.indexOf('f', -1), 5)
|
||||
t.equal(b.indexOf('f', 6), -1)
|
||||
|
||||
t.equal(b.indexOf(Buffer.from('d'), 2), 3)
|
||||
t.equal(b.indexOf(Buffer.from('f'), 5), 5)
|
||||
t.equal(b.indexOf(Buffer.from('f'), -1), 5)
|
||||
t.equal(b.indexOf(Buffer.from('f'), 6), -1)
|
||||
|
||||
t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1)
|
||||
|
||||
// test invalid and uppercase encoding
|
||||
t.equal(b.indexOf('b', 'utf8'), 1)
|
||||
t.equal(b.indexOf('b', 'UTF8'), 1)
|
||||
t.equal(b.indexOf('62', 'HEX'), 1)
|
||||
t.throws(() => b.indexOf('bad', 'enc'), TypeError)
|
||||
|
||||
// test hex encoding
|
||||
t.equal(
|
||||
Buffer.from(b.toString('hex'), 'hex')
|
||||
.indexOf('64', 0, 'hex'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
Buffer.from(b.toString('hex'), 'hex')
|
||||
.indexOf(Buffer.from('64', 'hex'), 0, 'hex'),
|
||||
3
|
||||
)
|
||||
|
||||
// test base64 encoding
|
||||
t.equal(
|
||||
Buffer.from(b.toString('base64'), 'base64')
|
||||
.indexOf('ZA==', 0, 'base64'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
Buffer.from(b.toString('base64'), 'base64')
|
||||
.indexOf(Buffer.from('ZA==', 'base64'), 0, 'base64'),
|
||||
3
|
||||
)
|
||||
|
||||
// test ascii encoding
|
||||
t.equal(
|
||||
Buffer.from(b.toString('ascii'), 'ascii')
|
||||
.indexOf('d', 0, 'ascii'),
|
||||
3
|
||||
)
|
||||
t.equal(
|
||||
Buffer.from(b.toString('ascii'), 'ascii')
|
||||
.indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'),
|
||||
3
|
||||
)
|
||||
|
||||
// test optional offset with passed encoding
|
||||
t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4)
|
||||
t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4)
|
||||
|
||||
{
|
||||
// test usc2 encoding
|
||||
const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
|
||||
|
||||
t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2'))
|
||||
t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2'))
|
||||
t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2'))
|
||||
t.equal(4, twoByteString.indexOf(
|
||||
Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2'))
|
||||
t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2'))
|
||||
}
|
||||
|
||||
const mixedByteStringUcs2 =
|
||||
Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2')
|
||||
|
||||
t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2'))
|
||||
t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2'))
|
||||
t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2'))
|
||||
|
||||
t.equal(
|
||||
6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2'))
|
||||
t.equal(
|
||||
10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2'))
|
||||
t.equal(
|
||||
-1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2'))
|
||||
|
||||
{
|
||||
const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2')
|
||||
|
||||
// Test single char pattern
|
||||
t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2'))
|
||||
let index = twoByteString.indexOf('\u0391', 0, 'ucs2')
|
||||
t.equal(2, index, `Alpha - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u03a3', 0, 'ucs2')
|
||||
t.equal(4, index, `First Sigma - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u03a3', 6, 'ucs2')
|
||||
t.equal(6, index, `Second Sigma - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u0395', 0, 'ucs2')
|
||||
t.equal(8, index, `Epsilon - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u0392', 0, 'ucs2')
|
||||
t.equal(-1, index, `Not beta - at index ${index}`)
|
||||
|
||||
// Test multi-char pattern
|
||||
index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2')
|
||||
t.equal(0, index, `Lambda Alpha - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2')
|
||||
t.equal(2, index, `Alpha Sigma - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2')
|
||||
t.equal(4, index, `Sigma Sigma - at index ${index}`)
|
||||
index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2')
|
||||
t.equal(6, index, `Sigma Epsilon - at index ${index}`)
|
||||
}
|
||||
|
||||
const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395')
|
||||
|
||||
t.equal(5, mixedByteStringUtf8.indexOf('bc'))
|
||||
t.equal(5, mixedByteStringUtf8.indexOf('bc', 5))
|
||||
t.equal(5, mixedByteStringUtf8.indexOf('bc', -8))
|
||||
t.equal(7, mixedByteStringUtf8.indexOf('\u03a3'))
|
||||
t.equal(-1, mixedByteStringUtf8.indexOf('\u0396'))
|
||||
|
||||
// Test complex string indexOf algorithms. Only trigger for long strings.
|
||||
// Long string that isn't a simple repeat of a shorter string.
|
||||
let longString = 'A'
|
||||
for (let i = 66; i < 76; i++) { // from 'B' to 'K'
|
||||
longString = longString + String.fromCharCode(i) + longString
|
||||
}
|
||||
|
||||
const longBufferString = Buffer.from(longString)
|
||||
|
||||
// pattern of 15 chars, repeated every 16 chars in long
|
||||
let pattern = 'ABACABADABACABA'
|
||||
for (let i = 0; i < longBufferString.length - pattern.length; i += 7) {
|
||||
const index = longBufferString.indexOf(pattern, i)
|
||||
t.equal((i + 15) & ~0xf, index,
|
||||
`Long ABACABA...-string at index ${i}`)
|
||||
}
|
||||
|
||||
let index = longBufferString.indexOf('AJABACA')
|
||||
t.equal(510, index, `Long AJABACA, First J - at index ${index}`)
|
||||
index = longBufferString.indexOf('AJABACA', 511)
|
||||
t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`)
|
||||
|
||||
pattern = 'JABACABADABACABA'
|
||||
index = longBufferString.indexOf(pattern)
|
||||
t.equal(511, index, `Long JABACABA..., First J - at index ${index}`)
|
||||
index = longBufferString.indexOf(pattern, 512)
|
||||
t.equal(
|
||||
1535, index, `Long JABACABA..., Second J - at index ${index}`)
|
||||
|
||||
// Search for a non-ASCII string in a pure ASCII string.
|
||||
const asciiString = Buffer.from(
|
||||
'somethingnotatallsinisterwhichalsoworks')
|
||||
t.equal(-1, asciiString.indexOf('\x2061'))
|
||||
t.equal(3, asciiString.indexOf('eth', 0))
|
||||
|
||||
// Search in string containing many non-ASCII chars.
|
||||
const allCodePoints = []
|
||||
for (let i = 0; i < 65536; i++) {
|
||||
allCodePoints[i] = i
|
||||
}
|
||||
|
||||
const allCharsString = String.fromCharCode.apply(String, allCodePoints)
|
||||
const allCharsBufferUtf8 = Buffer.from(allCharsString)
|
||||
const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2')
|
||||
|
||||
// Search for string long enough to trigger complex search with ASCII pattern
|
||||
// and UC16 subject.
|
||||
t.equal(-1, allCharsBufferUtf8.indexOf('notfound'))
|
||||
t.equal(-1, allCharsBufferUcs2.indexOf('notfound'))
|
||||
|
||||
// Needle is longer than haystack, but only because it's encoded as UTF-16
|
||||
t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1)
|
||||
|
||||
t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0)
|
||||
t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1)
|
||||
|
||||
// Haystack has odd length, but the needle is UCS2.
|
||||
t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1)
|
||||
|
||||
{
|
||||
// Find substrings in Utf8.
|
||||
const lengths = [1, 3, 15] // Single char, simple and complex.
|
||||
const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b]
|
||||
for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
|
||||
for (let i = 0; i < indices.length; i++) {
|
||||
const index = indices[i]
|
||||
let length = lengths[lengthIndex]
|
||||
|
||||
if (index + length > 0x7F) {
|
||||
length = 2 * length
|
||||
}
|
||||
|
||||
if (index + length > 0x7FF) {
|
||||
length = 3 * length
|
||||
}
|
||||
|
||||
if (index + length > 0xFFFF) {
|
||||
length = 4 * length
|
||||
}
|
||||
|
||||
const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length)
|
||||
t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8))
|
||||
|
||||
const patternStringUtf8 = patternBufferUtf8.toString()
|
||||
t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// Find substrings in Usc2.
|
||||
const lengths = [2, 4, 16] // Single char, simple and complex.
|
||||
const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0]
|
||||
|
||||
for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) {
|
||||
for (let i = 0; i < indices.length; i++) {
|
||||
const index = indices[i] * 2
|
||||
const length = lengths[lengthIndex]
|
||||
|
||||
const patternBufferUcs2 =
|
||||
allCharsBufferUcs2.slice(index, index + length)
|
||||
t.equal(
|
||||
index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2'))
|
||||
|
||||
const patternStringUcs2 = patternBufferUcs2.toString('ucs2')
|
||||
t.equal(
|
||||
index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[
|
||||
() => {},
|
||||
{},
|
||||
[]
|
||||
].forEach((val) => {
|
||||
t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`)
|
||||
})
|
||||
|
||||
// Test weird offset arguments.
|
||||
// The following offsets coerce to NaN or 0, searching the whole Buffer
|
||||
t.equal(b.indexOf('b', undefined), 1)
|
||||
t.equal(b.indexOf('b', {}), 1)
|
||||
t.equal(b.indexOf('b', 0), 1)
|
||||
t.equal(b.indexOf('b', null), 1)
|
||||
t.equal(b.indexOf('b', []), 1)
|
||||
|
||||
// The following offset coerces to 2, in other words +[2] === 2
|
||||
t.equal(b.indexOf('b', [2]), -1)
|
||||
|
||||
// Behavior should match String.indexOf()
|
||||
t.equal(
|
||||
b.indexOf('b', undefined),
|
||||
stringComparison.indexOf('b', undefined))
|
||||
t.equal(
|
||||
b.indexOf('b', {}),
|
||||
stringComparison.indexOf('b', {}))
|
||||
t.equal(
|
||||
b.indexOf('b', 0),
|
||||
stringComparison.indexOf('b', 0))
|
||||
t.equal(
|
||||
b.indexOf('b', null),
|
||||
stringComparison.indexOf('b', null))
|
||||
t.equal(
|
||||
b.indexOf('b', []),
|
||||
stringComparison.indexOf('b', []))
|
||||
t.equal(
|
||||
b.indexOf('b', [2]),
|
||||
stringComparison.indexOf('b', [2]))
|
||||
|
||||
// test truncation of Number arguments to uint8
|
||||
{
|
||||
const buf = Buffer.from('this is a test')
|
||||
|
||||
t.equal(buf.indexOf(0x6973), 3)
|
||||
t.equal(buf.indexOf(0x697320), 4)
|
||||
t.equal(buf.indexOf(0x69732069), 2)
|
||||
t.equal(buf.indexOf(0x697374657374), 0)
|
||||
t.equal(buf.indexOf(0x69737374), 0)
|
||||
t.equal(buf.indexOf(0x69737465), 11)
|
||||
t.equal(buf.indexOf(0x69737465), 11)
|
||||
t.equal(buf.indexOf(-140), 0)
|
||||
t.equal(buf.indexOf(-152), 1)
|
||||
t.equal(buf.indexOf(0xff), -1)
|
||||
t.equal(buf.indexOf(0xffff), -1)
|
||||
}
|
||||
|
||||
// Test that Uint8Array arguments are okay.
|
||||
{
|
||||
const needle = new Uint8Array([0x66, 0x6f, 0x6f])
|
||||
const haystack = new BufferList(Buffer.from('a foo b foo'))
|
||||
t.equal(haystack.indexOf(needle), 2)
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
32
node_modules/bl/test/isBufferList.js
generated
vendored
Normal file
32
node_modules/bl/test/isBufferList.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
'use strict'
|
||||
|
||||
const tape = require('tape')
|
||||
const { BufferList, BufferListStream } = require('../')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
tape('isBufferList positives', (t) => {
|
||||
t.ok(BufferList.isBufferList(new BufferList()))
|
||||
t.ok(BufferList.isBufferList(new BufferListStream()))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('isBufferList negatives', (t) => {
|
||||
const types = [
|
||||
null,
|
||||
undefined,
|
||||
NaN,
|
||||
true,
|
||||
false,
|
||||
{},
|
||||
[],
|
||||
Buffer.alloc(0),
|
||||
[Buffer.alloc(0)]
|
||||
]
|
||||
|
||||
for (const obj of types) {
|
||||
t.notOk(BufferList.isBufferList(obj))
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
948
node_modules/bl/test/test.js
generated
vendored
Normal file
948
node_modules/bl/test/test.js
generated
vendored
Normal file
@ -0,0 +1,948 @@
|
||||
// @ts-check
|
||||
'use strict'
|
||||
|
||||
const tape = require('tape')
|
||||
const crypto = require('crypto')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const os = require('os')
|
||||
const BufferListStream = require('../')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
/**
|
||||
* This typedef allows us to add _bufs to the API without declaring it publicly on types.
|
||||
* @typedef { BufferListStream & { _bufs?: Buffer[] }} BufferListStreamWithPrivate
|
||||
*/
|
||||
|
||||
/**
|
||||
* Just for typechecking in js
|
||||
* @type { NodeJS.Process & { browser?: boolean }}
|
||||
*/
|
||||
|
||||
const process = globalThis.process
|
||||
|
||||
/** @type {BufferEncoding[]} */
|
||||
const encodings = ['ascii', 'utf8', 'utf-8', 'hex', 'binary', 'base64']
|
||||
|
||||
if (process.browser) {
|
||||
encodings.push(
|
||||
'ucs2',
|
||||
'ucs-2',
|
||||
'utf16le',
|
||||
/**
|
||||
* This alias is not in typescript typings for BufferEncoding. Still have to fix
|
||||
* @see https://nodejs.org/api/buffer.html#buffers-and-character-encodings
|
||||
*/
|
||||
// @ts-ignore
|
||||
'utf-16le'
|
||||
)
|
||||
}
|
||||
|
||||
require('./indexOf')
|
||||
require('./isBufferList')
|
||||
require('./convert')
|
||||
|
||||
tape('single bytes from single buffer', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
|
||||
t.equal(bl.length, 4)
|
||||
t.equal(bl.get(-1), undefined)
|
||||
t.equal(bl.get(0), 97)
|
||||
t.equal(bl.get(1), 98)
|
||||
t.equal(bl.get(2), 99)
|
||||
t.equal(bl.get(3), 100)
|
||||
t.equal(bl.get(4), undefined)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('single bytes from multiple buffers', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
|
||||
t.equal(bl.length, 10)
|
||||
|
||||
t.equal(bl.get(0), 97)
|
||||
t.equal(bl.get(1), 98)
|
||||
t.equal(bl.get(2), 99)
|
||||
t.equal(bl.get(3), 100)
|
||||
t.equal(bl.get(4), 101)
|
||||
t.equal(bl.get(5), 102)
|
||||
t.equal(bl.get(6), 103)
|
||||
t.equal(bl.get(7), 104)
|
||||
t.equal(bl.get(8), 105)
|
||||
t.equal(bl.get(9), 106)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('multi bytes from single buffer', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
|
||||
t.equal(bl.length, 4)
|
||||
|
||||
t.equal(bl.slice(0, 4).toString('ascii'), 'abcd')
|
||||
t.equal(bl.slice(0, 3).toString('ascii'), 'abc')
|
||||
t.equal(bl.slice(1, 4).toString('ascii'), 'bcd')
|
||||
t.equal(bl.slice(-4, -1).toString('ascii'), 'abc')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('multi bytes from single buffer (negative indexes)', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('buffer'))
|
||||
|
||||
t.equal(bl.length, 6)
|
||||
|
||||
t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe')
|
||||
t.equal(bl.slice(-6, -2).toString('ascii'), 'buff')
|
||||
t.equal(bl.slice(-5, -2).toString('ascii'), 'uff')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('multiple bytes from multiple buffers', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
|
||||
t.equal(bl.length, 10)
|
||||
|
||||
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
|
||||
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
|
||||
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
|
||||
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
|
||||
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
|
||||
t.equal(bl.slice(-7, -4).toString('ascii'), 'def')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('multiple bytes from multiple buffer lists', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(new BufferListStream([Buffer.from('abcd'), Buffer.from('efg')]))
|
||||
bl.append(new BufferListStream([Buffer.from('hi'), Buffer.from('j')]))
|
||||
|
||||
t.equal(bl.length, 10)
|
||||
|
||||
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
|
||||
|
||||
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
|
||||
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
|
||||
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
|
||||
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
// same data as previous test, just using nested constructors
|
||||
tape('multiple bytes from crazy nested buffer lists', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(
|
||||
new BufferListStream([
|
||||
new BufferListStream([
|
||||
new BufferListStream(Buffer.from('abc')),
|
||||
Buffer.from('d'),
|
||||
new BufferListStream(Buffer.from('efg'))
|
||||
]),
|
||||
new BufferListStream([Buffer.from('hi')]),
|
||||
new BufferListStream(Buffer.from('j'))
|
||||
])
|
||||
)
|
||||
|
||||
t.equal(bl.length, 10)
|
||||
|
||||
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
|
||||
|
||||
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
|
||||
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
|
||||
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
|
||||
t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('append accepts arrays of Buffers', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abc'))
|
||||
bl.append([Buffer.from('def')])
|
||||
bl.append([Buffer.from('ghi'), Buffer.from('jkl')])
|
||||
bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')])
|
||||
t.equal(bl.length, 26)
|
||||
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('append accepts arrays of Uint8Arrays', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(new Uint8Array([97, 98, 99]))
|
||||
bl.append([Uint8Array.from([100, 101, 102])])
|
||||
bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])])
|
||||
bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])])
|
||||
t.equal(bl.length, 26)
|
||||
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('append accepts arrays of BufferLists', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abc'))
|
||||
bl.append([new BufferListStream('def')])
|
||||
bl.append(
|
||||
new BufferListStream([Buffer.from('ghi'), new BufferListStream('jkl')])
|
||||
)
|
||||
bl.append([
|
||||
Buffer.from('mnop'),
|
||||
new BufferListStream([Buffer.from('qrstu'), Buffer.from('vwxyz')])
|
||||
])
|
||||
t.equal(bl.length, 26)
|
||||
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('append chainable', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
t.ok(bl.append(Buffer.from('abcd')) === bl)
|
||||
t.ok(bl.append([Buffer.from('abcd')]) === bl)
|
||||
t.ok(bl.append(new BufferListStream(Buffer.from('abcd'))) === bl)
|
||||
t.ok(bl.append([new BufferListStream(Buffer.from('abcd'))]) === bl)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('append chainable (test results)', function (t) {
|
||||
const bl = new BufferListStream('abc')
|
||||
.append([new BufferListStream('def')])
|
||||
.append(
|
||||
new BufferListStream([Buffer.from('ghi'), new BufferListStream('jkl')])
|
||||
)
|
||||
.append([
|
||||
Buffer.from('mnop'),
|
||||
new BufferListStream([Buffer.from('qrstu'), Buffer.from('vwxyz')])
|
||||
])
|
||||
|
||||
t.equal(bl.length, 26)
|
||||
t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('consuming from multiple buffers', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
|
||||
t.equal(bl.length, 10)
|
||||
|
||||
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
|
||||
|
||||
bl.consume(3)
|
||||
t.equal(bl.length, 7)
|
||||
t.equal(bl.slice(0, 7).toString('ascii'), 'defghij')
|
||||
|
||||
bl.consume(2)
|
||||
t.equal(bl.length, 5)
|
||||
t.equal(bl.slice(0, 5).toString('ascii'), 'fghij')
|
||||
|
||||
bl.consume(1)
|
||||
t.equal(bl.length, 4)
|
||||
t.equal(bl.slice(0, 4).toString('ascii'), 'ghij')
|
||||
|
||||
bl.consume(1)
|
||||
t.equal(bl.length, 3)
|
||||
t.equal(bl.slice(0, 3).toString('ascii'), 'hij')
|
||||
|
||||
bl.consume(2)
|
||||
t.equal(bl.length, 1)
|
||||
t.equal(bl.slice(0, 1).toString('ascii'), 'j')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('complete consumption', function (t) {
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('a'))
|
||||
bl.append(Buffer.from('b'))
|
||||
|
||||
bl.consume(2)
|
||||
|
||||
t.equal(bl.length, 0)
|
||||
t.equal(bl._bufs.length, 0)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readUInt8 / readInt8', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(3)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x1
|
||||
buf2[1] = 0x3
|
||||
buf2[2] = 0x4
|
||||
buf3[0] = 0x23
|
||||
buf3[1] = 0x42
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
t.equal(bl.readUInt8(), 0x1)
|
||||
t.equal(bl.readUInt8(2), 0x3)
|
||||
t.equal(bl.readInt8(2), 0x3)
|
||||
t.equal(bl.readUInt8(3), 0x4)
|
||||
t.equal(bl.readInt8(3), 0x4)
|
||||
t.equal(bl.readUInt8(4), 0x23)
|
||||
t.equal(bl.readInt8(4), 0x23)
|
||||
t.equal(bl.readUInt8(5), 0x42)
|
||||
t.equal(bl.readInt8(5), 0x42)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(3)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x1
|
||||
buf2[1] = 0x3
|
||||
buf2[2] = 0x4
|
||||
buf3[0] = 0x23
|
||||
buf3[1] = 0x42
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
t.equal(bl.readUInt16BE(), 0x0100)
|
||||
t.equal(bl.readUInt16LE(), 0x0001)
|
||||
t.equal(bl.readUInt16BE(2), 0x0304)
|
||||
t.equal(bl.readUInt16LE(2), 0x0403)
|
||||
t.equal(bl.readInt16BE(2), 0x0304)
|
||||
t.equal(bl.readInt16LE(2), 0x0403)
|
||||
t.equal(bl.readUInt16BE(3), 0x0423)
|
||||
t.equal(bl.readUInt16LE(3), 0x2304)
|
||||
t.equal(bl.readInt16BE(3), 0x0423)
|
||||
t.equal(bl.readInt16LE(3), 0x2304)
|
||||
t.equal(bl.readUInt16BE(4), 0x2342)
|
||||
t.equal(bl.readUInt16LE(4), 0x4223)
|
||||
t.equal(bl.readInt16BE(4), 0x2342)
|
||||
t.equal(bl.readInt16LE(4), 0x4223)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(3)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x1
|
||||
buf2[1] = 0x3
|
||||
buf2[2] = 0x4
|
||||
buf3[0] = 0x23
|
||||
buf3[1] = 0x42
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
t.equal(bl.readUInt32BE(), 0x01000304)
|
||||
t.equal(bl.readUInt32LE(), 0x04030001)
|
||||
t.equal(bl.readUInt32BE(2), 0x03042342)
|
||||
t.equal(bl.readUInt32LE(2), 0x42230403)
|
||||
t.equal(bl.readInt32BE(2), 0x03042342)
|
||||
t.equal(bl.readInt32LE(2), 0x42230403)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readBigUInt64LE / readBigUInt64BE / readBigInt64LE / readBigInt64BE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(2)
|
||||
const buf4 = Buffer.alloc(5)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x05
|
||||
buf2[0] = 0x07
|
||||
|
||||
buf2[1] = 0x03
|
||||
buf2[2] = 0x04
|
||||
buf3[0] = 0x23
|
||||
buf3[1] = 0x42
|
||||
buf4[0] = 0x00
|
||||
buf4[1] = 0x01
|
||||
buf4[2] = 0x02
|
||||
buf4[3] = 0x03
|
||||
|
||||
buf4[4] = 0x04
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
bl.append(buf4)
|
||||
|
||||
t.equal(bl.readBigUInt64BE(2), 0x0304234200010203n)
|
||||
t.equal(bl.readBigUInt64LE(2), 0x0302010042230403n)
|
||||
t.equal(bl.readBigInt64BE(2), 0x0304234200010203n)
|
||||
t.equal(bl.readBigInt64LE(2), 0x0302010042230403n)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(3)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf2[0] = 0x2
|
||||
buf2[1] = 0x3
|
||||
buf2[2] = 0x4
|
||||
buf3[0] = 0x23
|
||||
buf3[1] = 0x42
|
||||
buf3[2] = 0x61
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
t.equal(bl.readUIntBE(1, 1), 0x02)
|
||||
t.equal(bl.readUIntBE(1, 2), 0x0203)
|
||||
t.equal(bl.readUIntBE(1, 3), 0x020304)
|
||||
t.equal(bl.readUIntBE(1, 4), 0x02030423)
|
||||
t.equal(bl.readUIntBE(1, 5), 0x0203042342)
|
||||
t.equal(bl.readUIntBE(1, 6), 0x020304234261)
|
||||
t.equal(bl.readUIntLE(1, 1), 0x02)
|
||||
t.equal(bl.readUIntLE(1, 2), 0x0302)
|
||||
t.equal(bl.readUIntLE(1, 3), 0x040302)
|
||||
t.equal(bl.readUIntLE(1, 4), 0x23040302)
|
||||
t.equal(bl.readUIntLE(1, 5), 0x4223040302)
|
||||
t.equal(bl.readUIntLE(1, 6), 0x614223040302)
|
||||
t.equal(bl.readIntBE(1, 1), 0x02)
|
||||
t.equal(bl.readIntBE(1, 2), 0x0203)
|
||||
t.equal(bl.readIntBE(1, 3), 0x020304)
|
||||
t.equal(bl.readIntBE(1, 4), 0x02030423)
|
||||
t.equal(bl.readIntBE(1, 5), 0x0203042342)
|
||||
t.equal(bl.readIntBE(1, 6), 0x020304234261)
|
||||
t.equal(bl.readIntLE(1, 1), 0x02)
|
||||
t.equal(bl.readIntLE(1, 2), 0x0302)
|
||||
t.equal(bl.readIntLE(1, 3), 0x040302)
|
||||
t.equal(bl.readIntLE(1, 4), 0x23040302)
|
||||
t.equal(bl.readIntLE(1, 5), 0x4223040302)
|
||||
t.equal(bl.readIntLE(1, 6), 0x614223040302)
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readFloatLE / readFloatBE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(3)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x01
|
||||
buf2[1] = 0x00
|
||||
buf2[2] = 0x00
|
||||
buf3[0] = 0x80
|
||||
buf3[1] = 0x3f
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
const canonical = Buffer.concat([buf1, buf2, buf3])
|
||||
t.equal(bl.readFloatLE(), canonical.readFloatLE())
|
||||
t.equal(bl.readFloatBE(), canonical.readFloatBE())
|
||||
t.equal(bl.readFloatLE(2), canonical.readFloatLE(2))
|
||||
t.equal(bl.readFloatBE(2), canonical.readFloatBE(2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test readDoubleLE / readDoubleBE', function (t) {
|
||||
const buf1 = Buffer.alloc(1)
|
||||
const buf2 = Buffer.alloc(3)
|
||||
const buf3 = Buffer.alloc(10)
|
||||
const bl = new BufferListStream()
|
||||
|
||||
buf1[0] = 0x01
|
||||
buf2[1] = 0x55
|
||||
buf2[2] = 0x55
|
||||
buf3[0] = 0x55
|
||||
buf3[1] = 0x55
|
||||
buf3[2] = 0x55
|
||||
buf3[3] = 0x55
|
||||
buf3[4] = 0xd5
|
||||
buf3[5] = 0x3f
|
||||
|
||||
bl.append(buf1)
|
||||
bl.append(buf2)
|
||||
bl.append(buf3)
|
||||
|
||||
const canonical = Buffer.concat([buf1, buf2, buf3])
|
||||
t.equal(bl.readDoubleBE(), canonical.readDoubleBE())
|
||||
t.equal(bl.readDoubleLE(), canonical.readDoubleLE())
|
||||
t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2))
|
||||
t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test toString', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
|
||||
t.equal(bl.toString('ascii', 0, 10), 'abcdefghij')
|
||||
t.equal(bl.toString('ascii', 3, 10), 'defghij')
|
||||
t.equal(bl.toString('ascii', 3, 6), 'def')
|
||||
t.equal(bl.toString('ascii', 3, 8), 'defgh')
|
||||
t.equal(bl.toString('ascii', 5, 10), 'fghij')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test toString encoding', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
const b = Buffer.from('abcdefghij\xff\x00')
|
||||
|
||||
bl.append(Buffer.from('abcd'))
|
||||
bl.append(Buffer.from('efg'))
|
||||
bl.append(Buffer.from('hi'))
|
||||
bl.append(Buffer.from('j'))
|
||||
bl.append(Buffer.from('\xff\x00'))
|
||||
|
||||
encodings.forEach(function (enc) {
|
||||
t.equal(bl.toString(enc), b.toString(enc), enc)
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('uninitialized memory', function (t) {
|
||||
const secret = crypto.randomBytes(256)
|
||||
for (let i = 0; i < 1e6; i++) {
|
||||
const clone = Buffer.from(secret)
|
||||
const bl = new BufferListStream()
|
||||
bl.append(Buffer.from('a'))
|
||||
bl.consume(-1024)
|
||||
const buf = bl.slice(1)
|
||||
if (buf.indexOf(clone) !== -1) {
|
||||
t.fail(`Match (at ${i})`)
|
||||
break
|
||||
}
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
|
||||
!process.browser && tape('test stream', function (t) {
|
||||
const random = crypto.randomBytes(65534)
|
||||
|
||||
const bl = new BufferListStream((err, buf) => {
|
||||
t.ok(Buffer.isBuffer(buf))
|
||||
t.ok(err === null)
|
||||
t.ok(random.equals(bl.slice()))
|
||||
t.ok(random.equals(buf.slice()))
|
||||
|
||||
bl.pipe(fs.createWriteStream(path.join(os.tmpdir(), 'bl_test_rnd_out.dat')))
|
||||
.on('close', function () {
|
||||
const rndhash = crypto.createHash('md5').update(random).digest('hex')
|
||||
const md5sum = crypto.createHash('md5')
|
||||
const s = fs.createReadStream(path.join(os.tmpdir(), 'bl_test_rnd_out.dat'))
|
||||
|
||||
s.on('data', md5sum.update.bind(md5sum))
|
||||
s.on('end', function () {
|
||||
t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
fs.writeFileSync(path.join(os.tmpdir(), 'bl_test_rnd.dat'), random)
|
||||
fs.createReadStream(path.join(os.tmpdir(), 'bl_test_rnd.dat')).pipe(bl)
|
||||
})
|
||||
|
||||
tape('instantiation with Buffer', function (t) {
|
||||
const buf = crypto.randomBytes(1024)
|
||||
const buf2 = crypto.randomBytes(1024)
|
||||
let b = BufferListStream(buf)
|
||||
|
||||
t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer')
|
||||
b = BufferListStream([buf, buf2])
|
||||
t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test String appendage', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
const b = Buffer.from('abcdefghij\xff\x00')
|
||||
|
||||
bl.append('abcd')
|
||||
bl.append('efg')
|
||||
bl.append('hi')
|
||||
bl.append('j')
|
||||
bl.append('\xff\x00')
|
||||
|
||||
encodings.forEach(function (enc) {
|
||||
t.equal(bl.toString(enc), b.toString(enc))
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('test Number appendage', function (t) {
|
||||
const bl = new BufferListStream()
|
||||
const b = Buffer.from('1234567890')
|
||||
|
||||
bl.append(1234)
|
||||
bl.append(567)
|
||||
bl.append(89)
|
||||
bl.append(0)
|
||||
|
||||
encodings.forEach(function (enc) {
|
||||
t.equal(bl.toString(enc), b.toString(enc))
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('write nothing, should get empty buffer', function (t) {
|
||||
t.plan(3)
|
||||
BufferListStream(function (err, data) {
|
||||
t.notOk(err, 'no error')
|
||||
t.ok(Buffer.isBuffer(data), 'got a buffer')
|
||||
t.equal(0, data.length, 'got a zero-length buffer')
|
||||
t.end()
|
||||
}).end()
|
||||
})
|
||||
|
||||
tape('unicode string', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const inp1 = '\u2600'
|
||||
const inp2 = '\u2603'
|
||||
const exp = inp1 + ' and ' + inp2
|
||||
const bl = BufferListStream()
|
||||
|
||||
bl.write(inp1)
|
||||
bl.write(' and ')
|
||||
bl.write(inp2)
|
||||
t.equal(exp, bl.toString())
|
||||
t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex'))
|
||||
})
|
||||
|
||||
tape('should emit finish', function (t) {
|
||||
const source = BufferListStream()
|
||||
const dest = BufferListStream()
|
||||
|
||||
source.write('hello')
|
||||
source.pipe(dest)
|
||||
|
||||
dest.on('finish', function () {
|
||||
t.equal(dest.toString('utf8'), 'hello')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
tape('basic copy', function (t) {
|
||||
const buf = crypto.randomBytes(1024)
|
||||
const buf2 = Buffer.alloc(1024)
|
||||
const b = BufferListStream(buf)
|
||||
|
||||
b.copy(buf2)
|
||||
t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('copy after many appends', function (t) {
|
||||
const buf = crypto.randomBytes(512)
|
||||
const buf2 = Buffer.alloc(1024)
|
||||
const b = BufferListStream(buf)
|
||||
|
||||
b.append(buf)
|
||||
b.copy(buf2)
|
||||
t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('copy at a precise position', function (t) {
|
||||
const buf = crypto.randomBytes(1004)
|
||||
const buf2 = Buffer.alloc(1024)
|
||||
const b = BufferListStream(buf)
|
||||
|
||||
b.copy(buf2, 20)
|
||||
t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('copy starting from a precise location', function (t) {
|
||||
const buf = crypto.randomBytes(10)
|
||||
const buf2 = Buffer.alloc(5)
|
||||
const b = BufferListStream(buf)
|
||||
|
||||
b.copy(buf2, 0, 5)
|
||||
t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('copy in an interval', function (t) {
|
||||
const rnd = crypto.randomBytes(10)
|
||||
const b = BufferListStream(rnd) // put the random bytes there
|
||||
const actual = Buffer.alloc(3)
|
||||
const expected = Buffer.alloc(3)
|
||||
|
||||
rnd.copy(expected, 0, 5, 8)
|
||||
b.copy(actual, 0, 5, 8)
|
||||
|
||||
t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('copy an interval between two buffers', function (t) {
|
||||
const buf = crypto.randomBytes(10)
|
||||
const buf2 = Buffer.alloc(10)
|
||||
const b = BufferListStream(buf)
|
||||
|
||||
b.append(buf)
|
||||
b.copy(buf2, 0, 5, 15)
|
||||
|
||||
t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('shallow slice across buffer boundaries', function (t) {
|
||||
const bl = new BufferListStream(['First', 'Second', 'Third'])
|
||||
|
||||
t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('shallow slice within single buffer', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const bl = new BufferListStream(['First', 'Second', 'Third'])
|
||||
|
||||
t.equal(bl.shallowSlice(5, 10).toString(), 'Secon')
|
||||
t.equal(bl.shallowSlice(7, 10).toString(), 'con')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
tape('shallow slice single buffer', function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const bl = new BufferListStream(['First', 'Second', 'Third'])
|
||||
|
||||
t.equal(bl.shallowSlice(0, 5).toString(), 'First')
|
||||
t.equal(bl.shallowSlice(5, 11).toString(), 'Second')
|
||||
t.equal(bl.shallowSlice(11, 16).toString(), 'Third')
|
||||
})
|
||||
|
||||
tape('shallow slice with negative or omitted indices', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const bl = new BufferListStream(['First', 'Second', 'Third'])
|
||||
|
||||
t.equal(bl.shallowSlice().toString(), 'FirstSecondThird')
|
||||
t.equal(bl.shallowSlice(5).toString(), 'SecondThird')
|
||||
t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh')
|
||||
t.equal(bl.shallowSlice(-8).toString(), 'ondThird')
|
||||
})
|
||||
|
||||
tape('shallow slice does not make a copy', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
|
||||
const bl = new BufferListStream(buffers).shallowSlice(5, -3)
|
||||
|
||||
buffers[1].fill('h')
|
||||
buffers[2].fill('h')
|
||||
|
||||
t.equal(bl.toString(), 'hhhhhhhh')
|
||||
})
|
||||
|
||||
tape('shallow slice with 0 length', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
|
||||
const bl = (new BufferListStream(buffers)).shallowSlice(0, 0)
|
||||
|
||||
t.equal(bl.length, 0)
|
||||
})
|
||||
|
||||
tape('shallow slice with 0 length from middle', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')]
|
||||
const bl = (new BufferListStream(buffers)).shallowSlice(10, 10)
|
||||
|
||||
t.equal(bl.length, 0)
|
||||
})
|
||||
|
||||
tape('duplicate', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const bl = new BufferListStream('abcdefghij\xff\x00')
|
||||
const dup = bl.duplicate()
|
||||
|
||||
t.equal(bl.prototype, dup.prototype)
|
||||
t.equal(bl.toString('hex'), dup.toString('hex'))
|
||||
})
|
||||
|
||||
tape('destroy no pipe', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream('alsdkfja;lsdkfja;lsdk')
|
||||
|
||||
bl.destroy()
|
||||
|
||||
t.equal(bl._bufs.length, 0)
|
||||
t.equal(bl.length, 0)
|
||||
})
|
||||
|
||||
tape('destroy with error', function (t) {
|
||||
t.plan(3)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream('alsdkfja;lsdkfja;lsdk')
|
||||
const err = new Error('kaboom')
|
||||
|
||||
bl.destroy(err)
|
||||
bl.on('error', function (_err) {
|
||||
t.equal(_err, err)
|
||||
})
|
||||
|
||||
t.equal(bl._bufs.length, 0)
|
||||
t.equal(bl.length, 0)
|
||||
})
|
||||
|
||||
!process.browser && tape('destroy with pipe before read end', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream()
|
||||
fs.createReadStream(path.join(__dirname, '/test.js'))
|
||||
.pipe(bl)
|
||||
|
||||
bl.destroy()
|
||||
|
||||
t.equal(bl._bufs.length, 0)
|
||||
t.equal(bl.length, 0)
|
||||
})
|
||||
|
||||
!process.browser && tape('destroy with pipe before read end with race', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream()
|
||||
|
||||
fs.createReadStream(path.join(__dirname, '/test.js'))
|
||||
.pipe(bl)
|
||||
|
||||
setTimeout(function () {
|
||||
bl.destroy()
|
||||
setTimeout(function () {
|
||||
t.equal(bl._bufs.length, 0)
|
||||
t.equal(bl.length, 0)
|
||||
}, 500)
|
||||
}, 500)
|
||||
})
|
||||
|
||||
!process.browser && tape('destroy with pipe after read end', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream()
|
||||
fs.createReadStream(path.join(__dirname, '/test.js'))
|
||||
.on('end', onEnd)
|
||||
.pipe(bl)
|
||||
|
||||
function onEnd () {
|
||||
bl.destroy()
|
||||
|
||||
t.equal(bl._bufs.length, 0)
|
||||
t.equal(bl.length, 0)
|
||||
}
|
||||
})
|
||||
|
||||
!process.browser && tape('destroy with pipe while writing to a destination', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
/** @type {BufferListStreamWithPrivate} */
|
||||
const bl = new BufferListStream()
|
||||
const ds = new BufferListStream()
|
||||
|
||||
fs.createReadStream(path.join(__dirname, '/test.js'))
|
||||
.on('end', onEnd)
|
||||
.pipe(bl)
|
||||
|
||||
function onEnd () {
|
||||
bl.pipe(ds)
|
||||
|
||||
setTimeout(function () {
|
||||
bl.destroy()
|
||||
|
||||
t.equals(bl._bufs.length, 0)
|
||||
t.equals(bl.length, 0)
|
||||
|
||||
ds.destroy()
|
||||
|
||||
t.equals(bl._bufs.length, 0)
|
||||
t.equals(bl.length, 0)
|
||||
}, 100)
|
||||
}
|
||||
})
|
||||
|
||||
!process.browser && tape('handle error', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
fs.createReadStream('/does/not/exist').pipe(BufferListStream(function (err, data) {
|
||||
t.ok(err instanceof Error, 'has error')
|
||||
t.notOk(data, 'no data')
|
||||
}))
|
||||
})
|
||||
Reference in New Issue
Block a user