// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.

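// Illustrative usage sketch (comment only, not executed by this module),
// assuming this file is loaded as `Transform`; its API mirrors
// require('stream').Transform. The constructor below accepts `transform`
// and `flush` functions, so a minimal "uppercase everything, then append a
// trailer" stream looks like:
//
//   const upper = new Transform({
//     transform(chunk, encoding, callback) {
//       // Called once per written chunk; the callback pushes the output.
//       callback(null, chunk.toString().toUpperCase())
//     },
//     flush(callback) {
//       // Called when the writable side ends, before 'end' is emitted.
//       callback(null, '\n-- done --\n')
//     }
//   })
//   upper.on('data', (chunk) => process.stdout.write(chunk))
//   upper.write('hello ')
//   upper.end('world')
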
'use strict'

const { ObjectSetPrototypeOf, Symbol } = require('../../ours/primordials')
module.exports = Transform
const { ERR_METHOD_NOT_IMPLEMENTED } = require('../../ours/errors').codes
const Duplex = require('./duplex')
const { getHighWaterMark } = require('./state')
ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype)
ObjectSetPrototypeOf(Transform, Duplex)
const kCallback = Symbol('kCallback')
function Transform(options) {
  if (!(this instanceof Transform)) return new Transform(options)

  // TODO (ronag): This should preferably always be
  // applied but would be semver-major. Or even better;
  // make Transform a Readable with the Writable interface.
  const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null
  if (readableHighWaterMark === 0) {
    // A Duplex will buffer both on the writable and readable side while
    // a Transform just wants to buffer hwm number of elements. To avoid
    // buffering twice we disable buffering on the writable side.
    options = {
      ...options,
      highWaterMark: null,
      readableHighWaterMark,
      // TODO (ronag): 0 is not optimal since we have
      // a "bug" where we check needDrain before calling _write and not after.
      // Refs: https://github.com/nodejs/node/pull/32887
      // Refs: https://github.com/nodejs/node/pull/35941
      writableHighWaterMark: options.writableHighWaterMark || 0
    }
  }
  Duplex.call(this, options)

  // We have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false
  this[kCallback] = null
  if (options) {
    if (typeof options.transform === 'function') this._transform = options.transform
    if (typeof options.flush === 'function') this._flush = options.flush
  }

  // When the writable side finishes, then flush out anything remaining.
  // Backwards compat. Some Transform streams incorrectly implement _final
  // instead of or in addition to _flush. By using 'prefinish' instead of
  // implementing _final we continue supporting this unfortunate use case.
  this.on('prefinish', prefinish)
}
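// Illustrative sketch (comment only, not executed by this module): passing
// `readableHighWaterMark: 0` opts into the single-buffer behaviour handled
// above, where the writable side's own buffering is disabled:
//
//   const passthrough = new Transform({
//     readableHighWaterMark: 0,
//     transform(chunk, encoding, callback) {
//       callback(null, chunk) // pass data through unchanged
//     }
//   })
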
// Flush any remaining data via _flush and end the readable side once the
// writable side has finished.
function final(cb) {
  if (typeof this._flush === 'function' && !this.destroyed) {
    this._flush((er, data) => {
      if (er) {
        if (cb) {
          cb(er)
        } else {
          this.destroy(er)
        }
        return
      }
      if (data != null) {
        this.push(data)
      }
      this.push(null)
      if (cb) {
        cb()
      }
    })
  } else {
    this.push(null)
    if (cb) {
      cb()
    }
  }
}
// If the user supplied their own _final, the writable machinery will call
// that instead of our final(), so flush from 'prefinish' here; otherwise
// _final (assigned below) takes care of it.
function prefinish() {
  if (this._final !== final) {
    final.call(this)
  }
}
Transform.prototype._final = final
Transform.prototype._transform = function (chunk, encoding, callback) {
  throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()')
}
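// Illustrative sketch (comment only, not executed by this module):
// subclasses override _transform rather than calling the stub above.
// For example, a pass-through stream that counts newlines and reports the
// total from _flush:
//
//   class LineCounter extends Transform {
//     constructor(options) {
//       super(options)
//       this.count = 0
//     }
//     _transform(chunk, encoding, callback) {
//       this.count += chunk.toString().split('\n').length - 1
//       callback(null, chunk) // forward the chunk unchanged
//     }
//     _flush(callback) {
//       callback(null, `lines: ${this.count}\n`)
//     }
//   }
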
// Writable side: run the chunk through _transform and push the result to
// the readable side. The write callback is withheld (parked on kCallback)
// while the readable buffer is at or above its high-water mark, which is
// how back-pressure from the reading side reaches the writer.
Transform.prototype._write = function (chunk, encoding, callback) {
  const rState = this._readableState
  const wState = this._writableState
  const length = rState.length
  this._transform(chunk, encoding, (err, val) => {
    if (err) {
      callback(err)
      return
    }
    if (val != null) {
      this.push(val)
    }
    if (
      wState.ended ||
      // Backwards compat.
      length === rState.length ||
      // Backwards compat.
      rState.length < rState.highWaterMark
    ) {
      callback()
    } else {
      this[kCallback] = callback
    }
  })
}
// Readable side: when more data is requested, release the parked write
// callback (if any) so the writable side can feed the next chunk to
// _transform.
Transform.prototype._read = function () {
  if (this[kCallback]) {
    const callback = this[kCallback]
    this[kCallback] = null
    callback()
  }
}
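
// Illustrative sketch (comment only, not executed by this module): the
// back-pressure described in the header comment, seen from the outside.
// With no consumer attached, pushed output accumulates in the readable
// buffer; once it reaches the high-water mark the write callback is parked
// on kCallback above and write() starts returning false:
//
//   const slow = new Transform({
//     transform(chunk, encoding, callback) {
//       callback(null, chunk)
//     }
//   })
//   let ok = true
//   while (ok) ok = slow.write('x'.repeat(1024)) // stops once buffers fill
//   slow.on('data', () => {}) // attaching a reader releases the parked callback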