Import Upstream version 3.1.6+~cs8.7.18

commit dca77e4a71

@@ -0,0 +1,4 @@
.*.swp
node_modules
.nyc_output/
coverage/

@@ -0,0 +1,5 @@
language: node_js
sudo: false
node_js:
- 12
- 10

@@ -0,0 +1,15 @@
The ISC License

Copyright (c) npm, Inc. and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@@ -0,0 +1,613 @@
# minipass

A _very_ minimal implementation of a [PassThrough
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)

[It's very
fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
for objects, strings, and buffers.

Supports `pipe()`ing (including multi-`pipe()` and backpressure transmission),
buffering data until either a `data` event handler or `pipe()` is added (so
you don't lose the first chunk), and most other cases where PassThrough is
a good idea.

There is a `read()` method, but it's much more efficient to consume data
from this stream via `'data'` events or by calling `pipe()` into some other
stream. Calling `read()` requires the buffer to be flattened in some
cases, which requires copying memory.

There is also no `unpipe()` method. Once you start piping, there is no
stopping it!

If you set `objectMode: true` in the options, then whatever is written will
be emitted. Otherwise, it'll do a minimal amount of Buffer copying to
ensure proper Streams semantics when `read(n)` is called.

`objectMode` can also be set by doing `stream.objectMode = true`, or by
writing any non-string/non-buffer data. `objectMode` cannot be set to
false once it is set.

This is not a `through` or `through2` stream. It doesn't transform the
data, it just passes it right through. If you want to transform the data,
extend the class, and override the `write()` method. Once you're done
transforming the data however you want, call `super.write()` with the
transform output.

For some examples of streams that extend Minipass in various ways, check
out:

- [minizlib](http://npm.im/minizlib)
- [fs-minipass](http://npm.im/fs-minipass)
- [tar](http://npm.im/tar)
- [minipass-collect](http://npm.im/minipass-collect)
- [minipass-flush](http://npm.im/minipass-flush)
- [minipass-pipeline](http://npm.im/minipass-pipeline)
- [tap](http://npm.im/tap)
- [tap-parser](http://npm.im/tap-parser)
- [treport](http://npm.im/treport)
- [minipass-fetch](http://npm.im/minipass-fetch)
- [pacote](http://npm.im/pacote)
- [make-fetch-happen](http://npm.im/make-fetch-happen)
- [cacache](http://npm.im/cacache)
- [ssri](http://npm.im/ssri)
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
- [minipass-json-stream](http://npm.im/minipass-json-stream)
- [minipass-sized](http://npm.im/minipass-sized)

## Differences from Node.js Streams

There are several things that make Minipass streams different from (and in
some ways superior to) Node.js core streams.

Please read these caveats if you are familiar with node-core streams and
intend to use Minipass streams in your programs.

### Timing

Minipass streams are designed to support synchronous use-cases. Thus, data
is emitted as soon as it is available, always. It is buffered until read,
but no longer. Another way to look at it is that Minipass streams are
exactly as synchronous as the logic that writes into them.

This can be surprising if your code relies on `PassThrough.write()` always
providing data on the next tick rather than the current one, or being able
to call `resume()` and not have the entire buffer disappear immediately.

However, without this synchronicity guarantee, there would be no way for
Minipass to achieve the speeds it does, or support the synchronous use
cases that it does. Simply put, waiting takes time.

This non-deferring approach makes Minipass streams much easier to reason
about, especially in the context of Promises and other flow-control
mechanisms.
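
For example (a minimal sketch of the synchronous contract):

```js
const Minipass = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
let seen = null
mp.on('data', chunk => seen = chunk)
mp.write('hello') // the 'data' handler runs during this call
console.log(seen) // 'hello' -- delivered synchronously, no tick deferral
```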

### No High/Low Water Marks

Node.js core streams will optimistically fill up a buffer, returning `true`
on all writes until the limit is hit, even if the data has nowhere to go.
Then, they will not attempt to draw more data in until the buffer size dips
below a minimum value.

Minipass streams are much simpler. The `write()` method will return `true`
if the data has somewhere to go (which is to say, given the timing
guarantees, that the data is already there by the time `write()` returns).

If the data has nowhere to go, then `write()` returns false, and the data
sits in a buffer, to be drained out immediately as soon as anyone consumes
it.
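
A minimal sketch of this behavior:

```js
const Minipass = require('minipass')
const mp = new Minipass()
mp.on('drain', () => console.log('buffer emptied'))
console.log(mp.write('foo')) // false -- no consumer yet, so it buffers
mp.on('data', chunk => {})   // adding a consumer flushes the buffer
// 'buffer emptied' logs synchronously as the listener is added
```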

### Hazards of Buffering (or: Why Minipass Is So Fast)

Since data written to a Minipass stream is immediately written all the way
through the pipeline, and `write()` always returns true/false based on
whether the data was fully flushed, backpressure is communicated
immediately to the upstream caller. This minimizes buffering.

Consider this case:

```js
const {PassThrough} = require('stream')
const p1 = new PassThrough({ highWaterMark: 1024 })
const p2 = new PassThrough({ highWaterMark: 1024 })
const p3 = new PassThrough({ highWaterMark: 1024 })
const p4 = new PassThrough({ highWaterMark: 1024 })

p1.pipe(p2).pipe(p3).pipe(p4)
p4.on('data', () => console.log('made it through'))

// this returns false and buffers, then writes to p2 on next tick (1)
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
// on next tick (4)
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
// 'drain' on next tick (5)
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
// tick (7)

p1.write(Buffer.alloc(2048)) // returns false
```

Along the way, the data was buffered and deferred at each stage, and
multiple event deferrals happened, for an unblocked pipeline where it was
perfectly safe to write all the way through!

Furthermore, setting a `highWaterMark` of `1024` might lead someone reading
the code to think an advisory maximum of 1KiB is being set for the
pipeline. However, the actual advisory buffering level is the _sum_ of
`highWaterMark` values, since each one has its own bucket.

Consider the Minipass case:

```js
const m1 = new Minipass()
const m2 = new Minipass()
const m3 = new Minipass()
const m4 = new Minipass()

m1.pipe(m2).pipe(m3).pipe(m4)
m4.on('data', () => console.log('made it through'))

// m1 is flowing, so it writes the data to m2 immediately
// m2 is flowing, so it writes the data to m3 immediately
// m3 is flowing, so it writes the data to m4 immediately
// m4 is flowing, so it fires the 'data' event immediately, returns true
// m4's write returned true, so m3 is still flowing, returns true
// m3's write returned true, so m2 is still flowing, returns true
// m2's write returned true, so m1 is still flowing, returns true
// No event deferrals or buffering along the way!

m1.write(Buffer.alloc(2048)) // returns true
```

It is extremely unlikely that you _don't_ want to buffer any data written,
or _ever_ buffer data that can be flushed all the way through. Neither
node-core streams nor Minipass ever fail to buffer written data, but
node-core streams do a lot of unnecessary buffering and pausing.

As always, the faster implementation is the one that does less stuff and
waits less time to do it.

### Immediately emit `end` for empty streams (when not paused)

If a stream is not paused, and `end()` is called before writing any data
into it, then it will emit `end` immediately.

If you have logic that occurs on the `end` event which you don't want to
potentially happen immediately (for example, closing file descriptors,
moving on to the next entry in an archive parse stream, etc.) then be sure
to call `stream.pause()` on creation, and then `stream.resume()` once you
are ready to respond to the `end` event.
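
For instance (a small sketch):

```js
const mp = new Minipass()
mp.pause()  // defer end-ish events until we're ready for them
mp.end()    // nothing emitted yet
mp.on('end', () => console.log('done'))
mp.resume() // now 'end' fires, and 'done' is logged
```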

### Emit `end` When Asked

One hazard of immediately emitting `'end'` is that you may not yet have had
a chance to add a listener. In order to avoid this hazard, Minipass
streams safely re-emit the `'end'` event if a new listener is added after
`'end'` has been emitted.

Ie, if you do `stream.on('end', someFunction)`, and the stream has already
emitted `end`, then it will call the handler right away. (You can think of
this somewhat like attaching a new `.then(fn)` to a previously-resolved
Promise.)

To avoid calling handlers multiple times when they would not expect
multiple `end` events to occur, all listeners are removed from the `'end'`
event whenever it is emitted.
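
A minimal sketch of the re-emit behavior:

```js
const mp = new Minipass()
mp.end() // empty and not paused, so 'end' emits immediately
mp.on('end', () => console.log('ended')) // still fires -- re-emitted
```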

### Impact of "immediate flow" on Tee-streams

A "tee stream" is a stream piping to multiple destinations:

```js
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
tee.write('foo') // goes to both destinations
```

Since Minipass streams _immediately_ process any pending data through the
pipeline when a new pipe destination is added, this can have surprising
effects, especially when a stream comes in from some other function and may
or may not have data in its buffer.

```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
src.pipe(dest2) // gets nothing!
```

The solution is to create a dedicated tee-stream junction that pipes to
both locations, and then pipe to _that_ instead.

```js
// Safe example: tee to both places
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.pipe(dest1)
tee.pipe(dest2)
src.pipe(tee) // tee gets 'foo', pipes to both locations
```

The same caveat applies to `on('data')` event listeners. The first one
added will _immediately_ receive all of the data, leaving nothing for the
second:

```js
// WARNING! WILL LOSE DATA!
const src = new Minipass()
src.write('foo')
src.on('data', handler1) // receives 'foo' right away
src.on('data', handler2) // nothing to see here!
```

A dedicated tee-stream can be used in this case as well:

```js
// Safe example: tee to both data handlers
const src = new Minipass()
src.write('foo')
const tee = new Minipass()
tee.on('data', handler1)
tee.on('data', handler2)
src.pipe(tee)
```

## USAGE

It's a stream! Use it like a stream and it'll most likely do what you
want.

```js
const Minipass = require('minipass')
const mp = new Minipass(options) // optional: { encoding, objectMode }
mp.write('foo')
mp.pipe(someOtherStream)
mp.end('bar')
```

### OPTIONS

* `encoding` How would you like the data coming _out_ of the stream to be
  encoded? Accepts any values that can be passed to `Buffer.toString()`.
* `objectMode` Emit data exactly as it comes in. This will be flipped on
  by default if you write() something other than a string or Buffer at any
  point. Setting `objectMode: true` will prevent setting any encoding
  value.
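
For instance (a small sketch of both options):

```js
// encoding makes the stream emit strings instead of Buffers
const str = new Minipass({ encoding: 'utf8' })
str.on('data', s => console.log(typeof s)) // 'string'
str.end(Buffer.from('hi'))

// objectMode passes any value through untouched
const obj = new Minipass({ objectMode: true })
obj.on('data', o => console.log(o.x)) // 1
obj.end({ x: 1 })
```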

### API

Implements the user-facing portions of Node.js's `Readable` and `Writable`
streams.

### Methods

* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the
  base Minipass class, the same data will come out.) Returns `false` if
  the stream will buffer the next write, or true if it's still in "flowing"
  mode.
* `end([chunk, [encoding]], [callback])` - Signal that you have no more
  data to write. This will queue an `end` event to be fired when all the
  data has been consumed.
* `setEncoding(encoding)` - Set the encoding for data coming out of the
  stream. This can only be done once.
* `pause()` - No more data for a while, please. This also prevents `end`
  from being emitted for empty streams until the stream is resumed.
* `resume()` - Resume the stream. If there's data in the buffer, it is all
  flushed out and emitted immediately (so it is discarded if nothing is
  listening). Any buffered events are immediately emitted.
* `pipe(dest)` - Send all output to the stream provided. There is no way
  to unpipe. When data is emitted, it is immediately written to any and
  all pipe destinations.
* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some
  events are given special treatment, however. (See below under "events".)
* `promise()` - Returns a Promise that resolves when the stream emits
  `end`, or rejects if the stream emits `error`.
* `collect()` - Return a Promise that resolves on `end` with an array
  containing each chunk of data that was emitted, or rejects if the stream
  emits `error`. Note that this consumes the stream data.
* `concat()` - Same as `collect()`, but concatenates the data into a single
  Buffer object. Will reject the returned promise if the stream is in
  objectMode, or if it goes into objectMode by the end of the data.
* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
  provided, then consume all of it. If `n` bytes are not available, then
  it returns null. **Note** consuming streams in this way is less
  efficient, and can lead to unnecessary Buffer copying.
* `destroy([er])` - Destroy the stream. If an error is provided, then an
  `'error'` event is emitted. If the stream has a `close()` method, and
  has not emitted a `'close'` event yet, then `stream.close()` will be
  called. Any Promises returned by `.promise()`, `.collect()` or
  `.concat()` will be rejected. After being destroyed, writing to the
  stream will emit an error. No more data will be emitted if the stream is
  destroyed, even if it was previously buffered.

### Properties

* `bufferLength` Read-only. Total number of bytes buffered, or in the case
  of objectMode, the total number of objects.
* `encoding` The encoding that has been set. (Setting this is equivalent
  to calling `setEncoding(enc)` and has the same prohibition against
  setting multiple times.)
* `flowing` Read-only. Boolean indicating whether a chunk written to the
  stream will be immediately emitted.
* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
  (ie, `end`, `prefinish`, `finish`) have been emitted. Note that
  listening on any end-ish event will immediately re-emit it if it has
  already been emitted.
* `writable` Whether the stream is writable. Default `true`. Set to
  `false` when `end()` is called.
* `readable` Whether the stream is readable. Default `true`.
* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written
  to the stream that have not yet been emitted. (It's probably a bad idea
  to mess with this.)
* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that
  this stream is piping into. (It's probably a bad idea to mess with
  this.)
* `destroyed` A getter that indicates whether the stream was destroyed.
* `paused` True if the stream has been explicitly paused, otherwise false.
* `objectMode` Indicates whether the stream is in `objectMode`. Once set
  to `true`, it cannot be set to `false`.
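
As noted above, `objectMode` flips on automatically for non-string,
non-Buffer writes (a small sketch):

```js
const mp = new Minipass()
console.log(mp.objectMode) // false
mp.write({ some: 'object' }) // a non-string/non-buffer write flips it on
console.log(mp.objectMode) // true -- and it cannot be set back to false
```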

### Events

* `data` Emitted when there's data to read. Argument is the data to read.
  This is never emitted while not flowing. If a listener is attached, that
  will resume the stream.
* `end` Emitted when there's no more data to read. This will be emitted
  immediately for empty streams when `end()` is called. If a listener is
  attached, and `end` was already emitted, then it will be emitted again.
  All listeners are removed when `end` is emitted.
* `prefinish` An end-ish event that follows the same logic as `end` and is
  emitted in the same conditions where `end` is emitted. Emitted after
  `'end'`.
* `finish` An end-ish event that follows the same logic as `end` and is
  emitted in the same conditions where `end` is emitted. Emitted after
  `'prefinish'`.
* `close` An indication that an underlying resource has been released.
  Minipass does not emit this event, but will defer it until after `end`
  has been emitted, since it throws off some stream libraries otherwise.
* `drain` Emitted when the internal buffer empties, and it is again
  suitable to `write()` into the stream.
* `readable` Emitted when data is buffered and ready to be read by a
  consumer.
* `resume` Emitted when stream changes state from buffering to flowing
  mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event
  listener is added.)

### Static Methods

* `Minipass.isStream(stream)` Returns `true` if the argument is a stream,
  and false otherwise. To be considered a stream, the object must be
  either an instance of Minipass, or an EventEmitter that has either a
  `pipe()` method, or both `write()` and `end()` methods. (Pretty much any
  stream in node-land will return `true` for this.)
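
For example (a small sketch):

```js
Minipass.isStream(new Minipass())                         // true
Minipass.isStream(new (require('stream').PassThrough)()) // true
Minipass.isStream({ write () {}, end () {} })            // false, not an EE
```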

## EXAMPLES

Here are some examples of things you can do with Minipass streams.

### simple "are you done yet" promise

```js
mp.promise().then(() => {
  // stream is finished
}, er => {
  // stream emitted an error
})
```

### collecting

```js
mp.collect().then(all => {
  // all is an array of all the data emitted
  // encoding is supported in this case,
  // so the result will be a collection of strings if
  // an encoding is specified, or buffers/objects if not.
  //
  // In an async function, you may do
  // const data = await stream.collect()
})
```

### collecting into a single blob

This is a bit slower because it concatenates the data into one chunk for
you, but if you're going to do it yourself anyway, it's convenient this
way:

```js
mp.concat().then(onebigchunk => {
  // onebigchunk is a string if the stream
  // had an encoding set, or a buffer otherwise.
})
```

### iteration

You can iterate over streams synchronously or asynchronously in platforms
that support it.

Synchronous iteration will end when the currently available data is
consumed, even if the `end` event has not been reached. In string and
buffer mode, the data is concatenated, so unless multiple writes are
occurring in the same tick as the `read()`, sync iteration loops will
generally only have a single iteration.

To consume chunks in this way exactly as they have been written, with no
flattening, create the stream with the `{ objectMode: true }` option.

```js
const mp = new Minipass({ objectMode: true })
mp.write('a')
mp.write('b')
for (let letter of mp) {
  console.log(letter) // a, b
}
mp.write('c')
mp.write('d')
for (let letter of mp) {
  console.log(letter) // c, d
}
mp.write('e')
mp.end()
for (let letter of mp) {
  console.log(letter) // e
}
for (let letter of mp) {
  console.log(letter) // nothing
}
```

Asynchronous iteration will continue until the end event is reached,
consuming all of the data.

```js
const mp = new Minipass({ encoding: 'utf8' })

// some source of some data
let i = 5
const inter = setInterval(() => {
  if (i-- > 0)
    mp.write(Buffer.from('foo\n', 'utf8'))
  else {
    mp.end()
    clearInterval(inter)
  }
}, 100)

// consume the data with asynchronous iteration
async function consume () {
  for await (let chunk of mp) {
    console.log(chunk)
  }
  return 'ok'
}

consume().then(res => console.log(res))
// logs `foo\n` 5 times, and then `ok`
```

### subclass that `console.log()`s everything written into it

```js
class Logger extends Minipass {
  write (chunk, encoding, callback) {
    console.log('WRITE', chunk, encoding)
    return super.write(chunk, encoding, callback)
  }
  end (chunk, encoding, callback) {
    console.log('END', chunk, encoding)
    return super.end(chunk, encoding, callback)
  }
}

someSource.pipe(new Logger()).pipe(someDest)
```

### same thing, but using an inline anonymous class

```js
// js classes are fun
someSource
  .pipe(new (class extends Minipass {
    emit (ev, ...data) {
      // let's also log events, because debugging some weird thing
      console.log('EMIT', ev)
      return super.emit(ev, ...data)
    }
    write (chunk, encoding, callback) {
      console.log('WRITE', chunk, encoding)
      return super.write(chunk, encoding, callback)
    }
    end (chunk, encoding, callback) {
      console.log('END', chunk, encoding)
      return super.end(chunk, encoding, callback)
    }
  }))
  .pipe(someDest)
```

### subclass that defers 'end' for some reason

```js
class SlowEnd extends Minipass {
  emit (ev, ...args) {
    if (ev === 'end') {
      console.log('going to end, hold on a sec')
      setTimeout(() => {
        console.log('ok, ready to end now')
        super.emit('end', ...args)
      }, 100)
    } else {
      return super.emit(ev, ...args)
    }
  }
}
```

### transform that creates newline-delimited JSON

```js
class NDJSONEncode extends Minipass {
  write (obj, cb) {
    try {
      // JSON.stringify can throw, emit an error on that
      return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
    } catch (er) {
      this.emit('error', er)
    }
  }
  end (obj, cb) {
    if (typeof obj === 'function') {
      cb = obj
      obj = undefined
    }
    if (obj !== undefined) {
      this.write(obj)
    }
    return super.end(cb)
  }
}
```

### transform that parses newline-delimited JSON

```js
class NDJSONDecode extends Minipass {
  constructor (options) {
    // always be in object mode, as far as Minipass is concerned
    super({ objectMode: true })
    this._jsonBuffer = ''
  }
  write (chunk, encoding, cb) {
    if (typeof chunk === 'string' &&
        typeof encoding === 'string' &&
        encoding !== 'utf8') {
      chunk = Buffer.from(chunk, encoding).toString()
    } else if (Buffer.isBuffer(chunk)) {
      chunk = chunk.toString()
    }
    if (typeof encoding === 'function') {
      cb = encoding
    }
    const jsonData = (this._jsonBuffer + chunk).split('\n')
    this._jsonBuffer = jsonData.pop()
    for (let i = 0; i < jsonData.length; i++) {
      try {
        // JSON.parse can throw, emit an error on that
        super.write(JSON.parse(jsonData[i]))
      } catch (er) {
        this.emit('error', er)
        continue
      }
    }
    if (cb)
      cb()
  }
}
```

@@ -0,0 +1,11 @@
'use strict'
const MiniPass = require('../..')

module.exports = class ExtendMiniPass extends MiniPass {
  constructor (opts) {
    super(opts)
  }
  write (data, encoding) {
    return super.write(data, encoding)
  }
}

@@ -0,0 +1,12 @@
'use strict'
const through2 = require('through2')
module.exports = function (opt) {
  return opt.objectMode
    ? through2.obj(func)
    : through2(func)

  function func (data, enc, done) {
    this.push(data, enc)
    done()
  }
}

@@ -0,0 +1,11 @@
'use strict'
const stream = require('stream')
module.exports = class ExtendTransform extends stream.Transform {
  constructor (opts) {
    super(opts)
  }
  _transform (data, enc, done) {
    this.push(data, enc)
    done()
  }
}

@@ -0,0 +1,12 @@
'use strict'
const EE = require('events').EventEmitter

module.exports = class NullSink extends EE {
  write (data, encoding, next) {
    if (next) next()
    return true
  }
  end () {
    this.emit('finish')
  }
}

@@ -0,0 +1,41 @@
'use strict'
const stream = require('stream')

const numbers = new Array(1000).join(',').split(',').map((v, k) => k)
let acc = ''
const strings = numbers.map(n => acc += n)
const bufs = strings.map(s => Buffer.from(s))
const objs = strings.map(s => ({ str: s }))

module.exports = class Numbers {
  constructor (opt) {
    this.objectMode = opt.objectMode
    this.encoding = opt.encoding
    this.ii = 0
    this.done = false
  }
  pipe (dest) {
    this.dest = dest
    this.go()
    return dest
  }

  go () {
    let flowing = true
    while (flowing) {
      if (this.ii >= 1000) {
        this.dest.end()
        this.done = true
        flowing = false
      } else {
        flowing = this.dest.write(
          (this.objectMode ? objs
          : this.encoding ? strings
          : bufs)[this.ii++])
      }
    }

    if (!this.done)
      this.dest.once('drain', _ => this.go())
  }
}

@@ -0,0 +1,15 @@
'use strict'
module.exports = _ => {
  const start = process.hrtime()
  return _ => {
    const end = process.hrtime(start)
    const ms = Math.round(end[0]*1e6 + end[1]/1e3)/1e3
    if (!process.env.isTTY)
      console.log(ms)
    else {
      const s = Math.round(end[0]*10 + end[1]/1e8)/10
      const ss = s <= 1 ? '' : ' (' + s + 's)'
      console.log('%d%s', ms, ss)
    }
  }
}
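
// usage sketch: the exported function starts an hrtime clock and returns
// a thunk that prints the elapsed time when called, e.g.
//   const done = require('./lib/timer.js')()
//   runBenchmark()   // hypothetical work being timed
//   done()           // prints elapsed milliseconds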

@@ -0,0 +1,160 @@
'use strict'

const iterations = +process.env.BENCH_TEST_ITERATION || 100
const testCount = +process.env.BENCH_TEST_COUNT || 20

const tests = [
  'baseline',
  'minipass',
  'extend-minipass',
  'through2',
  'extend-through2',
  'passthrough',
  'extend-transform'
]

const manyOpts = [ 'many', 'single' ]
const typeOpts = [ 'buffer', 'string', 'object' ]

const main = () => {
  const spawn = require('child_process').spawn
  const node = process.execPath

  const results = {}

  const testSet = []
  tests.forEach(t =>
    manyOpts.forEach(many =>
      typeOpts.forEach(type =>
        new Array(testCount).join(',').split(',').forEach(() =>
          t !== 'baseline' || (many === 'single' && type === 'object')
          ? testSet.push([t, many, type]) : null))))

  let didFirst = false
  const mainRunTest = t => {
    if (!t)
      return afterMain(results)

    const k = t.join('\t')
    if (!results[k]) {
      results[k] = []
      if (!didFirst)
        didFirst = true
      else
        process.stderr.write('\n')

      process.stderr.write(k + ' #')
    } else {
      process.stderr.write('#')
    }

    const c = spawn(node, [__filename].concat(t), {
      stdio: [ 'ignore', 'pipe', 2 ]
    })
    let out = ''
    c.stdout.on('data', c => out += c)
    c.on('close', (code, signal) => {
      if (code || signal)
        throw new Error('failed: ' + code + ' ' + signal)
      results[k].push(+out)
      mainRunTest(testSet.shift())
    })
  }

  mainRunTest(testSet.shift())
}

const afterMain = results => {
  console.log('test\tmany\ttype\tops/s\tmean\tmedian\tmax\tmin' +
    '\tstdev\trange\traw')
  // get the mean, median, stddev, and range of each test
  Object.keys(results).forEach(test => {
    const k = results[test].sort((a, b) => a - b)
    const min = k[0]
    const max = k[ k.length - 1 ]
    const range = max - min
    const sum = k.reduce((a,b) => a + b, 0)
    const mean = sum / k.length
    const ops = iterations / mean * 1000
    const devs = k.map(n => n - mean).map(n => n * n)
    const avgdev = devs.reduce((a,b) => a + b, 0) / k.length
    const stdev = Math.pow(avgdev, 0.5)
    // median of an even-length sorted list is the mean of the two middle
    // values, at indices length/2 - 1 and length/2
    const median = k.length % 2 ? k[Math.floor(k.length / 2)] :
      (k[k.length/2 - 1] + k[k.length/2])/2
    console.log(
      '%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%s', test, round(ops),
      round(mean), round(median),
      max, min, round(stdev), round(range),
      k.join('\t'))
  })
}

const round = num => Math.round(num * 1000)/1000

const test = (testname, many, type) => {
  const timer = require('./lib/timer.js')
  const Class = getClass(testname)

  const done = timer()
  runTest(Class, many, type, iterations, done)
}

// don't blow up the stack! loop unless deferred
const runTest = (Class, many, type, iterations, done) => {
  const Nullsink = require('./lib/nullsink.js')
  const Numbers = require('./lib/numbers.js')
  const opt = {}
  if (type === 'string')
    opt.encoding = 'utf8'
  else if (type === 'object')
    opt.objectMode = true

  while (iterations--) {
    let finished = false
    let inloop = true
    const after = iterations === 0 ? done
      : () => {
        if (iterations === 0)
          done()
        else if (inloop)
          finished = true
        else
          runTest(Class, many, type, iterations, done)
      }

    const out = new Nullsink().on('finish', after)
    let sink = Class ? new Class(opt) : out

    if (many && Class)
      sink = sink
        .pipe(new Class(opt))
        .pipe(new Class(opt))
        .pipe(new Class(opt))
        .pipe(new Class(opt))

    if (sink !== out)
      sink.pipe(out)

    new Numbers(opt).pipe(sink)

    // keep tight-looping if the stream is done already
    if (!finished) {
      inloop = false
      break
    }
  }
}

const getClass = testname =>
  testname === 'through2' ? require('through2').obj
  : testname === 'extend-through2' ? require('./lib/extend-through2.js')
  : testname === 'minipass' ? require('../')
  : testname === 'extend-minipass' ? require('./lib/extend-minipass.js')
  : testname === 'passthrough' ? require('stream').PassThrough
  : testname === 'extend-transform' ? require('./lib/extend-transform.js')
  : null

if (!process.argv[2])
  main()
else
  test(process.argv[2], process.argv[3] === 'many', process.argv[4])

@@ -0,0 +1,560 @@
'use strict'
const proc = typeof process === 'object' && process ? process : {
  stdout: null,
  stderr: null,
}
const EE = require('events')
const Stream = require('stream')
const Yallist = require('yallist')
const SD = require('string_decoder').StringDecoder

const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')

// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
  || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
  || Symbol('iterator not implemented')

// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
  ev === 'end' ||
  ev === 'finish' ||
  ev === 'prefinish'

const isArrayBuffer = b => b instanceof ArrayBuffer ||
  typeof b === 'object' &&
  b.constructor &&
  b.constructor.name === 'ArrayBuffer' &&
  b.byteLength >= 0

const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)

module.exports = class Minipass extends Stream {
  constructor (options) {
    super()
    this[FLOWING] = false
    // whether we're explicitly paused
    this[PAUSED] = false
    this.pipes = new Yallist()
    this.buffer = new Yallist()
    this[OBJECTMODE] = options && options.objectMode || false
    if (this[OBJECTMODE])
      this[ENCODING] = null
    else
      this[ENCODING] = options && options.encoding || null
    if (this[ENCODING] === 'buffer')
      this[ENCODING] = null
    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
    this[EOF] = false
    this[EMITTED_END] = false
    this[EMITTING_END] = false
    this[CLOSED] = false
    this[EMITTED_ERROR] = null
    this.writable = true
    this.readable = true
    this[BUFFERLENGTH] = 0
    this[DESTROYED] = false
  }

  get bufferLength () { return this[BUFFERLENGTH] }

  get encoding () { return this[ENCODING] }
  set encoding (enc) {
    if (this[OBJECTMODE])
      throw new Error('cannot set encoding in objectMode')

    if (this[ENCODING] && enc !== this[ENCODING] &&
        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
      throw new Error('cannot change encoding')

    if (this[ENCODING] !== enc) {
      this[DECODER] = enc ? new SD(enc) : null
      if (this.buffer.length)
        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
    }

    this[ENCODING] = enc
  }

  setEncoding (enc) {
    this.encoding = enc
  }

  get objectMode () { return this[OBJECTMODE] }
  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }

  write (chunk, encoding, cb) {
    if (this[EOF])
      throw new Error('write after end')

    if (this[DESTROYED]) {
      this.emit('error', Object.assign(
        new Error('Cannot call write after a stream was destroyed'),
        { code: 'ERR_STREAM_DESTROYED' }
      ))
      return true
    }

    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (!encoding)
      encoding = 'utf8'

    // convert array buffers and typed array views into buffers
    // at some point in the future, we may want to do the opposite!
    // leave strings and buffers as-is
    // anything else switches us into object mode
    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      if (isArrayBufferView(chunk))
        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
      else if (isArrayBuffer(chunk))
        chunk = Buffer.from(chunk)
      else if (typeof chunk !== 'string')
        // use the setter so we throw if we have encoding set
        this.objectMode = true
    }

    // this ensures at this point that the chunk is a buffer or string
    // don't buffer it up or send it to the decoder
    if (!this.objectMode && !chunk.length) {
      if (this[BUFFERLENGTH] !== 0)
        this.emit('readable')
      if (cb)
        cb()
      return this.flowing
    }

    // fast-path writing strings of same encoding to a stream with
    // an empty buffer, skipping the buffer/decoder dance
    if (typeof chunk === 'string' && !this[OBJECTMODE] &&
        // unless it is a string already ready for us to use
        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
      chunk = Buffer.from(chunk, encoding)
    }

    if (Buffer.isBuffer(chunk) && this[ENCODING])
      chunk = this[DECODER].write(chunk)

    if (this.flowing) {
      // if we somehow have something in the buffer, but we think we're
      // flowing, then we need to flush all that out first, or we get
      // chunks coming in out of order. Can't emit 'drain' here though,
      // because we're mid-write, so that'd be bad.
      if (this[BUFFERLENGTH] !== 0)
        this[FLUSH](true)

      // if we are still flowing after flushing the buffer we can emit the
      // chunk otherwise we have to buffer it.
      this.flowing
        ? this.emit('data', chunk)
        : this[BUFFERPUSH](chunk)
    } else
      this[BUFFERPUSH](chunk)

    if (this[BUFFERLENGTH] !== 0)
      this.emit('readable')

    if (cb)
      cb()

    return this.flowing
  }

  read (n) {
    if (this[DESTROYED])
      return null

    try {
      if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
        return null

      if (this[OBJECTMODE])
        n = null

      if (this.buffer.length > 1 && !this[OBJECTMODE]) {
        if (this.encoding)
          this.buffer = new Yallist([
            Array.from(this.buffer).join('')
          ])
        else
          this.buffer = new Yallist([
            Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
          ])
      }

      return this[READ](n || null, this.buffer.head.value)
    } finally {
      this[MAYBE_EMIT_END]()
    }
  }

  [READ] (n, chunk) {
    if (n === chunk.length || n === null)
      this[BUFFERSHIFT]()
    else {
      this.buffer.head.value = chunk.slice(n)
      chunk = chunk.slice(0, n)
      this[BUFFERLENGTH] -= n
    }

    this.emit('data', chunk)

    if (!this.buffer.length && !this[EOF])
      this.emit('drain')

    return chunk
  }

  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    if (cb)
      this.once('end', cb)
    this[EOF] = true
    this.writable = false

    // if we haven't written anything, then go ahead and emit,
    // even if we're not reading.
    // we'll re-emit if a new 'end' listener is added anyway.
    // This makes MP more suitable to write-only use cases.
    if (this.flowing || !this[PAUSED])
      this[MAYBE_EMIT_END]()
    return this
  }

  // don't let the internal resume be overwritten
  [RESUME] () {
    if (this[DESTROYED])
      return

    this[PAUSED] = false
    this[FLOWING] = true
    this.emit('resume')
    if (this.buffer.length)
      this[FLUSH]()
    else if (this[EOF])
      this[MAYBE_EMIT_END]()
    else
      this.emit('drain')
  }

  resume () {
    return this[RESUME]()
  }

  pause () {
    this[FLOWING] = false
    this[PAUSED] = true
  }

  get destroyed () {
    return this[DESTROYED]
  }

  get flowing () {
    return this[FLOWING]
  }

  get paused () {
    return this[PAUSED]
  }

  [BUFFERPUSH] (chunk) {
    if (this[OBJECTMODE])
      this[BUFFERLENGTH] += 1
    else
      this[BUFFERLENGTH] += chunk.length
    return this.buffer.push(chunk)
  }

  [BUFFERSHIFT] () {
    if (this.buffer.length) {
      if (this[OBJECTMODE])
        this[BUFFERLENGTH] -= 1
      else
        this[BUFFERLENGTH] -= this.buffer.head.value.length
    }
    return this.buffer.shift()
  }
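
  // flush the whole buffer out as 'data' events, stopping early if a
  // consumer stops the flow mid-flush ([FLUSHCHUNK] returns this.flowing)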
  [FLUSH] (noDrain) {
    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))

    if (!noDrain && !this.buffer.length && !this[EOF])
      this.emit('drain')
  }

  [FLUSHCHUNK] (chunk) {
    return chunk ? (this.emit('data', chunk), this.flowing) : false
  }

  pipe (dest, opts) {
    if (this[DESTROYED])
      return

    const ended = this[EMITTED_END]
    opts = opts || {}
    if (dest === proc.stdout || dest === proc.stderr)
      opts.end = false
    else
      opts.end = opts.end !== false

    const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
    this.pipes.push(p)

    dest.on('drain', p.ondrain)
    this[RESUME]()
    // piping an ended stream ends immediately
    if (ended && p.opts.end)
      p.dest.end()
    return dest
  }

  addListener (ev, fn) {
    return this.on(ev, fn)
  }
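
  // adding a 'data' listener resumes the stream; end-ish listeners added
  // after the fact get the event replayed, and a late 'error' listener is
  // called with any previously-emitted error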
  on (ev, fn) {
    try {
      return super.on(ev, fn)
    } finally {
      if (ev === 'data' && !this.pipes.length && !this.flowing)
        this[RESUME]()
      else if (isEndish(ev) && this[EMITTED_END]) {
        super.emit(ev)
        this.removeAllListeners(ev)
      } else if (ev === 'error' && this[EMITTED_ERROR]) {
        fn.call(this, this[EMITTED_ERROR])
      }
    }
  }

  get emittedEnd () {
    return this[EMITTED_END]
  }

  [MAYBE_EMIT_END] () {
    if (!this[EMITTING_END] &&
        !this[EMITTED_END] &&
        !this[DESTROYED] &&
        this.buffer.length === 0 &&
        this[EOF]) {
      this[EMITTING_END] = true
      this.emit('end')
      this.emit('prefinish')
      this.emit('finish')
      if (this[CLOSED])
        this.emit('close')
      this[EMITTING_END] = false
    }
  }

  emit (ev, data) {
    // error and close are only events allowed after calling destroy()
    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
      return
    else if (ev === 'data') {
      if (!data)
        return

      if (this.pipes.length)
        this.pipes.forEach(p =>
          p.dest.write(data) === false && this.pause())
    } else if (ev === 'end') {
      // only actual end gets this treatment
      if (this[EMITTED_END] === true)
        return

      this[EMITTED_END] = true
      this.readable = false

      if (this[DECODER]) {
        data = this[DECODER].end()
        if (data) {
          this.pipes.forEach(p => p.dest.write(data))
          super.emit('data', data)
        }
      }

      this.pipes.forEach(p => {
        p.dest.removeListener('drain', p.ondrain)
        if (p.opts.end)
          p.dest.end()
      })
    } else if (ev === 'close') {
      this[CLOSED] = true
      // don't emit close before 'end' and 'finish'
      if (!this[EMITTED_END] && !this[DESTROYED])
        return
    } else if (ev === 'error') {
      this[EMITTED_ERROR] = data
    }

    // TODO: replace with a spread operator when Node v4 support drops
    const args = new Array(arguments.length)
    args[0] = ev
    args[1] = data
    if (arguments.length > 2) {
      for (let i = 2; i < arguments.length; i++) {
        args[i] = arguments[i]
      }
    }

    try {
      return super.emit.apply(this, args)
    } finally {
      if (!isEndish(ev))
        this[MAYBE_EMIT_END]()
      else
        this.removeAllListeners(ev)
    }
  }

  // const all = await stream.collect()
  collect () {
    const buf = []
    if (!this[OBJECTMODE])
      buf.dataLength = 0
    // set the promise first, in case an error is raised
    // by triggering the flow here.
    const p = this.promise()
    this.on('data', c => {
      buf.push(c)
      if (!this[OBJECTMODE])
        buf.dataLength += c.length
    })
    return p.then(() => buf)
  }

  // const data = await stream.concat()
  concat () {
    return this[OBJECTMODE]
      ? Promise.reject(new Error('cannot concat in objectMode'))
      : this.collect().then(buf =>
          this[OBJECTMODE]
            ? Promise.reject(new Error('cannot concat in objectMode'))
            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
  }

  // stream.promise().then(() => done, er => emitted error)
  promise () {
    return new Promise((resolve, reject) => {
      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
      this.on('error', er => reject(er))
      this.on('end', () => resolve())
    })
  }

  // for await (let chunk of stream)
  [ASYNCITERATOR] () {
    const next = () => {
      const res = this.read()
      if (res !== null)
        return Promise.resolve({ done: false, value: res })

      if (this[EOF])
        return Promise.resolve({ done: true })

      let resolve = null
      let reject = null
      const onerr = er => {
        this.removeListener('data', ondata)
        this.removeListener('end', onend)
        reject(er)
      }
      const ondata = value => {
        this.removeListener('error', onerr)
        this.removeListener('end', onend)
        this.pause()
        resolve({ value: value, done: !!this[EOF] })
      }
      const onend = () => {
        this.removeListener('error', onerr)
        this.removeListener('data', ondata)
        resolve({ done: true })
      }
      const ondestroy = () => onerr(new Error('stream destroyed'))
      return new Promise((res, rej) => {
        reject = rej
        resolve = res
        this.once(DESTROYED, ondestroy)
        this.once('error', onerr)
        this.once('end', onend)
        this.once('data', ondata)
      })
    }

    return { next }
  }

  // for (let chunk of stream)
  [ITERATOR] () {
    const next = () => {
      const value = this.read()
      const done = value === null
      return { value, done }
    }
    return { next }
  }

  destroy (er) {
    if (this[DESTROYED]) {
      if (er)
        this.emit('error', er)
      else
        this.emit(DESTROYED)
      return this
    }

    this[DESTROYED] = true

    // throw away all buffered data, it's never coming out
    this.buffer = new Yallist()
    this[BUFFERLENGTH] = 0

    if (typeof this.close === 'function' && !this[CLOSED])
      this.close()

    if (er)
      this.emit('error', er)
    else // if no error to emit, still reject pending promises
      this.emit(DESTROYED)

    return this
  }

  static isStream (s) {
    return !!s && (s instanceof Minipass || s instanceof Stream ||
      s instanceof EE && (
        typeof s.pipe === 'function' || // readable
        (typeof s.write === 'function' && typeof s.end === 'function') // writable
      ))
  }
}

@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@@ -0,0 +1,48 @@
# minipass-collect

A Minipass stream that collects all the data into a single chunk

Note that this buffers ALL data written to it, so it's only good for
situations where you are sure the entire stream fits in memory.

Note: this is primarily useful for the `Collect.PassThrough` class, since
Minipass streams already have a `.collect()` method which returns a promise
that resolves to the array of chunks, and a `.concat()` method that returns
the data concatenated into a single Buffer or String.

## USAGE

```js
const Collect = require('minipass-collect')

const collector = new Collect()
collector.on('data', allTheData => {
  console.log('all the data!', allTheData)
})

someSourceOfData.pipe(collector)

// note that you can also simply do:
someSourceOfData.pipe(new Minipass()).concat().then(data => ...)
// or even, if someSourceOfData is a Minipass:
someSourceOfData.concat().then(data => ...)
// but you might prefer to have it stream-shaped rather than
// Promise-shaped in some scenarios.
```

If you want to collect the data, but _also_ act as a passthrough stream,
then use `Collect.PassThrough` instead (for example to memoize streaming
responses), and listen on the `collect` event.

```js
const Collect = require('minipass-collect')

const collector = new Collect.PassThrough()
collector.on('collect', allTheData => {
  console.log('all the data!', allTheData)
})

someSourceOfData.pipe(collector).pipe(someOtherStream)
```

All [minipass options](http://npm.im/minipass) are supported.

@@ -0,0 +1,71 @@
const Minipass = require('minipass')
const _data = Symbol('_data')
const _length = Symbol('_length')
class Collect extends Minipass {
  constructor (options) {
    super(options)
    this[_data] = []
    this[_length] = 0
  }
  write (chunk, encoding, cb) {
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (!encoding)
      encoding = 'utf8'

    const c = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding)
    this[_data].push(c)
    this[_length] += c.length
    if (cb)
      cb()
    return true
  }
  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    const result = Buffer.concat(this[_data], this[_length])
    super.write(result)
    return super.end(cb)
  }
}
module.exports = Collect

// it would be possible to DRY this a bit by doing something like
// this.collector = new Collect() and listening on its data event,
// but it's not much code, and we may as well save the extra obj
class CollectPassThrough extends Minipass {
  constructor (options) {
    super(options)
    this[_data] = []
    this[_length] = 0
  }
  write (chunk, encoding, cb) {
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (!encoding)
      encoding = 'utf8'

    const c = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding)
    this[_data].push(c)
    this[_length] += c.length
    return super.write(chunk, encoding, cb)
  }
  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    const result = Buffer.concat(this[_data], this[_length])
    this.emit('collect', result)
    return super.end(cb)
  }
}
module.exports.PassThrough = CollectPassThrough

@@ -0,0 +1,29 @@
{
  "name": "minipass-collect",
  "version": "1.0.2",
  "description": "A Minipass stream that collects all the data into a single chunk",
  "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
  "license": "ISC",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "check-coverage": true
  },
  "devDependencies": {
    "tap": "^14.6.9"
  },
  "dependencies": {
    "minipass": "^3.0.0"
  },
  "files": [
    "index.js"
  ],
  "engines": {
    "node": ">= 8"
  }
}

@@ -0,0 +1,2 @@
---
_extends: 'open-source-project-boilerplate'

@@ -0,0 +1,53 @@
name: CI

on:
  pull_request:
  push:
    branches:
      - main
      - latest

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        node-version: [10.0.x, 10.x, 12.0.x, 12.x, 14.0.x, 14.x, 15.x, 16.x]
        platform:
          - os: ubuntu-latest
            shell: bash
          - os: macos-latest
            shell: bash
          - os: windows-latest
            shell: bash
          - os: windows-latest
            shell: cmd
          - os: windows-latest
            shell: powershell

    runs-on: ${{ matrix.platform.os }}
    defaults:
      run:
        shell: ${{ matrix.platform.shell }}

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v1.1.0

      - name: Use Nodejs ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
          cache: npm

      - name: Update npm
        run: npm i --prefer-online -g npm@latest

      - name: Install dependencies
        run: npm ci

      - name: Run Tap Tests
        run: npm test ${{ matrix.node-version == '10.0.x' && '-- --no-coverage' || '' }}

      - name: List dependencies
        run: npm ls -a
|
@ -0,0 +1,23 @@
# ignore most things, include some others
/*
/.*

!.github/
!bin/
!lib/
!docs/
!package.json
!package-lock.json
!README.md
!CONTRIBUTING.md
!LICENSE
!CHANGELOG.md
!example/
!scripts/
!tap-snapshots/
!test/
!.travis.yml
!.gitignore
!.gitattributes
!map.js
!index.js
@ -0,0 +1,28 @@
The MIT License (MIT)

Copyright (c) Isaac Z. Schlueter and Contributors
Copyright (c) 2016 David Frank

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

---

Note: This is a derivative work based on "node-fetch" by David Frank,
modified and distributed under the terms of the MIT license above.
https://github.com/bitinn/node-fetch
@ -0,0 +1,29 @@
# minipass-fetch

An implementation of window.fetch in Node.js using Minipass streams

This is a fork (or more precisely, a reimplementation) of
[node-fetch](http://npm.im/node-fetch).  All streams have been replaced
with [minipass streams](http://npm.im/minipass).

The goal of this module is to stay in sync with the API presented by
`node-fetch`, with the exception of the streaming interface provided.

## Why

Minipass streams are faster and more deterministic in their timing contract
than node-core streams, making them a better fit for many server-side use
cases.

## API

See [node-fetch](http://npm.im/node-fetch)

Differences from `node-fetch` (and, by extension, from the WhatWG Fetch
specification):

- Returns [minipass](http://npm.im/minipass) streams instead of node-core
  streams.
- Supports the full set of [TLS Options that may be provided to
  `https.request()`](https://nodejs.org/api/https.html#https_https_request_options_callback)
  when making `https` requests.
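Since the API tracks `node-fetch`, a basic request looks the same; a minimal sketch, where the URLs are illustrative and the `ca` value is a hypothetical PEM string demonstrating the TLS passthrough described above:

```js
const fetch = require('minipass-fetch')

// same call shape as node-fetch; res.body is a minipass stream
fetch('https://registry.npmjs.org/minipass-fetch')
  .then(res => res.json())
  .then(pkg => console.log(pkg['dist-tags']))

// TLS options are passed straight through to https.request(); the ca
// value here is assumed to hold a self-signed certificate in PEM form
const ca = process.env.CUSTOM_CA_PEM
fetch('https://internal.example.com/status', { ca })
  .then(res => res.text())
  .then(console.log)
  .catch(console.error)
```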
@ -0,0 +1 @@
module.exports = require('./lib/index.js')
@ -0,0 +1,17 @@
'use strict'
class AbortError extends Error {
  constructor (message) {
    super(message)
    this.code = 'FETCH_ABORTED'
    this.type = 'aborted'
    Error.captureStackTrace(this, this.constructor)
  }

  get name () {
    return 'AbortError'
  }

  // don't allow name to be overridden, but don't throw either
  set name (s) {}
}
module.exports = AbortError
@ -0,0 +1,97 @@
'use strict'
const Minipass = require('minipass')
const TYPE = Symbol('type')
const BUFFER = Symbol('buffer')

class Blob {
  constructor (blobParts, options) {
    this[TYPE] = ''

    const buffers = []
    let size = 0

    if (blobParts) {
      const a = blobParts
      const length = Number(a.length)
      for (let i = 0; i < length; i++) {
        const element = a[i]
        const buffer = element instanceof Buffer ? element
          : ArrayBuffer.isView(element)
          ? Buffer.from(element.buffer, element.byteOffset, element.byteLength)
          : element instanceof ArrayBuffer ? Buffer.from(element)
          : element instanceof Blob ? element[BUFFER]
          : typeof element === 'string' ? Buffer.from(element)
          : Buffer.from(String(element))
        size += buffer.length
        buffers.push(buffer)
      }
    }

    this[BUFFER] = Buffer.concat(buffers, size)

    const type = options && options.type !== undefined
      && String(options.type).toLowerCase()
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type
    }
  }

  get size () {
    return this[BUFFER].length
  }

  get type () {
    return this[TYPE]
  }

  text () {
    return Promise.resolve(this[BUFFER].toString())
  }

  arrayBuffer () {
    const buf = this[BUFFER]
    const off = buf.byteOffset
    const len = buf.byteLength
    const ab = buf.buffer.slice(off, off + len)
    return Promise.resolve(ab)
  }

  stream () {
    return new Minipass().end(this[BUFFER])
  }

  slice (start, end, type) {
    const size = this.size
    const relativeStart = start === undefined ? 0
      : start < 0 ? Math.max(size + start, 0)
      : Math.min(start, size)
    const relativeEnd = end === undefined ? size
      : end < 0 ? Math.max(size + end, 0)
      : Math.min(end, size)
    const span = Math.max(relativeEnd - relativeStart, 0)

    const buffer = this[BUFFER]
    const slicedBuffer = buffer.slice(
      relativeStart,
      relativeStart + span
    )
    const blob = new Blob([], { type })
    blob[BUFFER] = slicedBuffer
    return blob
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }

  static get BUFFER () {
    return BUFFER
  }
}

Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
})

module.exports = Blob
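A small sketch of the `slice()` semantics implemented above, which mirror `Array.prototype.slice` (negative indices count from the end); the strings are arbitrary:

```js
const Blob = require('./blob.js')

const b = new Blob(['hello ', 'world'], { type: 'text/plain' })
b.slice(6).text().then(s => console.log(s))     // 'world'
b.slice(-5).text().then(s => console.log(s))    // 'world'
b.slice(0, 5).text().then(s => console.log(s))  // 'hello'
// the type is not inherited unless passed explicitly as the third arg
console.log(b.slice(0, 5).type)                 // ''
```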
@ -0,0 +1,334 @@
'use strict'
const Minipass = require('minipass')
const MinipassSized = require('minipass-sized')

const Blob = require('./blob.js')
const {BUFFER} = Blob
const FetchError = require('./fetch-error.js')

// optional dependency on 'encoding'
let convert
try {
  convert = require('encoding').convert
} catch (e) {}

const INTERNALS = Symbol('Body internals')
const CONSUME_BODY = Symbol('consumeBody')

class Body {
  constructor (bodyArg, options = {}) {
    const { size = 0, timeout = 0 } = options
    const body = bodyArg === undefined || bodyArg === null ? null
      : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString())
      : isBlob(bodyArg) ? bodyArg
      : Buffer.isBuffer(bodyArg) ? bodyArg
      : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]'
        ? Buffer.from(bodyArg)
      : ArrayBuffer.isView(bodyArg)
        ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength)
      : Minipass.isStream(bodyArg) ? bodyArg
      : Buffer.from(String(bodyArg))

    this[INTERNALS] = {
      body,
      disturbed: false,
      error: null,
    }

    this.size = size
    this.timeout = timeout

    if (Minipass.isStream(body)) {
      body.on('error', er => {
        const error = er.name === 'AbortError' ? er
          : new FetchError(`Invalid response while trying to fetch ${
            this.url}: ${er.message}`, 'system', er)
        this[INTERNALS].error = error
      })
    }
  }

  get body () {
    return this[INTERNALS].body
  }

  get bodyUsed () {
    return this[INTERNALS].disturbed
  }

  arrayBuffer () {
    return this[CONSUME_BODY]().then(buf =>
      buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
  }

  blob () {
    const ct = this.headers && this.headers.get('content-type') || ''
    return this[CONSUME_BODY]().then(buf => Object.assign(
      new Blob([], { type: ct.toLowerCase() }),
      { [BUFFER]: buf }
    ))
  }

  json () {
    return this[CONSUME_BODY]().then(buf => {
      try {
        return JSON.parse(buf.toString())
      } catch (er) {
        return Promise.reject(new FetchError(
          `invalid json response body at ${
            this.url} reason: ${er.message}`, 'invalid-json'))
      }
    })
  }

  text () {
    return this[CONSUME_BODY]().then(buf => buf.toString())
  }

  buffer () {
    return this[CONSUME_BODY]()
  }

  textConverted () {
    return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
  }

  [CONSUME_BODY] () {
    if (this[INTERNALS].disturbed)
      return Promise.reject(new TypeError(`body used already for: ${
        this.url}`))

    this[INTERNALS].disturbed = true

    if (this[INTERNALS].error)
      return Promise.reject(this[INTERNALS].error)

    // body is null
    if (this.body === null) {
      return Promise.resolve(Buffer.alloc(0))
    }

    if (Buffer.isBuffer(this.body))
      return Promise.resolve(this.body)

    const upstream = isBlob(this.body) ? this.body.stream() : this.body

    /* istanbul ignore if: should never happen */
    if (!Minipass.isStream(upstream))
      return Promise.resolve(Buffer.alloc(0))

    const stream = this.size && upstream instanceof MinipassSized ? upstream
      : !this.size && upstream instanceof Minipass &&
        !(upstream instanceof MinipassSized) ? upstream
      : this.size ? new MinipassSized({ size: this.size })
      : new Minipass()

    // allow timeout on slow response body
    const resTimeout = this.timeout ? setTimeout(() => {
      stream.emit('error', new FetchError(
        `Response timeout while trying to fetch ${
          this.url} (over ${this.timeout}ms)`, 'body-timeout'))
    }, this.timeout) : null

    // do not keep the process open just for this timeout, even
    // though we expect it'll get cleared eventually.
    if (resTimeout) {
      resTimeout.unref()
    }

    // do the pipe in the promise, because the pipe() can send too much
    // data through right away and upset the MP Sized object
    return new Promise((resolve, reject) => {
      // if the stream is some other kind of stream, then pipe through a MP
      // so we can collect it more easily.
      if (stream !== upstream) {
        upstream.on('error', er => stream.emit('error', er))
        upstream.pipe(stream)
      }
      resolve()
    }).then(() => stream.concat()).then(buf => {
      clearTimeout(resTimeout)
      return buf
    }).catch(er => {
      clearTimeout(resTimeout)
      // request was aborted, reject with this Error
      if (er.name === 'AbortError' || er.name === 'FetchError')
        throw er
      else if (er.name === 'RangeError')
        throw new FetchError(`Could not create Buffer from response body for ${
          this.url}: ${er.message}`, 'system', er)
      else
        // other errors, such as incorrect content-encoding or content-length
        throw new FetchError(`Invalid response body while trying to fetch ${
          this.url}: ${er.message}`, 'system', er)
    })
  }

  static clone (instance) {
    if (instance.bodyUsed)
      throw new Error('cannot clone body after it is used')

    const body = instance.body

    // check that body is a stream and not form-data object
    // NB: can't clone the form-data object without having it as a dependency
    if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') {
      // create a dedicated tee stream so that we don't lose data
      // potentially sitting in the body stream's buffer by writing it
      // immediately to p1 and not having it for p2.
      const tee = new Minipass()
      const p1 = new Minipass()
      const p2 = new Minipass()
      tee.on('error', er => {
        p1.emit('error', er)
        p2.emit('error', er)
      })
      body.on('error', er => tee.emit('error', er))
      tee.pipe(p1)
      tee.pipe(p2)
      body.pipe(tee)
      // set instance body to one fork, return the other
      instance[INTERNALS].body = p1
      return p2
    } else
      return instance.body
  }

  static extractContentType (body) {
    return body === null || body === undefined ? null
      : typeof body === 'string' ? 'text/plain;charset=UTF-8'
      : isURLSearchParams(body)
        ? 'application/x-www-form-urlencoded;charset=UTF-8'
      : isBlob(body) ? body.type || null
      : Buffer.isBuffer(body) ? null
      : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null
      : ArrayBuffer.isView(body) ? null
      : typeof body.getBoundary === 'function'
        ? `multipart/form-data;boundary=${body.getBoundary()}`
      : Minipass.isStream(body) ? null
      : 'text/plain;charset=UTF-8'
  }

  static getTotalBytes (instance) {
    const {body} = instance
    return (body === null || body === undefined) ? 0
      : isBlob(body) ? body.size
      : Buffer.isBuffer(body) ? body.length
      : body && typeof body.getLengthSync === 'function' && (
        // detect form data input from form-data module
        body._lengthRetrievers &&
        /* istanbul ignore next */ body._lengthRetrievers.length == 0 || // 1.x
        body.hasKnownLength && body.hasKnownLength()) // 2.x
        ? body.getLengthSync()
        : null
  }

  static writeToStream (dest, instance) {
    const {body} = instance

    if (body === null || body === undefined)
      dest.end()
    else if (Buffer.isBuffer(body) || typeof body === 'string')
      dest.end(body)
    else {
      // body is stream or blob
      const stream = isBlob(body) ? body.stream() : body
      stream.on('error', er => dest.emit('error', er)).pipe(dest)
    }

    return dest
  }
}

Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
})

const isURLSearchParams = obj =>
  // Duck-typing as a necessary condition.
  (typeof obj !== 'object' ||
    typeof obj.append !== 'function' ||
    typeof obj.delete !== 'function' ||
    typeof obj.get !== 'function' ||
    typeof obj.getAll !== 'function' ||
    typeof obj.has !== 'function' ||
    typeof obj.set !== 'function') ? false
  // Brand-checking and more duck-typing as optional condition.
  : obj.constructor.name === 'URLSearchParams' ||
    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
    typeof obj.sort === 'function'

const isBlob = obj =>
  typeof obj === 'object' &&
  typeof obj.arrayBuffer === 'function' &&
  typeof obj.type === 'string' &&
  typeof obj.stream === 'function' &&
  typeof obj.constructor === 'function' &&
  typeof obj.constructor.name === 'string' &&
  /^(Blob|File)$/.test(obj.constructor.name) &&
  /^(Blob|File)$/.test(obj[Symbol.toStringTag])

const convertBody = (buffer, headers) => {
  /* istanbul ignore if */
  if (typeof convert !== 'function')
    throw new Error('The package `encoding` must be installed to use the textConverted() function')

  const ct = headers && headers.get('content-type')
  let charset = 'utf-8'
  let res, str

  // header
  if (ct)
    res = /charset=([^;]*)/i.exec(ct)

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString()

  // html5
  if (!res && str)
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)

  // html4
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)

    if (!res) {
      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str)
      if (res)
        res.pop() // drop last quote
    }

    if (res)
      res = /charset=(.*)/i.exec(res.pop())
  }

  // xml
  if (!res && str)
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)

  // found charset
  if (res) {
    charset = res.pop()

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk')
      charset = 'gb18030'
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(
    buffer,
    'UTF-8',
    charset
  ).toString()
}

module.exports = Body
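To illustrate the tee behavior of `Body.clone()` described in the comments above, a minimal sketch (the payload is arbitrary):

```js
const Minipass = require('minipass')
const Body = require('./body.js')

const src = new Minipass()
const b = new Body(src)
// clone() replaces the instance body with one fork of a tee stream
// and returns the other, so both can be consumed independently
const fork = Body.clone(b)
src.end('payload')

Promise.all([
  b.text(),
  new Body(fork).text(),
]).then(([a, c]) => console.log(a === c)) // true
```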
@ -0,0 +1,31 @@
'use strict'
class FetchError extends Error {
  constructor (message, type, systemError) {
    super(message)
    this.code = 'FETCH_ERROR'

    // pick up code, expected, path, ...
    if (systemError)
      Object.assign(this, systemError)

    this.errno = this.code

    // override anything the system error might've clobbered
    this.type = this.code === 'EBADSIZE' && this.found > this.expect
      ? 'max-size' : type
    this.message = message
    Error.captureStackTrace(this, this.constructor)
  }

  get name () {
    return 'FetchError'
  }

  // don't allow name to be overwritten
  set name (n) {}

  get [Symbol.toStringTag] () {
    return 'FetchError'
  }
}
module.exports = FetchError
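A brief sketch of how the error fields behave; the values shown follow directly from the constructor above:

```js
const FetchError = require('./fetch-error.js')

const plain = new FetchError('boom', 'system')
console.log(plain.name, plain.code, plain.type) // FetchError FETCH_ERROR system

// a systemError's code is copied over, and an EBADSIZE overflow
// (as raised by minipass-sized) is reported as type 'max-size'
const sized = new FetchError('too big', 'system',
  { code: 'EBADSIZE', found: 10, expect: 1 })
console.log(sized.code, sized.type) // EBADSIZE max-size
```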
@ -0,0 +1,250 @@
'use strict'
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/

const validateName = name => {
  name = `${name}`
  if (invalidTokenRegex.test(name) || name === '')
    throw new TypeError(`${name} is not a legal HTTP header name`)
}

const validateValue = value => {
  value = `${value}`
  if (invalidHeaderCharRegex.test(value))
    throw new TypeError(`${value} is not a legal HTTP header value`)
}

const find = (map, name) => {
  name = name.toLowerCase()
  for (const key in map) {
    if (key.toLowerCase() === name)
      return key
  }
  return undefined
}

const MAP = Symbol('map')
class Headers {
  constructor (init = undefined) {
    this[MAP] = Object.create(null)
    if (init instanceof Headers) {
      const rawHeaders = init.raw()
      const headerNames = Object.keys(rawHeaders)
      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value)
        }
      }
      return
    }

    // no-op
    if (init === undefined || init === null)
      return

    if (typeof init === 'object') {
      const method = init[Symbol.iterator]
      if (method !== null && method !== undefined) {
        if (typeof method !== 'function')
          throw new TypeError('Header pairs must be iterable')

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = []
        for (const pair of init) {
          if (typeof pair !== 'object' ||
              typeof pair[Symbol.iterator] !== 'function')
            throw new TypeError('Each header pair must be iterable')
          const arrPair = Array.from(pair)
          if (arrPair.length !== 2)
            throw new TypeError('Each header pair must be a name/value tuple')
          pairs.push(arrPair)
        }

        for (const pair of pairs) {
          this.append(pair[0], pair[1])
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          this.append(key, init[key])
        }
      }
    } else
      throw new TypeError('Provided initializer must be an object')
  }

  get (name) {
    name = `${name}`
    validateName(name)
    const key = find(this[MAP], name)
    if (key === undefined)
      return null

    return this[MAP][key].join(', ')
  }

  forEach (callback, thisArg = undefined) {
    let pairs = getHeaders(this)
    for (let i = 0; i < pairs.length; i++) {
      const [name, value] = pairs[i]
      callback.call(thisArg, value, name, this)
      // refresh in case the callback added more headers
      pairs = getHeaders(this)
    }
  }

  set (name, value) {
    name = `${name}`
    value = `${value}`
    validateName(name)
    validateValue(value)
    const key = find(this[MAP], name)
    this[MAP][key !== undefined ? key : name] = [value]
  }

  append (name, value) {
    name = `${name}`
    value = `${value}`
    validateName(name)
    validateValue(value)
    const key = find(this[MAP], name)
    if (key !== undefined)
      this[MAP][key].push(value)
    else
      this[MAP][name] = [value]
  }

  has (name) {
    name = `${name}`
    validateName(name)
    return find(this[MAP], name) !== undefined
  }

  delete (name) {
    name = `${name}`
    validateName(name)
    const key = find(this[MAP], name)
    if (key !== undefined)
      delete this[MAP][key]
  }

  raw () {
    return this[MAP]
  }

  keys () {
    return new HeadersIterator(this, 'key')
  }

  values () {
    return new HeadersIterator(this, 'value')
  }

  [Symbol.iterator]() {
    return new HeadersIterator(this, 'key+value')
  }

  entries () {
    return new HeadersIterator(this, 'key+value')
  }

  get [Symbol.toStringTag] () {
    return 'Headers'
  }

  static exportNodeCompatibleHeaders (headers) {
    const obj = Object.assign(Object.create(null), headers[MAP])

    // http.request() only supports string as Host header. This hack makes
    // specifying custom Host header possible.
    const hostHeaderKey = find(headers[MAP], 'Host')
    if (hostHeaderKey !== undefined)
      obj[hostHeaderKey] = obj[hostHeaderKey][0]

    return obj
  }

  static createHeadersLenient (obj) {
    const headers = new Headers()
    for (const name of Object.keys(obj)) {
      if (invalidTokenRegex.test(name))
        continue

      if (Array.isArray(obj[name])) {
        for (const val of obj[name]) {
          if (invalidHeaderCharRegex.test(val))
            continue

          if (headers[MAP][name] === undefined)
            headers[MAP][name] = [val]
          else
            headers[MAP][name].push(val)
        }
      } else if (!invalidHeaderCharRegex.test(obj[name]))
        headers[MAP][name] = [obj[name]]
    }
    return headers
  }
}

Object.defineProperties(Headers.prototype, {
  get: { enumerable: true },
  forEach: { enumerable: true },
  set: { enumerable: true },
  append: { enumerable: true },
  has: { enumerable: true },
  delete: { enumerable: true },
  keys: { enumerable: true },
  values: { enumerable: true },
  entries: { enumerable: true },
})

const getHeaders = (headers, kind = 'key+value') =>
  Object.keys(headers[MAP]).sort().map(
    kind === 'key' ? k => k.toLowerCase()
    : kind === 'value' ? k => headers[MAP][k].join(', ')
    : k => [k.toLowerCase(), headers[MAP][k].join(', ')]
  )

const INTERNAL = Symbol('internal')

class HeadersIterator {
  constructor (target, kind) {
    this[INTERNAL] = {
      target,
      kind,
      index: 0,
    }
  }

  get [Symbol.toStringTag] () {
    return 'HeadersIterator'
  }

  next () {
    /* istanbul ignore if: should be impossible */
    if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype)
      throw new TypeError('Value of `this` is not a HeadersIterator')

    const { target, kind, index } = this[INTERNAL]
    const values = getHeaders(target, kind)
    const len = values.length
    if (index >= len) {
      return {
        value: undefined,
        done: true,
      }
    }

    this[INTERNAL].index++

    return { value: values[index], done: false }
  }
}

// manually extend because 'extends' requires a ctor
Object.setPrototypeOf(HeadersIterator.prototype,
  Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())))

module.exports = Headers
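A short sketch of the case-insensitive get/append behavior implemented above; the header names and values are arbitrary:

```js
const Headers = require('./headers.js')

const h = new Headers({ 'Content-Type': 'text/plain' })
h.append('x-thing', 'one')
h.append('X-Thing', 'two') // appends to the existing 'x-thing' entry

// lookups are case-insensitive, and multiple values join with ', '
console.log(h.get('content-type')) // 'text/plain'
console.log(h.get('X-THING'))      // 'one, two'

// iteration yields lowercased names in sorted order
for (const [name, value] of h)
  console.log(name, value)
```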
@ -0,0 +1,341 @@
'use strict'
const Url = require('url')
const http = require('http')
const https = require('https')
const zlib = require('minizlib')
const Minipass = require('minipass')

const Body = require('./body.js')
const { writeToStream, getTotalBytes } = Body
const Response = require('./response.js')
const Headers = require('./headers.js')
const { createHeadersLenient } = Headers
const Request = require('./request.js')
const { getNodeRequestOptions } = Request
const FetchError = require('./fetch-error.js')
const AbortError = require('./abort-error.js')

const resolveUrl = Url.resolve

const fetch = (url, opts) => {
  if (/^data:/.test(url)) {
    const request = new Request(url, opts)
    try {
      const split = url.split(',')
      const data = Buffer.from(split[1], 'base64')
      const type = split[0].match(/^data:(.*);base64$/)[1]
      return Promise.resolve(new Response(data, {
        headers: {
          'Content-Type': type,
          'Content-Length': data.length,
        }
      }))
    } catch (er) {
      return Promise.reject(new FetchError(`[${request.method}] ${
        request.url} invalid URL, ${er.message}`, 'system', er))
    }
  }

  return new Promise((resolve, reject) => {
    // build request object
    const request = new Request(url, opts)
    let options
    try {
      options = getNodeRequestOptions(request)
    } catch (er) {
      return reject(er)
    }

    const send = (options.protocol === 'https:' ? https : http).request
    const { signal } = request
    let response = null
    const abort = () => {
      const error = new AbortError('The user aborted a request.')
      reject(error)
      if (Minipass.isStream(request.body) &&
          typeof request.body.destroy === 'function') {
        request.body.destroy(error)
      }
      if (response && response.body) {
        response.body.emit('error', error)
      }
    }

    if (signal && signal.aborted)
      return abort()

    const abortAndFinalize = () => {
      abort()
      finalize()
    }

    const finalize = () => {
      req.abort()
      if (signal)
        signal.removeEventListener('abort', abortAndFinalize)
      clearTimeout(reqTimeout)
    }

    // send request
    const req = send(options)

    if (signal)
      signal.addEventListener('abort', abortAndFinalize)

    let reqTimeout = null
    if (request.timeout) {
      req.once('socket', socket => {
        reqTimeout = setTimeout(() => {
          reject(new FetchError(`network timeout at: ${
            request.url}`, 'request-timeout'))
          finalize()
        }, request.timeout)
      })
    }

    req.on('error', er => {
      // if a 'response' event is emitted before the 'error' event, then by the
      // time this handler is run it's too late to reject the Promise for the
      // response. instead, we forward the error event to the response stream
      // so that the error will surface to the user when they try to consume
      // the body. this is done as a side effect of aborting the request except
      // for in windows, where we must forward the event manually, otherwise
      // there is no longer a ref'd socket attached to the request and the
      // stream never ends so the event loop runs out of work and the process
      // exits without warning.
      // coverage skipped here due to the difficulty in testing
      // istanbul ignore next
      if (req.res)
        req.res.emit('error', er)
      reject(new FetchError(`request to ${request.url} failed, reason: ${
        er.message}`, 'system', er))
      finalize()
    })

    req.on('response', res => {
      clearTimeout(reqTimeout)

      const headers = createHeadersLenient(res.headers)

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location')

        // HTTP fetch step 5.3
        const locationURL = location === null ? null
          : resolveUrl(request.url, location)

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${
              request.url}`, 'no-redirect'))
            finalize()
            return

          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to
            // use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL)
              } catch (err) {
                /* istanbul ignore next: nodejs server prevent invalid
                   response headers, we can't test this through normal
                   request */
                reject(err)
              }
            }
            break

          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${
                request.url}`, 'max-redirect'))
              finalize()
              return
            }

            // HTTP-redirect fetch step 9
            if (res.statusCode !== 303 &&
                request.body &&
                getTotalBytes(request) === null) {
              reject(new FetchError(
                'Cannot follow redirect with body being a readable stream',
                'unsupported-redirect'
              ))
              finalize()
              return
            }

            // Update host due to redirection
            request.headers.set('host', Url.parse(locationURL).host)

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout,
            }

            // HTTP-redirect fetch step 11
            if (res.statusCode === 303 || (
                (res.statusCode === 301 || res.statusCode === 302) &&
                request.method === 'POST'
            )) {
              requestOpts.method = 'GET'
              requestOpts.body = undefined
              requestOpts.headers.delete('content-length')
            }

            // HTTP-redirect fetch step 15
            resolve(fetch(new Request(locationURL, requestOpts)))
            finalize()
            return
        }
      } // end if(isRedirect)

      // prepare response
      res.once('end', () =>
        signal && signal.removeEventListener('abort', abortAndFinalize))

      const body = new Minipass()
      // exceedingly rare that the stream would have an error,
      // but just in case we proxy it to the stream in use.
      res.on('error', /* istanbul ignore next */ er => body.emit('error', er))
      res.on('data', (chunk) => body.write(chunk))
      res.on('end', () => body.end())

      const responseOptions = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter,
        trailer: new Promise(resolve =>
          res.on('end', () => resolve(createHeadersLenient(res.trailers))))
      }

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding')

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress ||
          request.method === 'HEAD' ||
          codings === null ||
          res.statusCode === 204 ||
          res.statusCode === 304) {
        response = new Response(body, responseOptions)
        resolve(response)
        return
      }

      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.constants.Z_SYNC_FLUSH,
        finishFlush: zlib.constants.Z_SYNC_FLUSH,
      }

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        const unzip = new zlib.Gunzip(zlibOptions)
        response = new Response(
          // exceedingly rare that the stream would have an error,
          // but just in case we proxy it to the stream in use.
          body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip),
          responseOptions
        )
        resolve(response)
        return
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new Minipass())
        raw.once('data', chunk => {
          // see http://stackoverflow.com/questions/37519828
          const decoder = (chunk[0] & 0x0F) === 0x08
            ? new zlib.Inflate()
            : new zlib.InflateRaw()
          // exceedingly rare that the stream would have an error,
          // but just in case we proxy it to the stream in use.
          body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
          response = new Response(decoder, responseOptions)
          resolve(response)
        })
        return
      }

      // for br
      if (codings == 'br') {
        // ignoring coverage so tests don't have to fake support (or lack of) for brotli
        // istanbul ignore next
        try {
          var decoder = new zlib.BrotliDecompress()
        } catch (err) {
          reject(err)
          finalize()
          return
        }
        // exceedingly rare that the stream would have an error,
        // but just in case we proxy it to the stream in use.
        body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
        response = new Response(decoder, responseOptions)
        resolve(response)
        return
      }

      // otherwise, use response as-is
      response = new Response(body, responseOptions)
      resolve(response)
    })

    writeToStream(req, request)
  })
}

module.exports = fetch

fetch.isRedirect = code =>
  code === 301 ||
  code === 302 ||
  code === 303 ||
  code === 307 ||
  code === 308

fetch.Headers = Headers
fetch.Request = Request
fetch.Response = Response
fetch.FetchError = FetchError
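To tie the abort and redirect handling above together, a sketch using the `abort-controller` package listed in the test devDependencies; the URL is illustrative:

```js
const fetch = require('./index.js')
const AbortController = require('abort-controller')

const controller = new AbortController()
// give up after 500ms; aborting rejects with an AbortError
setTimeout(() => controller.abort(), 500)

fetch('http://example.com/slow', {
  signal: controller.signal,
  redirect: 'manual', // surface 3xx responses instead of following
}).then(res => {
  if (fetch.isRedirect(res.status))
    // with redirect: 'manual', Location is resolved against the request URL
    console.log('redirected to', res.headers.get('location'))
}).catch(er => {
  if (er.name === 'AbortError')
    console.log('request aborted')
})
```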
@ -0,0 +1,263 @@
'use strict'
const Url = require('url')
const Minipass = require('minipass')
const Headers = require('./headers.js')
const { exportNodeCompatibleHeaders } = Headers
const Body = require('./body.js')
const { clone, extractContentType, getTotalBytes } = Body

const version = require('../package.json').version
const defaultUserAgent =
  `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)`

const INTERNALS = Symbol('Request internals')

const { parse: parseUrl, format: formatUrl } = Url

const isRequest = input =>
  typeof input === 'object' && typeof input[INTERNALS] === 'object'

const isAbortSignal = signal => {
  const proto = (
    signal
    && typeof signal === 'object'
    && Object.getPrototypeOf(signal)
  )
  return !!(proto && proto.constructor.name === 'AbortSignal')
}

class Request extends Body {
  constructor (input, init = {}) {
    const parsedURL = isRequest(input) ? Url.parse(input.url)
      : input && input.href ? Url.parse(input.href)
      : Url.parse(`${input}`)

    if (isRequest(input))
      init = { ...input[INTERNALS], ...init }
    else if (!input || typeof input === 'string')
      input = {}

    const method = (init.method || input.method || 'GET').toUpperCase()
    const isGETHEAD = method === 'GET' || method === 'HEAD'

    if ((init.body !== null && init.body !== undefined ||
        isRequest(input) && input.body !== null) && isGETHEAD)
      throw new TypeError('Request with GET/HEAD method cannot have body')

    const inputBody = init.body !== null && init.body !== undefined ? init.body
      : isRequest(input) && input.body !== null ? clone(input)
      : null

    super(inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0,
    })

    const headers = new Headers(init.headers || input.headers || {})

    if (inputBody !== null && inputBody !== undefined &&
        !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody)
      if (contentType)
        headers.append('Content-Type', contentType)
    }

    const signal = 'signal' in init ? init.signal
      : null

    if (signal !== null && signal !== undefined && !isAbortSignal(signal))
      throw new TypeError('Expected signal must be an instanceof AbortSignal')

    // TLS specific options that are handled by node
    const {
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0',
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    } = init

    this[INTERNALS] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal,
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    }

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow
      : input.follow !== undefined ? input.follow
      : 20
    this.compress = init.compress !== undefined ? init.compress
      : input.compress !== undefined ? input.compress
      : true
    this.counter = init.counter || input.counter || 0
    this.agent = init.agent || input.agent
  }

  get method() {
    return this[INTERNALS].method
  }

  get url() {
    return formatUrl(this[INTERNALS].parsedURL)
  }

  get headers() {
    return this[INTERNALS].headers
  }

  get redirect() {
    return this[INTERNALS].redirect
  }

  get signal() {
    return this[INTERNALS].signal
  }

  clone () {
    return new Request(this)
  }

  get [Symbol.toStringTag] () {
    return 'Request'
  }

  static getNodeRequestOptions (request) {
    const parsedURL = request[INTERNALS].parsedURL
    const headers = new Headers(request[INTERNALS].headers)

    // fetch step 1.3
    if (!headers.has('Accept'))
      headers.set('Accept', '*/*')

    // Basic fetch
    if (!parsedURL.protocol || !parsedURL.hostname)
      throw new TypeError('Only absolute URLs are supported')

    if (!/^https?:$/.test(parsedURL.protocol))
      throw new TypeError('Only HTTP(S) protocols are supported')

    if (request.signal &&
        Minipass.isStream(request.body) &&
        typeof request.body.destroy !== 'function') {
      throw new Error(
        'Cancellation of streamed requests with AbortSignal is not supported')
    }

    // HTTP-network-or-cache fetch steps 2.4-2.7
    const contentLengthValue =
      (request.body === null || request.body === undefined) &&
        /^(POST|PUT)$/i.test(request.method) ? '0'
      : request.body !== null && request.body !== undefined
        ? getTotalBytes(request)
      : null

    if (contentLengthValue)
      headers.set('Content-Length', contentLengthValue + '')

    // HTTP-network-or-cache fetch step 2.11
    if (!headers.has('User-Agent'))
      headers.set('User-Agent', defaultUserAgent)

    // HTTP-network-or-cache fetch step 2.15
    if (request.compress && !headers.has('Accept-Encoding'))
      headers.set('Accept-Encoding', 'gzip,deflate')

    const agent = typeof request.agent === 'function'
      ? request.agent(parsedURL)
      : request.agent

    if (!headers.has('Connection') && !agent)
      headers.set('Connection', 'close')

    // TLS specific options that are handled by node
    const {
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    } = request[INTERNALS]

    // HTTP-network fetch step 4.2
    // chunked encoding is handled by Node.js

    return {
      ...parsedURL,
      method: request.method,
      headers: exportNodeCompatibleHeaders(headers),
      agent,
      ca,
      cert,
      ciphers,
      clientCertEngine,
      crl,
      dhparam,
      ecdhCurve,
      family,
      honorCipherOrder,
      key,
      passphrase,
      pfx,
      rejectUnauthorized,
      secureOptions,
      secureProtocol,
      servername,
      sessionIdContext,
    }
  }
}

module.exports = Request

Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true },
})
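A sketch showing how the TLS options above survive into the node request options; the host and PEM string are hypothetical:

```js
const Request = require('./request.js')

const req = new Request('https://internal.example.com/', {
  method: 'POST',
  body: 'hi',
  ca: '-----BEGIN CERTIFICATE-----\n...', // hypothetical PEM
  rejectUnauthorized: false,
})

const opts = Request.getNodeRequestOptions(req)
// the TLS fields ride along with the parsed URL and exported headers
console.log(opts.method, opts.rejectUnauthorized, typeof opts.ca)
// POST false string
```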
@ -0,0 +1,89 @@
'use strict'
const http = require('http')
const { STATUS_CODES } = http

const Headers = require('./headers.js')
const Body = require('./body.js')
const { clone, extractContentType } = Body

const INTERNALS = Symbol('Response internals')

class Response extends Body {
  constructor (body = null, opts = {}) {
    super(body, opts)

    const status = opts.status || 200
    const headers = new Headers(opts.headers)

    if (body !== null && body !== undefined && !headers.has('Content-Type')) {
      const contentType = extractContentType(body)
      if (contentType)
        headers.append('Content-Type', contentType)
    }

    this[INTERNALS] = {
      url: opts.url,
      status,
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter,
      trailer: Promise.resolve(opts.trailer || new Headers()),
    }
  }

  get trailer () {
    return this[INTERNALS].trailer
  }

  get url () {
    return this[INTERNALS].url || ''
  }

  get status () {
    return this[INTERNALS].status
  }

  get ok () {
    return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300
  }

  get redirected () {
    return this[INTERNALS].counter > 0
  }

  get statusText () {
    return this[INTERNALS].statusText
  }

  get headers () {
    return this[INTERNALS].headers
  }

  clone () {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected,
      trailer: this.trailer,
    })
  }

  get [Symbol.toStringTag] () {
    return 'Response'
  }
}

module.exports = Response

Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true },
})
@ -0,0 +1 @@
module.exports = test => test.replace(/^test[\/\\]/, 'lib/')
File diff suppressed because it is too large
@ -0,0 +1,53 @@
{
  "name": "minipass-fetch",
  "version": "1.4.1",
  "description": "An implementation of window.fetch in Node.js using Minipass streams",
  "license": "MIT",
  "main": "lib/index.js",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "coverage-map": "map.js",
    "check-coverage": true
  },
  "devDependencies": {
    "@ungap/url-search-params": "^0.1.2",
    "abort-controller": "^3.0.0",
    "abortcontroller-polyfill": "~1.3.0",
    "form-data": "^2.5.1",
    "parted": "^0.1.1",
    "string-to-arraybuffer": "^1.0.2",
    "tap": "^15.0.9",
    "whatwg-url": "^7.0.0"
  },
  "dependencies": {
    "minipass": "^3.1.0",
    "minipass-sized": "^1.0.3",
    "minizlib": "^2.0.0"
  },
  "optionalDependencies": {
    "encoding": "^0.1.12"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/npm/minipass-fetch.git"
  },
  "keywords": [
    "fetch",
    "minipass",
    "node-fetch",
    "window.fetch"
  ],
  "files": [
    "index.js",
    "lib/*.js"
  ],
  "engines": {
    "node": ">=8"
  }
}
@ -0,0 +1,12 @@
'use strict'
const AbortError = require('../lib/abort-error.js')
const t = require('tap')
const ae = new AbortError('foo')
t.match(ae, {
  name: 'AbortError',
  stack: String,
  code: 'FETCH_ABORTED',
  type: 'aborted',
})
ae.name = 'foo'
t.equal(ae.name, 'AbortError', 'cannot override name')
@ -0,0 +1,69 @@
'use strict'
const Blob = require('../lib/blob.js')
const t = require('tap')
const stringToArrayBuffer = require('string-to-arraybuffer')

t.test('null case', t => {
  const b = new Blob()
  t.equal(b.toString(), '[object Blob]')
  return b.text().then(res => t.equal(res, ''))
    .then(() => b.arrayBuffer())
    .then(buf => t.match(buf, Buffer.alloc(0)))
})

t.test('mix of stuff', t => {
  const b = new Blob([
    Buffer.from('one'),
    ' ',
    stringToArrayBuffer('two'),
    ' ',
    new Uint8Array(stringToArrayBuffer('three')),
    new Blob(' '),
    { toString () { return 'four' } },
  ], { type: 'foo' })
  const x = 'one two three four'
  t.equal(b.type, 'foo')
  t.equal(b.size, x.length)

  return b.text()
    .then(text => t.equal(text, x))
    .then(() => b.stream())
    .then(s => s.concat())
    .then(s => t.equal(s.toString(), x))
    .then(() => b.arrayBuffer())
    .then(ab => t.match(Buffer.from(ab), Buffer.from(x)))
})

t.test('slice', t => {
  const b = new Blob('1 2 3 4', { type: 'x' })
  const b1 = b.slice(2)
  t.equal(b1.type, '')
  const b2 = b.slice(2, 4, 'type')
  t.equal(b2.type, 'type')
  const b3 = b.slice(2, -2)
  const b4 = b.slice(-4)
  const b5 = b.slice(4, -4)
  const b6 = b.slice()
  return Promise.all([
    b1.text(),
    b2.text(),
    b3.text(),
    b4.text(),
    b5.text(),
    b6.text(),
  ]).then(([b1, b2, b3, b4, b5, b6]) =>
    t.strictSame({b1, b2, b3, b4, b5, b6}, {
      b1: '2 3 4',
      b2: '2 ',
      b3: '2 3',
      b4: ' 3 4',
      b5: '',
      b6: '1 2 3 4',
    }))
})

t.test('expose the BUFFER symbol as read-only static property', t => {
  t.match(Blob.BUFFER, Symbol('buffer'))
  t.throws(() => Blob.BUFFER = 'fubber')
  t.end()
})
@ -0,0 +1,344 @@
'use strict'
const t = require('tap')
const Body = require('../lib/body.js')
const { URLSearchParams } = require('url')
const stringToArrayBuffer = require('string-to-arraybuffer')
const URLSearchParams_Polyfill = require('@ungap/url-search-params')
const Blob = require('../lib/blob')
const FormData = require('form-data')
const Minipass = require('minipass')
const MinipassSized = require('minipass-sized')
const AbortError = require('../lib/abort-error.js')
const {PassThrough} = require('stream')

t.test('null body', async t => {
  const b = new Body()
  t.equal(b.body, null)
  t.equal(Body.extractContentType(b.body), null)
  t.equal(Body.getTotalBytes(b), 0)
  t.match(await b.buffer(), Buffer.alloc(0))
})

t.test('url search params', async t => {
  const b = new Body(new URLSearchParams('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
  t.equal(Body.getTotalBytes(b), 3)
})

t.test('url search params polyfill', async t => {
  const b = new Body(new URLSearchParams_Polyfill('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
  t.equal(Body.getTotalBytes(b), 3)
})

t.test('url search params by another name', async t => {
  const b = new Body(new (class Florb extends URLSearchParams {})('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
  t.equal(Body.getTotalBytes(b), 3)
})

t.test('url search params by an even differenter name', async t => {
  const b = new Body(new (class Florb extends URLSearchParams {
    get [Symbol.toStringTag] () { return 'Florb' }
  })('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
  t.equal(Body.getTotalBytes(b), 3)
})

t.test('form-data', async t => {
  const f = new FormData()
  f.append('a','1')
  const b = new Body(f)
  t.match(b.body.getBuffer().toString(),`
Content-Disposition: form-data; name="a"\r
\r
1\r
`)
  t.equal(Body.extractContentType(b.body),
    'multipart/form-data;boundary=' + f.getBoundary())
  t.equal(Body.getTotalBytes(b), f.getBuffer().length)
})

t.test('blob body', async t => {
  const b = new Body(new Blob('a=1', {type: 'foo', size: 3}))
  b.url = 'double'
  t.equal(Body.getTotalBytes(b), 3)
  t.equal(Body.extractContentType(b.body), 'foo')
  t.equal(b.bodyUsed, false)
  t.equal(await b.text(), 'a=1')
  t.equal(b.bodyUsed, true)
  await t.rejects(() => b.buffer(), TypeError)
})

t.test('blob body no content-type', async t => {
  const b = new Body(new Blob('a=1', { size: 3 }))
  b.headers = { get () {} }
  t.match(await b.blob(), {
    [Blob.BUFFER]: Buffer.from('a=1'),
    size: 3,
    type: '',
  })
})

t.test('blob body with content-type', async t => {
  const b = new Body(new Blob('a=1', { size: 3 }))
  b.headers = { get () { return 'glerb' } }
  t.match(await b.blob(), {
    [Blob.BUFFER]: Buffer.from('a=1'),
    size: 3,
    type: 'glerb',
  })
})

t.test('buffer body', async t => {
  const b = new Body(Buffer.from('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
  t.equal(await b.arrayBuffer().then(b => Buffer.from(b).toString()), 'a=1')
})

t.test('array buffer body', async t => {
  const b = new Body(stringToArrayBuffer('a=1'))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
})

t.test('uint 8 array body', async t => {
  const b = new Body(new Uint8Array(stringToArrayBuffer('a=1')))
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
})

t.test('stream body', async t => {
  const b = new Body(new Minipass({encoding:'utf8'}).end('a=1'))
  t.equal(Body.extractContentType(b.body), null)
  t.equal(await b.text(), 'a=1')
})

t.test('stream body with size', async t => {
  const b = new Body(new Minipass({encoding:'utf8'}).end('a=1'), { size: 3 })
  t.equal(Body.extractContentType(b.body), null)
  t.equal(await b.text(), 'a=1')
})

t.test('stream body with size thats already checking size', async t => {
  const b = new Body(new MinipassSized({size: 3, encoding:'utf8'}).end('a=1'), { size: 3 })
  t.equal(Body.extractContentType(b.body), null)
  t.equal(await b.text(), 'a=1')
})

t.test('stream body that is a core stream', async t => {
  const b = new Body(new PassThrough({encoding:'utf8'}).end('a=1'))
  t.equal(Body.extractContentType(b.body), null)
  t.equal(await b.text(), 'a=1')
})

t.test('stream body goes too long', async t => {
  const b = new Body(new PassThrough({encoding:'utf8'}).end('a=1'), {size: 1})
  t.equal(Body.extractContentType(b.body), null)
  await t.rejects(b.text(), {
    name: 'FetchError',
    code: 'EBADSIZE',
  })
})

t.test('simulated buffer creation problem', async t => {
  const s = new PassThrough()
  const b = new Body(s)
  b.url = 'xyz'
  setTimeout(() => s.emit('error', new RangeError('hello')))
  await t.rejects(b.buffer(), {
    name: 'FetchError',
    message: 'Could not create Buffer from response body for xyz: hello',
    type: 'system',
  })
})

t.test('stream body too slow', async t => {
  const b = new Body(new Minipass(), { timeout: 1 })
  b.url = 'sloowwwwww'
  // keep the process open, like the actual HTTP channel would
  setTimeout(() => {}, 10)
  await t.rejects(b.text(), {
    name: 'FetchError',
    message: 'Response timeout while trying to fetch sloowwwwww (over 1ms)',
    type: 'body-timeout',
    code: 'FETCH_ERROR',
    errno: 'FETCH_ERROR',
  })
})

t.test('random toString-ing thing body', async t => {
  const b = new Body({ toString () { return 'a=1' } })
  t.equal(b.body.toString(), 'a=1')
  t.equal(Body.extractContentType(b.body), null)
})

t.test('set size and timeout', async t => {
  const b = new Body('a=1', { size: 3, timeout: 1000 })
  t.equal(b.size, 3)
  t.equal(b.timeout, 1000)
  t.equal(Body.extractContentType(b.body), null)
})

t.test('body stream emits error', async t => {
  const errorer = new Minipass()
  const b = new Body(errorer)
  b.url = 'glorp'
  errorer.emit('error', new Error('poop'))
  await t.rejects(b.buffer(), {
    name: 'FetchError',
    message: 'Invalid response while trying to fetch glorp: poop',
    type: 'system',
  })
})

t.test('body stream emits AbortError', async t => {
  const aborter = new Minipass()
  const b = new Body(aborter)
  b.url = 'retroba'
  aborter.emit('error', new AbortError('bork'))
  await t.rejects(b.buffer(), {
    name: 'AbortError',
    message: 'bork',
  })
})

t.test('more static method coverage', async t => {
  t.equal(Body.extractContentType('a=1'), 'text/plain;charset=UTF-8')
  t.equal(Body.extractContentType(new URLSearchParams('a=1')),
    'application/x-www-form-urlencoded;charset=UTF-8')
  t.equal(Body.extractContentType(stringToArrayBuffer('a=1')), null)
  t.equal(Body.extractContentType(new Uint8Array(stringToArrayBuffer('a=1'))),
    null)
  t.equal(Body.extractContentType(new Blob()), null)
  t.equal(Body.extractContentType({}), 'text/plain;charset=UTF-8')
  t.equal(Body.getTotalBytes({body:{}}), null)
})

t.test('json', async t => {
  t.same(await new Body('{"a":1}').json(), { a: 1 })
  await t.rejects(Object.assign(new Body('a=1'), {url:'asdf'}).json(), {
    name: 'FetchError',
    message: 'invalid json response body at asdf reason: ' +
      'Unexpected token a in JSON at position 0',
    type: 'invalid-json',
  })
})

t.test('write to streams', async t => {
  const w = body => Body.writeToStream(
    new Minipass({ encoding: 'utf8' }),
    {body}
  ).concat()

  t.equal(await w(), '')
  t.equal(await w(new Blob()), '')
  t.equal(await w('a=1'), 'a=1')
  t.equal(await w(Buffer.from('a=1')), 'a=1')
  t.equal(await w(new Minipass().end('a=1')), 'a=1')
  const s = new Minipass()
  setTimeout(() => s.emit('error', new Error('asdf')))
  await t.rejects(w(s), { message: 'asdf' })
})

t.test('clone', t => {
  t.test('clone after use throws', async t => {
    const b = new Body('a=1')
    await b.text()
t.throws(() => Body.clone(b), {
|
||||
message: 'cannot clone body after it is used'
|
||||
})
|
||||
})
|
||||
|
||||
t.test('clone formdata returns the form data', async t => {
|
||||
const f = new FormData()
|
||||
f.append('a','1')
|
||||
const b = new Body(f)
|
||||
t.equal(Body.clone(b), f)
|
||||
})
|
||||
|
||||
t.test('clone buffer returns the buffer', async t => {
|
||||
const buf = Buffer.from('a=1')
|
||||
const b = new Body(buf)
|
||||
t.equal(Body.clone(b), buf)
|
||||
})
|
||||
|
||||
t.test('clone stream tees the stream', async t => {
|
||||
const mp = new Minipass().end('a=1')
|
||||
const b = new Body(mp)
|
||||
const cloned = Body.clone(b)
|
||||
t.not(cloned, mp, 'new stream')
|
||||
t.not(b.body, mp, 'original body gets new stream')
|
||||
t.equal((await cloned.concat()).toString(), 'a=1')
|
||||
t.equal(await b.text(), 'a=1')
|
||||
})
|
||||
|
||||
t.test('clone stream proxies errors to both', t => {
|
||||
const mp = new Minipass().end('a=1')
|
||||
const b = new Body(mp)
|
||||
const cloned = Body.clone(b)
|
||||
const x = new Error('yolo')
|
||||
t.plan(2)
|
||||
cloned.once('error', er => t.equal(er, x))
|
||||
b.body.once('error', er => t.equal(er, x))
|
||||
setTimeout(() => mp.emit('error', x))
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
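
// textConverted() decodes non-utf8 bodies: the charset is sniffed from the
// content-type header, from html4/html5 meta tags, or from an xml
// declaration, with utf8 as the fallback. The cases below exercise each path.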
t.test('convert body', t => {
  const { convert } = require('encoding')

  t.test('content-type header', async t => {
    const s = '中文'
    const b = new Body(convert(s, 'gbk'))
    b.headers = { get () { return 'text/plain; charset=gbk; qs=1' } }
    t.equal(await b.textConverted(), s)
  })

  t.test('html4 meta tag', async t => {
    const s = '<meta http-equiv="Content-Type" content="text/html; charset=gbk"><div>中文</div>'
    const b = new Body(convert(s, 'gbk'))
    t.equal(await b.textConverted(), s)
  })

  t.test('html4 meta tag reversed', async t => {
    const s = '<meta content="text/html; charset=gbk" http-equiv="Content-Type"><div>中文</div>'
    const b = new Body(convert(s, 'gbk'))
    t.equal(await b.textConverted(), s)
  })

  t.test('html5 meta tag', async t => {
    const s = '<meta charset="gbk"><div>中文</div>'
    const b = new Body(convert(s, 'gbk'))
    t.equal(await b.textConverted(), s)
  })

  t.test('xml encoding', async t => {
    const s = '<?xml encoding="gbk"?><div>中文</div>'
    const b = new Body(convert(s, 'gbk'))
    t.equal(await b.textConverted(), s)
  })

  t.test('explicitly utf8', async t => {
    const s = '<?xml encoding="UTF-8"?><div>中文</div>'
    const b = new Body(s)
    t.equal(await b.textConverted(), s)
  })

  t.test('no encoding set', async t => {
    const s = '中文'
    const b = new Body(s)
    t.equal(await b.textConverted(), s)
  })

  t.end()
})
@ -0,0 +1,76 @@
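// FetchError(message, type, systemError): the type string lands on
// err.type, and code/errno (plus expect/found in the EBADSIZE case) are
// copied over from the underlying system error, as verified below.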
'use strict'
const FetchError = require('../lib/fetch-error.js')
const t = require('tap')

t.test('no underlying error', t => {
  const fe = new FetchError('foo')
  t.match(fe, {
    message: 'foo',
    code: 'FETCH_ERROR',
    errno: 'FETCH_ERROR',
    type: undefined,
    stack: String,
    name: 'FetchError',
    constructor: FetchError,
  })
  fe.name = 'fooblz'
  t.equal(fe.name, 'FetchError', 'cannot override name')
  t.equal(Object.prototype.toString.call(fe), '[object FetchError]', 'sets toStringTag')
  t.equal(String(fe), 'FetchError: foo', 'name shows up in toString')
  t.end()
})

t.test('with underlying error', t => {
  const fe = new FetchError('xyz', 'xyz-problem', Object.assign(new Error('abc'), {
    code: 'ABC_ERROR',
    rando: 'property',
  }))
  t.match(fe, {
    message: 'xyz',
    code: 'ABC_ERROR',
    errno: 'ABC_ERROR',
    rando: 'property',
    type: 'xyz-problem',
    stack: String,
    name: 'FetchError',
    constructor: FetchError,
  })
  t.end()
})

t.test('special handling of EBADSIZE', t => {
  const fe = new FetchError('xyz', 'xyz-problem', Object.assign(new Error('abc'), {
    code: 'EBADSIZE',
    expect: 5,
    found: 50,
  }))
  t.match(fe, {
    message: 'xyz',
    code: 'EBADSIZE',
    errno: 'EBADSIZE',
    type: 'max-size',
    expect: 5,
    found: 50,
    stack: String,
    name: 'FetchError',
    constructor: FetchError,
  })
  t.end()
})

t.test('create custom FetchError', function funcName (t) {
  const systemError = new Error('system')
  systemError.code = 'ESOMEERROR'

  const err = new FetchError('test message', 'test-error', systemError)
  t.match(err, Error)
  t.match(err, FetchError)
  t.equal(err.name, 'FetchError')
  t.equal(err.message, 'test message')
  t.equal(err.type, 'test-error')
  t.equal(err.code, 'ESOMEERROR')
  t.equal(err.errno, 'ESOMEERROR')
  // reading the stack is quite slow (~30-50ms)
  t.match(err.stack, `${err.name}: ${err.message}`)
  t.match(err.stack, 'funcName')
  t.end()
})
@ -0,0 +1 @@
i am a dummy
@ -0,0 +1,387 @@
const http = require('http')
const { parse } = require('url')
const zlib = require('minizlib')
const Minipass = require('minipass')
const { multipart: Multipart } = require('parted')

let convert
try { convert = require('encoding').convert } catch (e) {}

class TestServer {
  constructor () {
    this.server = http.createServer((req, res) => this.router(req, res))
    this.port = 30000 + (+process.env.TAP_CHILD_ID || 1)
    this.hostname = 'localhost'
    // node 8 default keepalive timeout is 5000ms
    // make it shorter here as we want to close the server
    // quickly at the end of tests
    this.server.keepAliveTimeout = 1000
    this.server.on('error', er => console.log(er.stack))
    this.server.on('connection', socket => socket.setTimeout(1500))
  }

  start (cb) {
    this.server.listen(this.port, this.hostname, cb)
  }

  stop (cb) {
    this.server.close(cb)
  }

  router (req, res) {
    const p = parse(req.url).pathname

    if (p === '/hello') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.end('world')
    }

    if (p === '/plain') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.end('text')
    }

    if (p === '/options') {
      res.statusCode = 200
      res.setHeader('Allow', 'GET, HEAD, OPTIONS')
      res.end('hello world')
    }

    if (p === '/html') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.end('<html></html>')
    }

    if (p === '/json') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'application/json')
      res.end(JSON.stringify({ name: 'value' }))
    }

    if (p === '/gzip') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'gzip')
      new zlib.Gzip().end('hello world').concat().then(buf => res.end(buf))
    }

    if (p === '/gzip-truncated') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'gzip')
      new zlib.Gzip().end('hello world').concat().then(buf =>
        res.end(buf.slice(0, buf.length - 8)))
    }

    if (p === '/deflate') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'deflate')
      new zlib.Deflate().end('hello world').concat().then(buf => res.end(buf))
    }

    if (p === '/brotli') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'br')
      // pre-compressed 'hello world', in-lined here so tests will run when
      // the client doesn't support brotli
      const buf = Buffer.from([0x0b, 0x05, 0x80, 0x68, 0x65, 0x6c, 0x6c,
        0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, 0x03])
      res.end(buf)
    }

    if (p === '/deflate-raw') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'deflate')
      new zlib.DeflateRaw().end('hello world').concat().then(buf => res.end(buf))
    }

    if (p === '/sdch') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'sdch')
      res.end('fake sdch string')
    }

    if (p === '/invalid-content-encoding') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.setHeader('Content-Encoding', 'gzip')
      res.end('fake gzip string')
    }

    if (p === '/timeout') {
      setTimeout(() => {
        res.statusCode = 200
        res.setHeader('Content-Type', 'text/plain')
        res.end('text')
      }, 1000)
    }

    if (p === '/slow') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.write('test')
      setTimeout(() => res.end('test'), 1000)
    }

    if (p === '/cookie') {
      res.statusCode = 200
      res.setHeader('Set-Cookie', ['a=1', 'b=1'])
      res.end('cookie')
    }

    if (p === '/size/chunk') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      setTimeout(() => res.write('test'), 10)
      setTimeout(() => res.end('test'), 20)
    }

    if (p === '/size/long') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain')
      res.end('testtest')
    }

    if (p === '/encoding/gbk') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.end(convert('<meta charset="gbk"><div>中文</div>', 'gbk'))
    }

    if (p === '/encoding/gb2312') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.end(convert('<meta http-equiv="Content-Type" content="text/html; charset=gb2312"><div>中文</div>', 'gb2312'))
    }

    if (p === '/encoding/gb2312-reverse') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.end(convert('<meta content="text/html; charset=gb2312" http-equiv="Content-Type"><div>中文</div>', 'gb2312'))
    }

    if (p === '/encoding/shift-jis') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html; charset=Shift-JIS')
      res.end(convert('<div>日本語</div>', 'Shift_JIS'))
    }

    if (p === '/encoding/euc-jp') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/xml')
      res.end(convert('<?xml version="1.0" encoding="EUC-JP"?><title>日本語</title>', 'EUC-JP'))
    }

    if (p === '/encoding/utf8') {
      res.statusCode = 200
      res.end('中文')
    }

    if (p === '/encoding/order1') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'charset=gbk; text/plain')
      res.end(convert('中文', 'gbk'))
    }

    if (p === '/encoding/order2') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/plain; charset=gbk; qs=1')
      res.end(convert('中文', 'gbk'))
    }

    if (p === '/encoding/chunked') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.setHeader('Transfer-Encoding', 'chunked')
      res.write('a'.repeat(10))
      res.end(convert('<meta http-equiv="Content-Type" content="text/html; charset=Shift_JIS" /><div>日本語</div>', 'Shift_JIS'))
    }

    if (p === '/encoding/invalid') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'text/html')
      res.setHeader('Transfer-Encoding', 'chunked')
      res.write('a'.repeat(1200))
      res.end(convert('中文', 'gbk'))
    }

    if (p === '/redirect/301') {
      res.statusCode = 301
      res.setHeader('Location', '/inspect')
      res.end()
    }

    if (p === '/redirect/302') {
      res.statusCode = 302
      res.setHeader('Location', '/inspect')
      res.end()
    }

    if (p === '/redirect/303') {
      res.statusCode = 303
      res.setHeader('Location', '/inspect')
      res.end()
    }

    if (p === '/redirect/307') {
      res.statusCode = 307
      res.setHeader('Location', '/inspect')
      res.end()
    }

    if (p === '/redirect/308') {
      res.statusCode = 308
      res.setHeader('Location', '/inspect')
      res.end()
    }

    if (p === '/redirect/chain') {
      res.statusCode = 301
      res.setHeader('Location', '/redirect/301')
      res.end()
    }

    if (p === '/redirect/no-location') {
      res.statusCode = 301
      res.end()
    }

    if (p === '/redirect/slow') {
      res.statusCode = 301
      res.setHeader('Location', '/redirect/301')
      setTimeout(() => res.end(), 1000)
    }

    if (p === '/redirect/slow-chain') {
      res.statusCode = 301
      res.setHeader('Location', '/redirect/slow')
      setTimeout(() => res.end(), 10)
    }

    if (p === '/redirect/slow-stream') {
      res.statusCode = 301
      res.setHeader('Location', '/slow')
      res.end()
    }

    if (p === '/error/400') {
      res.statusCode = 400
      res.setHeader('Content-Type', 'text/plain')
      res.end('client error')
    }

    if (p === '/error/404') {
      res.statusCode = 404
      res.setHeader('Content-Encoding', 'gzip')
      res.end()
    }

    if (p === '/error/500') {
      res.statusCode = 500
      res.setHeader('Content-Type', 'text/plain')
      res.end('server error')
    }

    if (p === '/error/reset') {
      res.destroy()
    }

    if (p === '/error/json') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'application/json')
      res.end('invalid json')
    }

    if (p === '/no-content') {
      res.statusCode = 204
      res.end()
    }

    if (p === '/no-content/gzip') {
      res.statusCode = 204
      res.setHeader('Content-Encoding', 'gzip')
      res.end()
    }

    if (p === '/no-content/brotli') {
      res.statusCode = 204
      res.setHeader('Content-Encoding', 'br')
      res.end()
    }

    if (p === '/not-modified') {
      res.statusCode = 304
      res.end()
    }

    if (p === '/not-modified/gzip') {
      res.statusCode = 304
      res.setHeader('Content-Encoding', 'gzip')
      res.end()
    }

    if (p === '/inspect') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'application/json')
      let body = ''
      req.on('data', c => body += c)
      req.on('end', () => res.end(JSON.stringify({
        method: req.method,
        url: req.url,
        headers: req.headers,
        body
      })))
    }

    if (p === '/multipart') {
      res.statusCode = 200
      res.setHeader('Content-Type', 'application/json')
      // the path option passed to the Multipart constructor cannot be an
      // absolute path on Windows; we set it manually here because the
      // default provided by 'parted' is an absolute path
      // ref: https://github.com/chjj/parsed/issues/10
      const parser = new Multipart(req.headers['content-type'], { path: './' })
      let body = ''
      parser.on('part', (field, part) => body += field + '=' + part)
      parser.on('end', () => res.end(JSON.stringify({
        method: req.method,
        url: req.url,
        headers: req.headers,
        body: body
      })))
      req.pipe(parser)
    }

    if (p === '/trailers') {
      res.statusCode = 200
      res.setHeader('Transfer-Encoding', 'chunked')
      res.setHeader('Trailer', 'X-Node-Fetch')
      res.write('Body of the response')
      res.addTrailers({ 'X-Node-Fetch': 'hello world!' })
      res.end()
    }

    if (p === '/host-redirect') {
      if (req.headers.host !== `localhost:${this.port}`) {
        res.setHeader('location', `http://localhost:${this.port}/host-redirect`)
        res.statusCode = 302
      }
      res.end(`http://${req.headers.host}/host-redirect`)
    }
  }
}

module.exports = TestServer

if (require.main === module) {
  const server = new TestServer()
  server.start(() =>
    console.log(`Server started listening at port ${server.port}`))
}
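
// Typical usage from a test file (a sketch; the fixture's require path is
// an assumption here):
//   const TestServer = require('./fixtures/server.js')
//   const server = new TestServer()
//   server.start(() => { /* issue requests against server.port */ })
//   // ...and when finished:
//   server.stop(() => {})
// Each tap child process derives its own port (30000 + TAP_CHILD_ID), so
// test files running in parallel don't collide.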
@ -0,0 +1 @@
challenge
@ -0,0 +1,27 @@
-----BEGIN CERTIFICATE-----
MIIEhTCCA22gAwIBAgIJAKuew+59rKlnMA0GCSqGSIb3DQEBCwUAMIGjMQswCQYD
VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAcMB09ha2xhbmQxFzAVBgNVBAoM
Dk1pbmlwYXNzLCBJbmMuMSowKAYDVQQLDCFEZXBhcnRtZW50IG9mIE1ha2luZyBG
ZXRjaCBIYXBwZW4xFzAVBgNVBAMMDm1pbmlwYXNzLWZldGNoMRcwFQYJKoZIhvcN
AQkBFghpQGl6cy5tZTAeFw0yMDA3MjEyMjAzNTFaFw0yMjEwMjQyMjAzNTFaMGcx
CzAJBgNVBAYTAnh5MQswCQYDVQQIDAJhYjELMAkGA1UEBwwCaWYxCzAJBgNVBAoM
AmdvMQswCQYDVQQLDAJhbjELMAkGA1UEAwwCc3QxFzAVBgkqhkiG9w0BCQEWCGlA
aXpzLm1lMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtLC1Zmxxwo5x
3J4ApumdlRzkymeLgeyxPO+wZxDkOxMk0PVkhDvZdLfXWxGM/jMWGxcMzOFAgaPP
XxFafPes4tRc28gdFBnr16nUeoWUF2ReMcHIk8nxq0wtV+lubkgaGcvUslS694VK
a2gNXPYHpTzuRFMLqN2FlFDrS2/QB3mwbG3OJOwxsxOVxUboUfxbXweJB3mJy0r3
LU81+LZjTqV0EY+RduIzhsfyhhatPXbS1S9XRZ4JMzxYZsxZueTDp0Ih/Y5elN9F
rq+7UvGZLQl7+j3pnPWDkv8PxO5JVpFVfH2WHJqGyMvSm7W+St3PgyrVcMFh654n
0+d4LEA+awIDAQABo4H2MIHzMIHCBgNVHSMEgbowgbehgamkgaYwgaMxCzAJBgNV
BAYTAlVTMQswCQYDVQQIDAJDQTEQMA4GA1UEBwwHT2FrbGFuZDEXMBUGA1UECgwO
TWluaXBhc3MsIEluYy4xKjAoBgNVBAsMIURlcGFydG1lbnQgb2YgTWFraW5nIEZl
dGNoIEhhcHBlbjEXMBUGA1UEAwwObWluaXBhc3MtZmV0Y2gxFzAVBgkqhkiG9w0B
CQEWCGlAaXpzLm1lggkAp3aRU/tiwHQwCQYDVR0TBAIwADALBgNVHQ8EBAMCBPAw
FAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBCwUAA4IBAQApZMXMPu9m
eNDlejni3PlO4KqnAXmMMY9SIObMTtuQtMdujPRqwSqegQUOZN5wfoyNvPSc5wkU
8TRu1gAH+vg9WCk0X7VlXA7q5ieQCdmgdUzl0vudy3omK9YN/6g7svBwxqn0B/g4
j0wExC+RF6MfcF9ycOVSi7ppBUqA7UksbPb0tYNNulDWn3TnrNLiJdEI7SDRZG5f
iB0P7h0PlHkp08Me8iqWtGlzLlDbObW57uwBXdCKHSS+/z+zrnmplkBRNICgs4Bt
NukMjBmoeFTrzGyStr83VoKqLEd/CtldkZoQ/v37otLuJROQxoulWQ47dyqs6CS7
LUwcQI3YOXkS
-----END CERTIFICATE-----
@ -0,0 +1,17 @@
-----BEGIN CERTIFICATE REQUEST-----
MIICrDCCAZQCAQAwZzELMAkGA1UEBhMCeHkxCzAJBgNVBAgMAmFiMQswCQYDVQQH
DAJpZjELMAkGA1UECgwCZ28xCzAJBgNVBAsMAmFuMQswCQYDVQQDDAJzdDEXMBUG
CSqGSIb3DQEJARYIaUBpenMubWUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQC0sLVmbHHCjnHcngCm6Z2VHOTKZ4uB7LE877BnEOQ7EyTQ9WSEO9l0t9db
EYz+MxYbFwzM4UCBo89fEVp896zi1FzbyB0UGevXqdR6hZQXZF4xwciTyfGrTC1X
6W5uSBoZy9SyVLr3hUpraA1c9gelPO5EUwuo3YWUUOtLb9AHebBsbc4k7DGzE5XF
RuhR/FtfB4kHeYnLSvctTzX4tmNOpXQRj5F24jOGx/KGFq09dtLVL1dFngkzPFhm
zFm55MOnQiH9jl6U30Wur7tS8ZktCXv6Pemc9YOS/w/E7klWkVV8fZYcmobIy9Kb
tb5K3c+DKtVwwWHrnifT53gsQD5rAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEA
PaDycMfRTHep7etzRB+KCkETU/dr5aTRQQOttJDykZgXhhyMvVm7ZQa/bASkikCV
RemiCDYQvSHayBJ0FivxbyPwc0BDO9ucXTZ5+PDmQeqjo+nHXWVrDwyE6Y95wZG8
5mI35yv6bw3OLy0KysV/FTDy0z9njzSYCed9SblLUW7SF0AlAEE//djsAdwNODov
Nyy5DYpLDYVb00hAYDscf+pkdeckzvfqIWfe8MWf92jbGzlmMBedeNYsZxFvRAgB
/KhxeviODBHpvCiusHL9VQLwdx0InZf4+BVNeBMn7AFKm+Zc6iGi8AFZzICg5NFj
IiD6tuE4ZWJhqcWUhbQeFQ==
-----END CERTIFICATE REQUEST-----
@ -0,0 +1,6 @@
authorityKeyIdentifier=keyid,issuer
basicConstraints=CA:FALSE
keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
subjectAltName = @alt_names
[alt_names]
DNS.1 = localhost
@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAtLC1Zmxxwo5x3J4ApumdlRzkymeLgeyxPO+wZxDkOxMk0PVk
hDvZdLfXWxGM/jMWGxcMzOFAgaPPXxFafPes4tRc28gdFBnr16nUeoWUF2ReMcHI
k8nxq0wtV+lubkgaGcvUslS694VKa2gNXPYHpTzuRFMLqN2FlFDrS2/QB3mwbG3O
JOwxsxOVxUboUfxbXweJB3mJy0r3LU81+LZjTqV0EY+RduIzhsfyhhatPXbS1S9X
RZ4JMzxYZsxZueTDp0Ih/Y5elN9Frq+7UvGZLQl7+j3pnPWDkv8PxO5JVpFVfH2W
HJqGyMvSm7W+St3PgyrVcMFh654n0+d4LEA+awIDAQABAoIBAClb3VnBbtSiuEtQ
W0PZa3mLMI9n3hXyMKuLDay5wBQJkL7HvKdL7714qzGsNcKlvOSchRCMarCB52CS
X00FgCw3gb3I82b7e/FUbU9SLhCgp7Lp8VhqvAeCm0ppIx7ZIChRcLEVFeq8NsmY
+p4RrrRS2xMGkz+m3QGS+Bi/UjWzcPX3qlDaeWxP3Xzih/XDfYDdQei+DQXbsfZH
3pngiqC7/tVef6ckBYMcEYRz2CETqQNCA35dnp/CZpQ5H5pC/qNJX7sSYJO91ZTb
5uTujt4JBRGkefzDJFuzpYRhCtiMKTRgjKwk10clOlEmclpVmeiSeenICL39Mi+e
DsVnNRkCgYEA76yykUX62SryuTLmo20jPzl2tHCXNRjBjpmxv43VkXhvPZJEU7fR
qK/iVwNGSvNCSQxKtTByLCceJqwMb2F1V1jwkNdDJ+T1GGga8dGpOB1EESosr2vw
WM6lhz3CxutoJTa7YjmziMuTOcYtE3xhzovTks68Pza0m9YoAIQ+I/8CgYEAwP96
P1YmKK8ECpWvupcBKH8CoJLbeXmlJJ9BtAdDp5PrKoLSIHNz67w7CzrpJzXZglh3
BoYgSw/1D4ChKpLSw2LzXW70/MyCcnKRYNPwwEbyKUlx8aiudH4kXtYjwgMChbXd
wz30iPQ4CwNb3JLKOl8k1yZY2aG28c6tUfNetZUCgYAJ4Lc3T4gIHUIFqfhhceUK
/QZMZ3uD37JSezkdKO5NYYZMJlQUkzXb2uvcJDFoc2Ae/JezofyCn1YZx+t3R6/7
WpoHjiehZElJqTi7EKYFvwcIIhHXZP5x2opt6Xi2lAslxXyxjqk8kQ9PSUCgVfb9
+TtOCKEvhcSpy4i4hLq+5wKBgEK40AmPffe4sdv67drDE7ptVnou60Nuw6IKkMtt
a31GzRlQSta/M0c/NuZmAm701fKTJOsTeZyZsq4eWRl/0u+LiPk3P+kZxstMQmhI
PUYsANI3OvZBy7YoWeiTfZ84LSoOutEh3SVv0OQ10A9MjC9r7y+WaUcr+jRUsGTR
j1+VAoGBAOxP7YcUuPH8DXxoybVAOUAvM6URW+E4fYg75YsKNzpdbDXhG+6V1FT8
NJZAipbOx1nBRfhfqDN7+2px45oWus/nMFdkue4u8c6jY7WOgXiNJQoea7vE262M
921loT4Sjamsns2pZ7jYwVe8DW6wk/wrqin1FBukQlwH72BpUi+U
-----END RSA PRIVATE KEY-----
@ -0,0 +1,30 @@
-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: DES-EDE3-CBC,230B31249452E992

Wwb2pZmJqVr767ZRsVAp/dOQbaKgEomAFioQh3e5Ena/ly7P0x9LJvoLHJC7Az+l
wwX9MYMqxD/ZkpGi8/7KH/e6smF5t5V1fAGVxqYTwoVw8QvNyRNBoiGdV9c+HhJ8
+wt1HKSK36hvY3qJJ7Iu1K1xrvMmwyehUWddW7qbjvwZVo/GCOo7mUFkB574EyBo
UgnRgLV6k/ISS6WTrrxPwkBQpt93Bc5Bv1FtXRYXJ9O1id6cA/bBc+dsy91YtrtX
71p317cfZemHpje5pU/yepE52+4LyO04lo6PMMV/jjV5cFeBgzEitaAIjsY6Y2qc
/u6UDn2GN7Ir9XuJZfHhjxgZVYq3SM/iQp+Htat3XDk6S8B/kz8zEDYhqGAjMtaU
gFNwCFVqnj7BDqeOescHCSwsT3wauYGGfIyzXM78Rtj+mse6dpqyWta+FYjtFuAA
tm86lj0et+anm/Y/SGom6E+Rt1Hx1OucIhBuyDXx5G8W40oYjmK5mrXqC5I1Iwux
QhJkMr3o90OrT3vka3hay04RjZ2iSr2bYeoP8zSTdXhnTpbewDN921Xg7YbAo1Q0
8Ozi+MgywsJvei0eY0xQD/BuWGxArLsllH+QcHqr4FScjtUlnTtSrzOuH396TwJh
IP+stbuMlRCisTfzE4Ls5R6YiV3KbgdUCLjPSIq/xPzedpV+pS689eTRw8xu1ffA
d2E7C5VKFjnlue+hFCS2POhEgmHGtzQQM87KT6ok1aS1nK9s5GGzOVcPSsH5jflE
7mc1yPewkBgndWjhvRf5OOX6XuBoAbQ7AbyACtEDKuH9/SDLsyihX5NrFKMRpPuq
1fFMzZ8yiB+OVATouMeMXAqgCpEzHZVOE52cjNlkHXpJ4mpuyRbmlFzBCdnrK1ar
PfeUaT9//ySORWBFAQ5bZu5NIOgQMCd8LokPvos3wcknJ1lhr72cN0NquQQbfUew
tq0AhQZv1WW5iUEvw7VgsPK7USt0uZfQXYrkXxq/VeBUCad2qC1QNJNKNulOHtt/
RrTCtAp+4LiXuW61NS2KiT8ijNrb1BTcZSfRPC0qKxD39cIFP6YPXppqJvJsEpJh
GAXggu4O/G+RH0Bm2yN2SiN+PjAjNBIhW7/qaiQ2ZrggZ6UFX/GE7XkXJpQbmoJ1
MXmUSnkks969Zzr3XHcbuSWINYRDZ2/KmCMjHldBasT6WsZNi9n9SMPqoZiDuDQb
WmwIk1kBPwuK6+Mp73rI+S3R9/6tgaKvg+OV6CERyyvD56c5JhQ8G1aEL/SkMwBY
hOi7w7IHP40IbIsnVtQbv4nirCK4tZyruvoZqIEfGblO5BVa2PYGKZW98Sv+KSVU
ll2DQxcn9/XfMYIFpGzV6syJU9aqd57KYamgkxx5jOwjk5WZBAxvj5ZLV8NdFpSH
XiYyJ6GZodvgrZ7LezzzFNOk6likKSpVljdpaX0rlfIrMeYJayNZIYq1ZguJ9VYa
FS5eu3MxVQnZPAmK0JTjLv9UJzDMxLAunU2nneHI8tooEf1U1deR5goJSX1n+kUZ
qfOvnrWS8qJBsJDWiQmxzKnoEtm2pIPQDK1/UYbzj27Ks7P6z7Sphg6TB0ZhAWjh
-----END RSA PRIVATE KEY-----
@ -0,0 +1,23 @@
-----BEGIN CERTIFICATE-----
MIIDxjCCAq4CCQCndpFT+2LAdDANBgkqhkiG9w0BAQsFADCBozELMAkGA1UEBhMC
VVMxCzAJBgNVBAgMAkNBMRAwDgYDVQQHDAdPYWtsYW5kMRcwFQYDVQQKDA5NaW5p
cGFzcywgSW5jLjEqMCgGA1UECwwhRGVwYXJ0bWVudCBvZiBNYWtpbmcgRmV0Y2gg
SGFwcGVuMRcwFQYDVQQDDA5taW5pcGFzcy1mZXRjaDEXMBUGCSqGSIb3DQEJARYI
aUBpenMubWUwIBcNMjAwNzIxMjE0MDQzWhgPMjA3MDA3MDkyMTQwNDNaMIGjMQsw
CQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAcMB09ha2xhbmQxFzAVBgNV
BAoMDk1pbmlwYXNzLCBJbmMuMSowKAYDVQQLDCFEZXBhcnRtZW50IG9mIE1ha2lu
ZyBGZXRjaCBIYXBwZW4xFzAVBgNVBAMMDm1pbmlwYXNzLWZldGNoMRcwFQYJKoZI
hvcNAQkBFghpQGl6cy5tZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
AL7zb0N9pSWmaFNT0zz7CEYFf7sO541NYY+Zkt2qF+beIT9M+0NtrWy8ekOtG7Fe
N2/ElPXxsAtrTWRHIiRvya8iEKBZT0nuG0pAG0C+VMalzsFfPorEXFwBsFkjerL4
9iU9CV11KhVxxNnTPTkk9hjsI2xag1GP8gykoNY+j4x2uMvx3B1uPMOXNWHtNILn
Sdy83if/Tvvmx9t1BEXWsMhZeI4mVE0P3TRgBglS1pvE9MF1TlB9AdsXaOgRjGeK
uiuDhHM8FTduc5vYvzwu2uVVMtROB5zxX9QDNk9Bg1LjQh3vJEJZxYoB4lxJuZ4K
bA+36HkJ18AFz5ae9JxEHLMCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAFLQyVAIE
ImI72TPEDnpZb71xOi8m+xAGtHKOtswbf3WprE3WLnSnPP7iFkphcFhG/Jny3c25
B98SoVSOHwzYo9CvOPhcZBy1ErIv+X/gUKt2NZmX5KcwF2oH6/yUCfC5ikJpR00x
bHmVa3ROoYJqfR9vH7o9lIR7Yb+Pb1yIGpEENwn3kr9UB0sA9l45TWmK1N6dPOlR
BajY/AhRqoPw2miyGusJf5FaLlLNri0QkiJIyf0v/3+goayoyn/2OwqzjkR1DbPg
1KXZIMonv5UfEWVi4w+1/evNRVOB1g0v7wj//bdomGWPvFPtRJe+Zfb1jsLEpm99
N58Aw1gH7zKe9Q==
-----END CERTIFICATE-----
@ -0,0 +1 @@
C2917CDC044F93C2
@ -0,0 +1 @@
minipassphrase
@ -0,0 +1,21 @@
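// Manual TLS smoke-test, not part of the tap suite: it starts an HTTPS
// server with the self-signed localhost cert, makes one request against it
// using the test CA, prints the response, and shuts down.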
const https = require('https')
const { readFileSync: read } = require('fs')
const ca = read(__dirname + '/minipass-CA.pem')
const server = https.createServer({
  key: read(__dirname + '/localhost.key'),
  cert: read(__dirname + '/localhost.crt'),
}, (q, s) => {
  s.end('ok\n' + JSON.stringify(q.headers, null, 2) + '\n')
  server.close()
})
server.listen(8443, () => {
  https.get({
    host: 'localhost',
    path: '/hello',
    port: 8443,
    ca,
  }, res => {
    console.error(res.statusCode, res.headers)
    res.pipe(process.stdout)
  })
})
@ -0,0 +1,23 @@
#!/bin/sh

if [ "$#" -ne 1 ]
then
  echo "Usage: Must supply a domain"
  exit 1
fi

DOMAIN=$1

openssl genrsa -out $DOMAIN.key 2048
openssl req -new -key $DOMAIN.key -out $DOMAIN.csr

cat > $DOMAIN.ext << EOF
authorityKeyIdentifier=keyid,issuer
basicConstraints=CA:FALSE
keyUsage = digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment
subjectAltName = @alt_names
[alt_names]
DNS.1 = $DOMAIN
EOF

openssl x509 -req -in $DOMAIN.csr -CA ./minipass-CA.pem -CAkey ./minipass-CA.key -CAcreateserial -out $DOMAIN.crt -days 825 -sha256 -extfile $DOMAIN.ext
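
# Usage note: run this from the directory containing minipass-CA.pem and
# minipass-CA.key (presumably the encrypted key above, whose passphrase is
# stored in the adjacent passphrase file). Assuming the script is saved as
# make-cert.sh:
#   sh ./make-cert.sh localhost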
@ -0,0 +1,280 @@
const t = require('tap')
const Headers = require('../lib/headers.js')

t.Test.prototype.addAssert('contain', 2, function (list, key, m, e) {
  m = m || 'expected item to be contained in list'
  e.found = list
  e.wanted = key
  return this.ok(list.indexOf(key) !== -1, m, e)
})
t.Test.prototype.addAssert('notContain', 2, function (list, key, m, e) {
  m = m || 'expected item to not be contained in list'
  e.found = list
  e.wanted = key
  return this.notOk(list.indexOf(key) !== -1, m, e)
})
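
// contain/notContain are custom tap assertions used below: Headers.raw()
// maps each header name to an array of values, so the tests check list
// membership rather than strict equality.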

t.test('should have attributes conforming to Web IDL', t => {
  const headers = new Headers()
  t.same(Object.getOwnPropertyNames(headers), [])
  const enumerableProperties = []
  for (const property in headers) {
    enumerableProperties.push(property)
  }
  t.same(enumerableProperties.sort(), [
    'append',
    'delete',
    'entries',
    'forEach',
    'get',
    'has',
    'keys',
    'set',
    'values',
  ])

  t.equal(String(headers), '[object Headers]')

  t.end()
})

t.test('not-found key returns null', t => {
  const h = new Headers([['foo', 'bar']])
  t.equal(h.has('baz'), false)
  t.equal(h.get('baz'), null)
  t.end()
})

t.test('set two times', t => {
  const h = new Headers()
  h.set('foo', 'bar')
  h.set('foo', 'baz')
  t.equal(h.get('foo'), 'baz')
  h.append('foo', 'bar')
  t.equal(h.get('foo'), 'baz, bar')
  t.end()
})

t.test('node compatible headers', t => {
  const h = new Headers()
  h.set('foo', 'bar')
  t.same(Headers.exportNodeCompatibleHeaders(h), {
    foo: ['bar'],
  })
  h.set('host', 'example.com')
  t.same(Headers.exportNodeCompatibleHeaders(h), {
    foo: ['bar'],
    host: 'example.com',
  })
  t.end()
})

t.test('create headers lenient', t => {
  const h = Headers.createHeadersLenient({
    '💩': ['ignore', 'these'],
    badList: ['ok', '💩', 'bar'],
    badStr: '💩',
    goodstr: 'good',
  })

  t.same(Headers.exportNodeCompatibleHeaders(h), {
    badList: ['ok', 'bar'],
    goodstr: ['good'],
  })

  t.end()
})

t.test('delete', t => {
  const h = new Headers([['foo', 'bar']])
  t.equal(h.has('foo'), true)
  h.delete('foo')
  t.equal(h.has('foo'), false)
  // another time just to make sure it's fine with that, and for coverage
  h.delete('foo')
  t.end()
})

t.test('iterating through all headers with forEach', t => {
  const headers = new Headers([
    ['b', '2'],
    ['c', '4'],
    ['b', '3'],
    ['a', '1'],
  ])

  const result = []
  headers.forEach((val, key) => {
    result.push([key, val])
  })

  t.same(result, [
    ['a', '1'],
    ['b', '2, 3'],
    ['c', '4']
  ])

  t.end()
})

t.test('iteration', t => {
  const headers = new Headers([
    ['b', '2'],
    ['c', '4'],
    ['a', '1'],
  ])
  headers.append('b', '3')

  const result = []
  for (let pair of headers) {
    result.push(pair)
  }
  t.same(result, [
    ['a', '1'],
    ['b', '2, 3'],
    ['c', '4'],
  ], 'iterating with for loop')

  t.same(Array.from(headers.entries()), [
    ['a', '1'],
    ['b', '2, 3'],
    ['c', '4'],
  ], 'entries')

  const keys = headers.keys()
  t.equal(String(keys), '[object HeadersIterator]')
  t.same(Array.from(keys), ['a', 'b', 'c'], 'keys')

  t.same(Array.from(headers.values()), ['1', '2, 3', '4'], 'values')

  t.end()
})

t.test('reject illegal header', t => {
  const headers = new Headers()
  t.throws(() => new Headers({ 'He y': 'ok' }), TypeError)
  t.throws(() => new Headers({ 'Hé-y': 'ok' }), TypeError)
  t.throws(() => new Headers({ 'He-y': 'ăk' }), TypeError)
  t.throws(() => headers.append('Hé-y', 'ok'), TypeError)
  t.throws(() => headers.delete('Hé-y'), TypeError)
  t.throws(() => headers.get('Hé-y'), TypeError)
  t.throws(() => headers.has('Hé-y'), TypeError)
  t.throws(() => headers.set('Hé-y', 'ok'), TypeError)
  // should reject empty header
  t.throws(() => headers.append('', 'ok'), TypeError)

  // 'o k' is a valid value but an invalid name
  new Headers({ 'He-y': 'o k' })

  t.end()
})

t.test('should ignore unsupported attributes while reading headers', t => {
  class FakeHeader {}
  // prototypes are currently ignored
  // This might change in the future: #181
  FakeHeader.prototype.z = 'fake'

  const res = new FakeHeader()
  res.a = 'string'
  res.b = ['1', '2']
  res.c = ''
  res.d = []
  res.e = 1
  res.f = [1, 2]
  res.g = { a: 1 }
  res.h = undefined
  res.i = null
  res.j = NaN
  res.k = true
  res.l = false
  res.m = Buffer.from('test')

  const h1 = new Headers(res)
  h1.set('n', [1, 2])
  h1.append('n', ['3', 4])

  const h1Raw = h1.raw()

  t.contain(h1Raw.a, 'string')
  t.contain(h1Raw.b, '1,2')
  t.contain(h1Raw.c, '')
  t.contain(h1Raw.d, '')
  t.contain(h1Raw.e, '1')
  t.contain(h1Raw.f, '1,2')
  t.contain(h1Raw.g, '[object Object]')
  t.contain(h1Raw.h, 'undefined')
  t.contain(h1Raw.i, 'null')
  t.contain(h1Raw.j, 'NaN')
  t.contain(h1Raw.k, 'true')
  t.contain(h1Raw.l, 'false')
  t.contain(h1Raw.m, 'test')
  t.contain(h1Raw.n, '1,2')
  t.contain(h1Raw.n, '3,4')

  t.equal(h1Raw.z, undefined)

  t.end()
})

t.test('should wrap headers', t => {
  const h1 = new Headers({ a: '1' })
  const h1Raw = h1.raw()

  const h2 = new Headers(h1)
  h2.set('b', '1')
  const h2Raw = h2.raw()

  const h3 = new Headers(h2)
  h3.append('a', '2')
  const h3Raw = h3.raw()

  t.contain(h1Raw.a, '1')
  t.notContain(h1Raw.a, '2')

  t.contain(h2Raw.a, '1')
  t.notContain(h2Raw.a, '2')
  t.contain(h2Raw.b, '1')

  t.contain(h3Raw.a, '1')
  t.contain(h3Raw.a, '2')
  t.contain(h3Raw.b, '1')

  t.end()
})

t.test('should accept headers as an iterable of tuples', t => {
  let headers

  headers = new Headers([
    ['a', '1'],
    ['b', '2'],
    ['a', '3']
  ])
  t.equal(headers.get('a'), '1, 3')
  t.equal(headers.get('b'), '2')

  headers = new Headers([
    new Set(['a', '1']),
    ['b', '2'],
    new Map([['a', null], ['3', null]]).keys()
  ])
  t.equal(headers.get('a'), '1, 3')
  t.equal(headers.get('b'), '2')

  headers = new Headers(new Map([
    ['a', '1'],
    ['b', '2']
  ]))
  t.equal(headers.get('a'), '1')
  t.equal(headers.get('b'), '2')
  t.end()
})

t.test('should throw a TypeError if non-tuple exists in a headers initializer', t => {
  t.throws(() => new Headers([['b', '2', 'huh?']]), TypeError)
  t.throws(() => new Headers(['b2']), TypeError)
  t.throws(() => new Headers('b2'), TypeError)
  t.throws(() => new Headers({ [Symbol.iterator]: 42 }), TypeError)
  t.end()
})
@ -0,0 +1,47 @@
// verify that passing a custom CA cert will work with minipass-fetch
// i.e., going a different direction than the decision node-fetch made
// https://github.com/node-fetch/node-fetch/issues/15
const t = require('tap')
const fetch = require('../')
const { resolve } = require('path')
const fixtures = resolve(__dirname, 'fixtures/tls')
const { readFileSync: read } = require('fs')

const ca = read(`${fixtures}/minipass-CA.pem`)
const cert = read(`${fixtures}/localhost.crt`)
const key = read(`${fixtures}/localhost.key`)
const { createServer } = require('https')
const port = 30000 + (+process.env.TAP_CHILD_ID || 1)
const base = `https://localhost:${port}/`

t.test('setup server', { bail: true }, t => {
  const server = createServer({
    cert,
    key,
  }, (q, s) => {
    s.setHeader('content-type', 'text/plain')
    s.setHeader('connection', 'close')
    s.end(`${q.method} ${q.url}`)
  })
  server.listen(port, () => {
    t.parent.teardown(() => server.close())
    t.end()
  })
})

t.test('make https request without ca, should fail', t =>
  t.rejects(fetch(`${base}hello`), {
    name: 'FetchError',
    message: `request to ${base}hello failed, reason: unable to verify the first certificate`,
    code: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE',
    errno: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE',
    type: 'system',
  }))

t.test('make https request with rejectUnauthorized:false, succeeds', async t =>
  t.equal(await (await fetch(`${base}hello`, { rejectUnauthorized: false })).text(),
    'GET /hello'))

t.test('make https request with ca, succeeds', async t =>
  t.equal(await (await fetch(`${base}hello`, { ca })).text(),
    'GET /hello'))
@ -0,0 +1,428 @@
'use strict'
const t = require('tap')
const Request = require('../lib/request.js')
const stringToArrayBuffer = require('string-to-arraybuffer')
const Minipass = require('minipass')
const base = 'http://localhost:12345/'
const FormData = require('form-data')
const { AbortController } = require('abortcontroller-polyfill/dist/abortcontroller')
const Blob = require('../lib/blob.js')
const http = require('http')
const { parse } = require('url')

t.Test.prototype.addAssert('contain', 2, function (list, key, m, e) {
  m = m || 'expected item to be contained in list'
  e.found = list
  e.wanted = key
  return this.ok(list.indexOf(key) !== -1, m, e)
})
t.Test.prototype.addAssert('notContain', 2, function (list, key, m, e) {
  m = m || 'expected item to not be contained in list'
  e.found = list
  e.wanted = key
  return this.notOk(list.indexOf(key) !== -1, m, e)
})

t.test('should have attributes conforming to Web IDL', t => {
  const req = new Request({ href: 'https://github.com/' })
  t.equal(req.url, 'https://github.com/')
  t.equal(String(req), '[object Request]')
  const enumerableProperties = []
  for (const property in req) {
    enumerableProperties.push(property)
  }
  for (const toCheck of [
    'body', 'bodyUsed', 'arrayBuffer', 'blob', 'json', 'text',
    'method', 'url', 'headers', 'redirect', 'clone', 'signal',
  ]) {
    t.contain(enumerableProperties, toCheck)
  }
  for (const toCheck of [
    'body', 'bodyUsed', 'method', 'url', 'headers', 'redirect', 'signal',
  ]) {
    t.throws(() => req[toCheck] = 'abc')
  }
  t.end()
})

t.test('signal must be a signal', t => {
  t.throws(() => new Request('http://foo.com', { signal: {} }), TypeError)
  t.end()
})

t.test('should support wrapping Request instance', t => {
  const url = `${base}hello`

  const form = new FormData()
  form.append('a', '1')
  const { signal } = new AbortController()

  const r1 = new Request(url, {
    method: 'POST',
    follow: 1,
    body: form,
    signal,
    rejectUnauthorized: false,
  })
  const r2 = new Request(r1, {
    follow: 2
  })

  t.equal(r2.url, url)
  t.equal(r2.method, 'POST')
  t.equal(r2.signal, signal)
  // note that we didn't clone the body
  t.equal(r2.body, form)
  t.equal(r1.follow, 1)
  t.equal(r2.follow, 2)
  t.equal(r1.counter, 0)
  t.equal(r2.counter, 0)
  t.same(Request.getNodeRequestOptions(r1), Request.getNodeRequestOptions(r2))
  t.end()
})

t.test('should override signal on derived Request instances', t => {
  const parentAbortController = new AbortController()
  const derivedAbortController = new AbortController()
  const parentRequest = new Request('test', {
    signal: parentAbortController.signal
  })
  const derivedRequest = new Request(parentRequest, {
    signal: derivedAbortController.signal
  })
  t.equal(parentRequest.signal, parentAbortController.signal)
  t.equal(derivedRequest.signal, derivedAbortController.signal)
  t.end()
})

t.test('should allow removing signal on derived Request instances', t => {
  const parentAbortController = new AbortController()
  const parentRequest = new Request(`test`, {
    signal: parentAbortController.signal
  })
  const derivedRequest = new Request(parentRequest, {
    signal: null
  })
  t.equal(parentRequest.signal, parentAbortController.signal)
  t.equal(derivedRequest.signal, null)
  t.end()
})

t.test('should throw error with GET/HEAD requests with body', t => {
  t.throws(() => new Request('.', { body: '' }), TypeError)
  t.throws(() => new Request('.', { body: 'a' }), TypeError)
  t.throws(() => new Request('.', { body: '', method: 'HEAD' }), TypeError)
  t.throws(() => new Request('.', { body: 'a', method: 'HEAD' }), TypeError)
  t.throws(() => new Request('.', { body: 'a', method: 'get' }), TypeError)
  t.throws(() => new Request('.', { body: 'a', method: 'head' }), TypeError)
  t.end()
})

t.test('should default to null as body', t => {
  const req = new Request('.')
  t.equal(req.body, null)
  return req.text().then(result => t.equal(result, ''))
})

t.test('should support parsing headers', t => {
  const url = base
  const req = new Request(url, {
    headers: {
      a: '1'
    }
  })
  t.equal(req.url, url)
  t.equal(req.headers.get('a'), '1')
  t.end()
})

t.test('should support arrayBuffer() method', t => {
  const url = base
  const req = new Request(url, {
    method: 'POST',
    body: 'a=1'
  })
  t.equal(req.url, url)
  return req.arrayBuffer().then(function (result) {
    t.type(result, ArrayBuffer)
    const str = String.fromCharCode.apply(null, new Uint8Array(result))
    t.equal(str, 'a=1')
  })
})

t.test('should support text() method', t => {
  const url = base
  const req = new Request(url, {
    method: 'POST',
    body: 'a=1'
  })
  t.equal(req.url, url)
  return req.text().then(result => t.equal(result, 'a=1'))
})

t.test('should support json() method', t => {
  const url = base
  const req = new Request(url, {
    method: 'POST',
    body: '{"a":1}'
  })
  t.equal(req.url, url)
  return req.json().then(result => t.equal(result.a, 1))
})

t.test('should support buffer() method', t => {
  const url = base
  const req = new Request(url, {
    method: 'POST',
    body: 'a=1'
  })
  t.equal(req.url, url)
  return req.buffer().then(result => t.equal(result.toString(), 'a=1'))
})

t.test('should support blob() method', t => {
  const url = base
  const req = new Request(url, {
    method: 'POST',
    body: Buffer.from('a=1')
  })
  t.equal(req.url, url)
  return req.blob().then(function (result) {
    t.type(result, Blob)
    t.equal(result.size, 3)
    t.equal(result.type, '')
  })
})

t.test('should support arbitrary url', t => {
  const url = 'anything'
  const req = new Request(url)
  t.equal(req.url, 'anything')
  t.end()
})

t.test('should support clone() method', t => {
  const url = base
  const r = new Minipass().end('a=1')
  r.pause()
  setTimeout(() => r.resume())
  const body = r.pipe(new Minipass())
  const agent = new http.Agent()
  const { signal } = new AbortController()
  const req = new Request(url, {
    body,
    method: 'POST',
    redirect: 'manual',
    headers: {
      b: '2'
    },
    follow: 3,
    compress: false,
    agent,
    signal,
  })
  const cl = req.clone()
  t.equal(cl.url, url)
  t.equal(cl.method, 'POST')
  t.equal(cl.redirect, 'manual')
  t.equal(cl.headers.get('b'), '2')
  t.equal(cl.follow, 3)
  t.equal(cl.compress, false)
  t.equal(cl.method, 'POST')
  t.equal(cl.counter, 0)
  t.equal(cl.agent, agent)
  t.equal(cl.signal, signal)
  // clone body shouldn't be the same body
  t.not(cl.body, body)
  return Promise.all([cl.text(), req.text()]).then(results => {
    t.equal(results[0], 'a=1')
    t.equal(results[1], 'a=1')
  })
})

t.test('should support ArrayBuffer as body', t => {
  const req = new Request('', {
    method: 'POST',
    body: stringToArrayBuffer('a=1')
  })
  return req.text().then(result => t.equal(result, 'a=1'))
})

t.test('should support Uint8Array as body', t => {
  const req = new Request('', {
    method: 'POST',
    body: new Uint8Array(stringToArrayBuffer('a=1'))
  })
  return req.text().then(result => t.equal(result, 'a=1'))
})

t.test('should support DataView as body', t => {
  const req = new Request('', {
    method: 'POST',
    body: new DataView(stringToArrayBuffer('a=1'))
  })
  return req.text().then(result => t.equal(result, 'a=1'))
})

t.test('should set rejectUnauthorized to true if NODE_TLS_REJECT_UNAUTHORIZED is not set', t => {
  const tlsRejectBefore = process.env.NODE_TLS_REJECT_UNAUTHORIZED
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = null
  const req = new Request('http://a.b')
  t.equal(Request.getNodeRequestOptions(req).rejectUnauthorized, true)
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = tlsRejectBefore
  t.end()
})

t.test('should set rejectUnauthorized to false if NODE_TLS_REJECT_UNAUTHORIZED is set to \'0\'', t => {
  const tlsRejectBefore = process.env.NODE_TLS_REJECT_UNAUTHORIZED
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'
  const req = new Request('http://a.b')
  t.equal(Request.getNodeRequestOptions(req).rejectUnauthorized, false)
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = tlsRejectBefore
  t.end()
})

t.test('get node request options', t => {
  t.match(Request.getNodeRequestOptions(new Request('http://a.b', {
    method: 'POST',
    headers: {
      accept: 'text/plain; q=1, *.*; q=0.8',
    },
    body: null,
    compress: true,
  })), {
    ...(parse('http://a.b')),
    method: 'POST',
    headers: {
      'Content-Length': ['0'],
      'Accept-Encoding': ['gzip,deflate'],
      'Connection': ['close'],
      'User-Agent': /^minipass-fetch\//,
    },
    agent: undefined,
  }, 'happy path')

  t.match(Request.getNodeRequestOptions(new Request('http://a.b', {
    method: 'PATCH',
    headers: {
      accept: 'text/plain; q=1, *.*; q=0.8',
    },
    body: '123',
    compress: true,
  })), {
    ...(parse('http://a.b')),
    method: 'PATCH',
    headers: {
      'Content-Length': ['3'],
      'Accept-Encoding': ['gzip,deflate'],
      'Connection': ['close'],
      'User-Agent': /^minipass-fetch\//,
    },
    agent: undefined,
  }, 'happy path')

  t.match(Request.getNodeRequestOptions(new Request('http://a.b', {
    method: 'PATCH',
    headers: {
      accept: 'text/plain; q=1, *.*; q=0.8',
    },
    body: null,
    compress: true,
  })), {
    ...(parse('http://a.b')),
    method: 'PATCH',
    headers: {
      'Content-Length': undefined,
      'Accept-Encoding': ['gzip,deflate'],
      'Connection': ['close'],
      'User-Agent': /^minipass-fetch\//,
    },
    agent: undefined,
  }, 'happy path')

  t.match(Request.getNodeRequestOptions(new Request('http://x.y', {
    method: 'PATCH',
    headers: {
      'user-agent': 'abc',
      connection: 'whatevs',
    },
    body: 'xyz',
    compress: false,
  })), {
    href: 'http://x.y',
    method: 'PATCH',
    headers: {
      Accept: ['*/*'],
      'user-agent': ['abc'],
      connection: ['whatevs'],
      'Content-Length': ['3'],
    },
  })

  t.match(Request.getNodeRequestOptions(new Request('http://x.y', {
    method: 'PATCH',
    headers: {
      'user-agent': 'abc',
      connection: 'whatevs',
    },
    body: new Minipass().end('xyz'),
    compress: false,
  })), {
    href: 'http://x.y',
    method: 'PATCH',
    headers: {
      Accept: ['*/*'],
      'user-agent': ['abc'],
      connection: ['whatevs'],
      'Content-Length': undefined,
    },
  })

  t.match(Request.getNodeRequestOptions(new Request('http://x.y', {
    method: 'GET',
    family: 6,
  })), {
    href: 'http://x.y',
    method: 'GET',
    family: 6,
  })

  t.test('function as agent', t => {
    let agentCalled = false
    const agent = () => {
      agentCalled = true
      return 420
    }

    Request.getNodeRequestOptions(new Request('http://a.b', { agent }), {
      method: 'GET',
      href: 'http://a.b',
      agent: 420,
    })

    t.equal(agentCalled, true)
    t.end()
  })

  t.throws(() => Request.getNodeRequestOptions(new Request('ok.html')), {
    message: 'Only absolute URLs are supported',
    constructor: TypeError,
  })

  t.throws(() => Request.getNodeRequestOptions(new Request('xyz://ok.html')), {
    message: 'Only HTTP(S) protocols are supported',
    constructor: TypeError,
  })

  t.throws(() => Request.getNodeRequestOptions(new Request('http://a.b', {
    method: 'POST',
    body: new (class extends Minipass {
      get destroy () { return undefined }
    })(),
    signal: new AbortController().signal,
  })), {
    message: 'Cancellation of streamed requests with AbortSignal is not supported'
  })

  t.end()
})
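
// As exercised above, getNodeRequestOptions(req) flattens a Request into
// the options object node's http/https request() expects: the parsed URL
// fields, default Accept / Accept-Encoding / Connection / User-Agent
// headers, a Content-Length when the body size is knowable ('0' for no
// body on methods that allow one, undefined for streams), and the
// resolved agent (calling it first if it was given as a function).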
@ -0,0 +1,169 @@
'use strict'
const t = require('tap')
const Response = require('../lib/response.js')
const stringToArrayBuffer = require('string-to-arraybuffer')
const Blob = require('../lib/blob.js')
const Minipass = require('minipass')
const base = `http://localhost:123456/`

t.Test.prototype.addAssert('contain', 2, function (list, key, m, e) {
  m = m || 'expected item to be contained in list'
  e.found = list
  e.wanted = key
  return this.ok(list.indexOf(key) !== -1, m, e)
})
t.Test.prototype.addAssert('notContain', 2, function (list, key, m, e) {
  m = m || 'expected item to not be contained in list'
  e.found = list
  e.wanted = key
  return this.notOk(list.indexOf(key) !== -1, m, e)
})

t.test('should have attributes conforming to Web IDL', t => {
  const res = new Response()
  t.equal(String(res), '[object Response]')
  const enumerableProperties = []
  for (const property in res) {
    enumerableProperties.push(property)
  }
  for (const toCheck of [
    'body', 'bodyUsed', 'arrayBuffer', 'blob', 'json', 'text',
    'url', 'status', 'ok', 'redirected', 'statusText', 'headers', 'clone'
  ]) {
    t.contain(enumerableProperties, toCheck)
  }
  for (const toCheck of [
    'body', 'bodyUsed', 'url', 'status', 'ok', 'redirected', 'statusText',
    'headers'
  ]) {
    t.throws(() => res[toCheck] = 'abc')
  }
  t.end()
})

t.test('should support empty options', t => {
  const r = new Minipass().end('a=1')
  r.pause()
  setTimeout(() => r.resume())
  const res = new Response(r.pipe(new Minipass()))
  return res.text().then(result => t.equal(result, 'a=1'))
})

t.test('should support parsing headers', t => {
  const res = new Response(null, {
    headers: {
      a: '1'
    }
  })
  t.equal(res.headers.get('a'), '1')
  t.end()
})

t.test('should support text() method', t =>
  new Response('a=1').text().then(result => t.equal(result, 'a=1')))

t.test('should support json() method', t =>
  new Response('{"a":1}').json().then(result => t.equal(result.a, 1)))

t.test('should support buffer() method', t =>
  new Response('a=1').buffer().then(result =>
    t.equal(result.toString(), 'a=1')))

t.test('should support blob() method', t =>
  new Response('a=1', {
    method: 'POST',
    headers: {
      'Content-Type': 'text/plain'
    }
  }).blob().then(result => {
    t.type(result, Blob)
    t.equal(result.size, 3)
    t.equal(result.type, 'text/plain')
  }))

t.test('should support clone() method', t => {
  const r = new Minipass().end('a=1')
  r.pause()
  setTimeout(() => r.resume())
  const body = r.pipe(new Minipass())
  const res = new Response(body, {
    headers: {
      a: '1'
    },
    url: base,
    status: 346,
    statusText: 'production'
  })
  const cl = res.clone()
  t.equal(cl.headers.get('a'), '1')
  t.equal(cl.url, base)
  t.equal(cl.status, 346)
  t.equal(cl.statusText, 'production')
  t.equal(cl.ok, false)
  // clone body shouldn't be the same body
  t.not(cl.body, body)
  return cl.text().then(result => t.equal(result, 'a=1'))
})

t.test('should support stream as body', t => {
  const r = new Minipass().end('a=1')
  r.pause()
  setTimeout(() => r.resume())
  const body = r.pipe(new Minipass())
  return new Response(body).text().then(result => t.equal(result, 'a=1'))
})

t.test('should support string as body', t =>
  new Response('a=1').text().then(result => t.equal(result, 'a=1')))

t.test('should support buffer as body', t =>
  new Response(Buffer.from('a=1')).text().then(result =>
    t.equal(result, 'a=1')))

t.test('should support ArrayBuffer as body', t =>
  new Response(stringToArrayBuffer('a=1')).text().then(result =>
    t.equal(result, 'a=1')))

t.test('should support blob as body', t =>
  new Response(new Blob(['a=1'])).text().then(result =>
    t.equal(result, 'a=1')))

t.test('should support Uint8Array as body', t =>
  new Response(new Uint8Array(stringToArrayBuffer('a=1'))).text()
    .then(result => t.equal(result, 'a=1')))

t.test('should support DataView as body', t =>
  new Response(new DataView(stringToArrayBuffer('a=1'))).text()
    .then(result => t.equal(result, 'a=1')))

t.test('should default to null as body', t => {
  const res = new Response()
  t.equal(res.body, null)

  return res.text().then(result => t.equal(result, ''))
})

t.test('should default to 200 as status code', t => {
  const res = new Response(null)
  t.equal(res.status, 200)
  t.end()
})

t.test('should default to empty string as url', t => {
  const res = new Response()
  t.equal(res.url, '')
  t.end()
})

t.test('trailers in response option', t => {
  const Headers = require('../lib/headers.js')
  const res = new Response(null, {
    trailer: Headers.createHeadersLenient({
      'X-Node-Fetch': 'hello world!'
    })
  })
  return res.trailer.then(trailers => {
    t.same(Array.from(trailers.keys()), ['x-node-fetch'])
    t.equal(trailers.get('x-node-fetch'), 'hello world!')
  })
})
|
|
@ -0,0 +1,22 @@
# ignore most things, include some others
/*
/.*

!bin/
!lib/
!docs/
!package.json
!package-lock.json
!README.md
!CONTRIBUTING.md
!LICENSE
!CHANGELOG.md
!example/
!scripts/
!tap-snapshots/
!test/
!.travis.yml
!.gitignore
!.gitattributes
!coverage-map.js
!index.js

@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -0,0 +1,47 @@
# minipass-flush

A Minipass stream that calls a flush function before emitting 'end'

## USAGE

```js
const Flush = require('minipass-flush')
const f = new Flush({
  flush (cb) {
    // call the cb when done, or return a promise
    // the 'end' event will wait for it, along with
    // close, finish, and prefinish.
    // call the cb with an error, or return a rejecting
    // promise to emit 'error' instead of doing the 'end'
    return rerouteAllEncryptions().then(() => clearAllChannels())
  },
  // all other minipass options accepted as well
})

someDataSource.pipe(f).on('end', () => {
  // proper flushing has been accomplished
})

// Or as a subclass implementing a 'flush' method:
class MyFlush extends Flush {
  flush (cb) {
    // old fashioned callback style!
    rerouteAllEncryptions(er => {
      if (er)
        return cb(er)
      clearAllChannels(er => {
        if (er)
          return cb(er)
        cb()
      })
    })
  }
}
```

That's about it.

If your `flush` method doesn't have to do anything asynchronous, then it's
better to call the callback right away in this tick, rather than returning
`Promise.resolve()`, so that the `end` event can happen as soon as
possible.

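If you do need such a synchronous flush, a minimal sketch looks like this
(`touchLastUsed` here is a hypothetical synchronous helper of your own, not
part of this package):

```js
const Flush = require('minipass-flush')

const f = new Flush({
  flush (cb) {
    // nothing asynchronous to wait for, so call cb() in this
    // same tick and let 'end' fire as soon as possible
    touchLastUsed() // hypothetical synchronous bookkeeping
    cb()
  },
})
```
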
@ -0,0 +1,39 @@
const Minipass = require('minipass')
const _flush = Symbol('_flush')
const _flushed = Symbol('_flushed')
const _flushing = Symbol('_flushing')
class Flush extends Minipass {
  constructor (opt = {}) {
    if (typeof opt === 'function')
      opt = { flush: opt }

    super(opt)

    // or extend this class and provide a 'flush' method in your subclass
    if (typeof opt.flush !== 'function' && typeof this.flush !== 'function')
      throw new TypeError('must provide flush function in options')

    this[_flush] = opt.flush || this.flush
  }

  emit (ev, ...data) {
    if ((ev !== 'end' && ev !== 'finish') || this[_flushed])
      return super.emit(ev, ...data)

    if (this[_flushing])
      return

    this[_flushing] = true

    const afterFlush = er => {
      this[_flushed] = true
      er ? super.emit('error', er) : super.emit('end')
    }

    const ret = this[_flush](afterFlush)
    if (ret && ret.then)
      ret.then(() => afterFlush(), er => afterFlush(er))
  }
}

module.exports = Flush

File diff suppressed because it is too large
@ -0,0 +1,39 @@
{
  "name": "minipass-flush",
  "version": "1.0.5",
  "description": "A Minipass stream that calls a flush function before emitting 'end'",
  "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
  "license": "ISC",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "check-coverage": true
  },
  "devDependencies": {
    "tap": "^14.6.9"
  },
  "dependencies": {
    "minipass": "^3.0.0"
  },
  "files": [
    "index.js"
  ],
  "main": "index.js",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minipass-flush.git"
  },
  "keywords": [
    "minipass",
    "flush",
    "stream"
  ],
  "engines": {
    "node": ">= 8"
  }
}

@ -0,0 +1,140 @@
const Flush = require('../')
const t = require('tap')

t.test('flush option, ok, cb', t => {
  let flushCalled = false
  const f = new Flush((cb) => {
    t.equal(flushCalled, false, 'call flush one time')
    flushCalled = true
    return cb()
  })
  f.setEncoding('utf8')
  f.on('end', () => {
    t.equal(flushCalled, true, 'called flush before end event')
    t.equal(sawData, true, 'saw data')
    t.end()
  })
  let sawData = false
  f.on('data', d => {
    sawData = true
    t.equal(d, 'foo')
  })
  f.end('foo')
})

t.test('flush option, ok, promise', t => {
  let flushCalled = false
  const f = new Flush({
    encoding: 'utf8',
    flush () {
      t.equal(flushCalled, false, 'call flush one time')
      flushCalled = true
      return Promise.resolve(true)
    }
  })
  f.on('end', () => {
    t.equal(flushCalled, true, 'called flush before end event')
    t.equal(sawData, true, 'saw data')
    t.end()
  })
  let sawData = false
  f.on('data', d => {
    sawData = true
    t.equal(d, 'foo')
  })
  f.end('foo')
})

t.test('flush option, not ok, cb', t => {
  let flushCalled = false
  const poop = new Error('poop')
  // can override subclass's flush with an option
  const f = new (class extends Flush {
    flush (cb) {
      t.fail('should not call this flush function')
    }
  })({
    encoding: 'utf8',
    flush (cb) {
      t.equal(flushCalled, false, 'call flush one time')
      flushCalled = true
      return cb(poop)
    },
  })

  f.on('error', er => {
    t.equal(sawData, true, 'saw data')
    t.equal(flushCalled, true, 'called flush before error event')
    t.equal(er, poop, 'flush error was raised')
    t.end()
  })
  let sawData = false
  f.on('data', d => {
    sawData = true
    t.equal(d, 'foo')
  })
  f.end('foo')
})

t.test('flush option, not ok, promise', t => {
  let flushCalled = false
  const poop = new Error('poop')

  // extending a subclass with a flush() method works the same way
  const f = new (class extends Flush {
    flush () {
      t.equal(flushCalled, false, 'call flush one time')
      flushCalled = true
      return Promise.reject(poop)
    }
  })()
  f.setEncoding('utf8')

  f.on('error', er => {
    t.equal(flushCalled, true, 'called flush before error event')
    t.equal(er, poop, 'flush error was raised')
    t.equal(sawData, true, 'saw data')
    t.end()
  })
  let sawData = false
  f.on('data', d => {
    sawData = true
    t.equal(d, 'foo')
  })
  f.end('foo')
})

t.test('missing flush option throws', t => {
  t.throws(() => new Flush({}), {
    message: 'must provide flush function in options'
  })
  t.end()
})

t.test('only flush once', t => {
  const f = new (class extends Flush {
    flush (cb) {
      if (this.flushCalled)
        cb(new Error('called flush more than once'))
      this.flushCalled = true
      // why would you do this even, it's a very bad idea!
      this.emit('end')
      cb()
    }
  })

  f.end()

  let sawEnd = false
  f.on('end', () => {
    t.pass('re-emitted end')
    t.notOk(sawEnd, 'this should be the first time seeing end')
    sawEnd = true
  })
  t.ok(sawEnd, 'should have emitted the first time')
  f.on('end', () => {
    t.ok(sawEnd, 'this happens after')
    t.pass('re-emitted end again')
    t.end()
  })
})

@ -0,0 +1,27 @@
The MIT License

Copyright (c) Isaac Z. Schlueter and Contributors
Copyright (c) 2011 Dominic Tarr

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

----
This is a derivative work based on JSONStream by Dominic Tarr, modified and
redistributed according to the terms of the MIT license above.
https://github.com/dominictarr/JSONStream

@ -0,0 +1,189 @@
# minipass-json-stream

Like [JSONStream](http://npm.im/JSONStream), but using Minipass streams

## install

```
npm install minipass-json-stream
```

## example

```js
const request = require('request')
const JSONStream = require('minipass-json-stream')
const es = require('event-stream')

request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
  .pipe(JSONStream.parse('rows.*'))
  .pipe(es.mapSync(function (data) {
    console.error(data)
    return data
  }))
```

## new JSONStream(options)

Create a new stream.  This is a [minipass](http://npm.im/minipass) stream
that is always set in `objectMode`.  It emits objects parsed out of
string/buffer JSON input that match the supplied `path` option.

## JSONStream.parse(path)

Return a new JSONStream object to stream values that match a path.

(Equivalent to `new JSONStream({path})`.)

``` js
JSONStream.parse('rows.*.doc')
```

The `..` operator is the recursive descent operator from
[JSONPath](http://goessner.net/articles/JsonPath/), which will match a
child at any depth (see examples below).

If your keys include characters like `.` or `*`, use an array instead:
`['row', true, /^doc/]`.

In the array representation, `RegExp`s, booleans, and/or functions may also
be used as path segments.  The `..` operator is also available in array
representation, using `{recurse: true}`.  Any object that matches the path
will be emitted as 'data' (and `pipe`d down stream).

If `path` is empty or null, no 'data' events are emitted.

If you want to have keys emitted, you can prefix your `*` operator with
`$`: `obj.$*` - in this case the data passed to the stream is an object
with a `key` holding the key and a `value` property holding the data.

### Examples

query a couchdb view:

``` bash
curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
```
you will get something like this:

``` js
{"total_rows":129,"offset":0,"rows":[
  { "id":"change1_0.6995461115147918"
  , "key":"change1_0.6995461115147918"
  , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
  , "doc":{
      "_id": "change1_0.6995461115147918"
    , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
  },
  { "id":"change2_0.6995461115147918"
  , "key":"change2_0.6995461115147918"
  , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
  , "doc":{
      "_id":"change2_0.6995461115147918"
    , "_rev":"1-13677d36b98c0c075145bb8975105153"
    , "hello":2
    }
  },
]}
```

we are probably most interested in the `rows.*.doc` values

create a `JSONStream` that parses the documents from the feed like this:

``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc

stream.on('data', function(data) {
  console.log('received:', data);
});

//emits anything from _before_ the first match
stream.on('header', function (data) {
  console.log('header:', data) // => {"total_rows":129,"offset":0}
})
```

awesome!

In case you wanted the contents of the doc emitted:

``` js
// equivalent to: 'rows.*.doc.$*'
var stream = JSONStream.parse([
  'rows',
  true,
  'doc',
  {emitKey: true}
]) //rows, ANYTHING, doc, items in docs with keys

stream.on('data', function(data) {
  console.log('key:', data.key);
  console.log('value:', data.value);
});
```

You can also emit the path:

``` js
var stream = JSONStream.parse([
  'rows',
  true,
  'doc',
  {emitPath: true}
]) //rows, ANYTHING, doc, items in docs with keys

stream.on('data', function(data) {
  console.log('path:', data.path);
  console.log('value:', data.value);
});
```

### recursive patterns (..)

`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.

```js
{
  "total": 5,
  "docs": [
    {
      "key": {
        "value": 0,
        "some": "property"
      }
    },
    {"value": 1},
    {"value": 2},
    {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
    {"value": 4}
  ]
}
```

## JSONStream.parse(pattern, map)

(Equivalent to `new JSONStream({ path: pattern, map })`)

Provide a function that can be used to map or filter
the json output.  `map` is passed the value at that node of the pattern;
if `map` returns non-nullish (anything but `null` or `undefined`),
that value will be emitted in the stream.  If it returns a nullish value,
nothing will be emitted.
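As a small illustration of the `map` argument (a sketch, assuming the
couchdb feed shape from the examples above), this emits each doc's `_id`
and drops docs that don't have one:

```js
const JSONStream = require('minipass-json-stream')

// map is called with each matched value; returning null or
// undefined suppresses the 'data' event for that match
const stream = JSONStream.parse('rows.*.doc', doc => doc._id || null)

stream.on('data', id => console.log('doc id:', id))
```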

`JSONStream` also emits `'header'` and `'footer'` events;
the `'header'` event contains anything in the output that was before
the first match, and the `'footer'` is anything after the last match.

## Acknowledgements

This module is a fork of [JSONStream](http://npm.im/JSONStream) by Dominic
Tarr, modified and redistributed under the terms of the MIT license.

This module depends on https://github.com/creationix/jsonparse
by Tim Caswell.

@ -0,0 +1,227 @@
// put javascript in here
'use strict'

const Parser = require('jsonparse')
const Minipass = require('minipass')

class JSONStreamError extends Error {
  constructor (err, caller) {
    super(err.message)
    Error.captureStackTrace(this, caller || this.constructor)
  }
  get name () {
    return 'JSONStreamError'
  }
  set name (n) {}
}

const check = (x, y) =>
  typeof x === 'string' ? String(y) === x
  : x && typeof x.test === 'function' ? x.test(y)
  : typeof x === 'boolean' || typeof x === 'object' ? x
  : typeof x === 'function' ? x(y)
  : false

const _parser = Symbol('_parser')
const _onValue = Symbol('_onValue')
const _onTokenOriginal = Symbol('_onTokenOriginal')
const _onToken = Symbol('_onToken')
const _onError = Symbol('_onError')
const _count = Symbol('_count')
const _path = Symbol('_path')
const _map = Symbol('_map')
const _root = Symbol('_root')
const _header = Symbol('_header')
const _footer = Symbol('_footer')
const _setHeaderFooter = Symbol('_setHeaderFooter')
const _ending = Symbol('_ending')

class JSONStream extends Minipass {
  constructor (opts = {}) {
    super({
      ...opts,
      objectMode: true,
    })

    this[_ending] = false
    const parser = this[_parser] = new Parser()
    parser.onValue = value => this[_onValue](value)
    this[_onTokenOriginal] = parser.onToken
    parser.onToken = (token, value) => this[_onToken](token, value)
    parser.onError = er => this[_onError](er)

    this[_count] = 0
    this[_path] = typeof opts.path === 'string'
      ? opts.path.split('.').map(e =>
          e === '$*' ? { emitKey: true }
          : e === '*' ? true
          : e === '' ? { recurse: true }
          : e)
      : Array.isArray(opts.path) && opts.path.length ? opts.path
      : null

    this[_map] = typeof opts.map === 'function' ? opts.map : null
    this[_root] = null
    this[_header] = null
    this[_footer] = null
    this[_count] = 0
  }

  [_setHeaderFooter] (key, value) {
    // header has not been emitted yet
    if (this[_header] !== false) {
      this[_header] = this[_header] || {}
      this[_header][key] = value
    }

    // footer has not been emitted yet but header has
    if (this[_footer] !== false && this[_header] === false) {
      this[_footer] = this[_footer] || {}
      this[_footer][key] = value
    }
  }

  [_onError] (er) {
    // error will always happen during a write() call.
    const caller = this[_ending] ? this.end : this.write
    this[_ending] = false
    return this.emit('error', new JSONStreamError(er, caller))
  }

  [_onToken] (token, value) {
    const parser = this[_parser]
    this[_onTokenOriginal].call(parser, token, value)
    if (parser.stack.length === 0) {
      if (this[_root]) {
        const root = this[_root]
        if (!this[_path])
          super.write(root)
        this[_root] = null
        this[_count] = 0
      }
    }
  }

  [_onValue] (value) {
    const parser = this[_parser]
    // the LAST onValue encountered is the root object.
    // just overwrite it each time.
    this[_root] = value

    if (!this[_path])
      return

    let i = 0 // iterates on path
    let j = 0 // iterates on stack
    let emitKey = false
    let emitPath = false
    while (i < this[_path].length) {
      const key = this[_path][i]
      j++

      if (key && !key.recurse) {
        const c = (j === parser.stack.length) ? parser : parser.stack[j]
        if (!c)
          return
        if (!check(key, c.key)) {
          this[_setHeaderFooter](c.key, value)
          return
        }
        emitKey = !!key.emitKey
        emitPath = !!key.emitPath
        i++
      } else {
        i++
        if (i >= this[_path].length)
          return
        const nextKey = this[_path][i]
        if (!nextKey)
          return
        while (true) {
          const c = (j === parser.stack.length) ? parser : parser.stack[j]
          if (!c)
            return
          if (check(nextKey, c.key)) {
            i++
            if (!Object.isFrozen(parser.stack[j]))
              parser.stack[j].value = null
            break
          } else {
            this[_setHeaderFooter](c.key, value)
          }
          j++
        }
      }
    }

    // emit header
    if (this[_header]) {
      const header = this[_header]
      this[_header] = false
      this.emit('header', header)
    }
    if (j !== parser.stack.length)
      return

    this[_count]++
    const actualPath = parser.stack.slice(1)
      .map(e => e.key).concat([parser.key])
    if (value !== null && value !== undefined) {
      const data = this[_map] ? this[_map](value, actualPath) : value
      if (data !== null && data !== undefined) {
        const emit = emitKey || emitPath ? { value: data } : data
        if (emitKey)
          emit.key = parser.key
        if (emitPath)
          emit.path = actualPath
        super.write(emit)
      }
    }

    if (parser.value)
      delete parser.value[parser.key]

    for (const k of parser.stack) {
      k.value = null
    }
  }

  write (chunk, encoding, cb) {
    if (typeof encoding === 'function')
      cb = encoding, encoding = null
    if (typeof chunk === 'string')
      chunk = Buffer.from(chunk, encoding)
    else if (!Buffer.isBuffer(chunk))
      return this.emit('error', new TypeError(
        'Can only parse JSON from string or buffer input'))
    this[_parser].write(chunk)
    if (cb)
      cb()
    return this.flowing
  }

  end (chunk, encoding, cb) {
    this[_ending] = true
    if (typeof encoding === 'function')
      cb = encoding, encoding = null
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (chunk)
      this.write(chunk, encoding)
    if (cb)
      this.once('end', cb)

    const h = this[_header]
    this[_header] = null
    const f = this[_footer]
    this[_footer] = null
    if (h)
      this.emit('header', h)
    if (f)
      this.emit('footer', f)
    return super.end()
  }

  static get JSONStreamError () { return JSONStreamError }
  static parse (path, map) {
    return new JSONStream({path, map})
  }
}

module.exports = JSONStream

@ -0,0 +1,39 @@
{
  "name": "minipass-json-stream",
  "version": "1.0.1",
  "description": "Like JSONStream, but using Minipass streams",
  "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
  "license": "MIT",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "check-coverage": true
  },
  "devDependencies": {
    "JSONStream": "^1.3.5",
    "tap": "^14.6.9"
  },
  "dependencies": {
    "jsonparse": "^1.3.1",
    "minipass": "^3.0.0"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/npm/minipass-json-stream.git"
  },
  "keywords": [
    "stream",
    "json",
    "parse",
    "minipass",
    "JSONStream"
  ],
  "files": [
    "index.js"
  ]
}

@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -0,0 +1,69 @@
# minipass-pipeline

Create a pipeline of streams using Minipass.

Calls `.pipe()` on all the streams in the list.  Returns a stream where
writes go to the first pipe in the chain, and reads are from the last.

Errors are proxied along the chain and emitted on the Pipeline stream.

## USAGE

```js
const Pipeline = require('minipass-pipeline')

// the list of streams to pipeline together,
// a bit like `input | transform | output` in bash
const p = new Pipeline(input, transform, output)

p.write('foo') // writes to input
p.on('data', chunk => doSomething()) // reads from output stream

// less contrived example (but still pretty contrived)...
const decode = new bunzipDecoder()
const unpack = tar.extract({ cwd: 'target-dir' })
const tbz = new Pipeline(decode, unpack)

fs.createReadStream('archive.tbz').pipe(tbz)

// specify any minipass options if you like, as the first argument
// it'll only try to pipeline event emitters with a .pipe() method
const p = new Pipeline({ objectMode: true }, input, transform, output)

// If you don't know the things to pipe in right away, that's fine.
// use p.push(stream) to add to the end, or p.unshift(stream) to the front
const databaseDecoderStreamDoohickey = (connectionInfo) => {
  const p = new Pipeline()
  logIntoDatabase(connectionInfo).then(connection => {
    initializeDecoderRing(connectionInfo).then(decoderRing => {
      p.push(connection, decoderRing)
      getUpstreamSource(upstream => {
        p.unshift(upstream)
      })
    })
  })
  // return to caller right away
  // emitted data will be upstream -> connection -> decoderRing pipeline
  return p
}
```

Pipeline is a [minipass](http://npm.im/minipass) stream, so it's as
synchronous as the streams it wraps.  It will buffer data until there is a
reader, but no longer, so make sure to attach your listeners before you
pipe it somewhere else.

## `new Pipeline(opts = {}, ...streams)`

Create a new Pipeline with the specified Minipass options and any streams
provided.

## `pipeline.push(stream, ...)`

Attach one or more streams to the pipeline at the end (read) side of the
pipe chain.

## `pipeline.unshift(stream, ...)`

Attach one or more streams to the pipeline at the start (write) side of the
pipe chain.

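To make the push/unshift flow concrete, here is a minimal sketch that grows
a pipeline after construction, using gzip/gunzip streams from
[minizlib](http://npm.im/minizlib) as stand-in stages:

```js
const Pipeline = require('minipass-pipeline')
const zlib = require('minizlib')

const p = new Pipeline(new zlib.Gzip())
p.push(new zlib.Gunzip()) // append at the end (read) side
// p.unshift(earlierStream) would attach at the start (write) side instead

p.setEncoding('utf8')
p.on('data', d => console.log(d)) // => 'hello'
p.end('hello')
```
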
@ -0,0 +1,128 @@
const Minipass = require('minipass')
const EE = require('events')
const isStream = s => s && s instanceof EE && (
  typeof s.pipe === 'function' || // readable
  (typeof s.write === 'function' && typeof s.end === 'function') // writable
)

const _head = Symbol('_head')
const _tail = Symbol('_tail')
const _linkStreams = Symbol('_linkStreams')
const _setHead = Symbol('_setHead')
const _setTail = Symbol('_setTail')
const _onError = Symbol('_onError')
const _onData = Symbol('_onData')
const _onEnd = Symbol('_onEnd')
const _onDrain = Symbol('_onDrain')
const _streams = Symbol('_streams')
class Pipeline extends Minipass {
  constructor (opts, ...streams) {
    if (isStream(opts)) {
      streams.unshift(opts)
      opts = {}
    }

    super(opts)
    this[_streams] = []
    if (streams.length)
      this.push(...streams)
  }

  [_linkStreams] (streams) {
    // reduce takes (left,right), and we return right to make it the
    // new left value.
    return streams.reduce((src, dest) => {
      src.on('error', er => dest.emit('error', er))
      src.pipe(dest)
      return dest
    })
  }

  push (...streams) {
    this[_streams].push(...streams)
    if (this[_tail])
      streams.unshift(this[_tail])

    const linkRet = this[_linkStreams](streams)

    this[_setTail](linkRet)
    if (!this[_head])
      this[_setHead](streams[0])
  }

  unshift (...streams) {
    this[_streams].unshift(...streams)
    if (this[_head])
      streams.push(this[_head])

    const linkRet = this[_linkStreams](streams)
    this[_setHead](streams[0])
    if (!this[_tail])
      this[_setTail](linkRet)
  }

  destroy (er) {
    // set fire to the whole thing.
    this[_streams].forEach(s =>
      typeof s.destroy === 'function' && s.destroy())
    return super.destroy(er)
  }

  // readable interface -> tail
  [_setTail] (stream) {
    this[_tail] = stream
    stream.on('error', er => this[_onError](stream, er))
    stream.on('data', chunk => this[_onData](stream, chunk))
    stream.on('end', () => this[_onEnd](stream))
    stream.on('finish', () => this[_onEnd](stream))
  }

  // errors proxied down the pipeline
  // they're considered part of the "read" interface
  [_onError] (stream, er) {
    if (stream === this[_tail])
      this.emit('error', er)
  }
  [_onData] (stream, chunk) {
    if (stream === this[_tail])
      super.write(chunk)
  }
  [_onEnd] (stream) {
    if (stream === this[_tail])
      super.end()
  }
  pause () {
    super.pause()
    return this[_tail] && this[_tail].pause && this[_tail].pause()
  }

  // NB: Minipass calls its internal private [RESUME] method during
  // pipe drains, to avoid hazards where stream.resume() is overridden.
  // Thus, we need to listen to the resume *event*, not override the
  // resume() method, and proxy *that* to the tail.
  emit (ev, ...args) {
    if (ev === 'resume' && this[_tail] && this[_tail].resume)
      this[_tail].resume()
    return super.emit(ev, ...args)
  }

  // writable interface -> head
  [_setHead] (stream) {
    this[_head] = stream
    stream.on('drain', () => this[_onDrain](stream))
  }
  [_onDrain] (stream) {
    if (stream === this[_head])
      this.emit('drain')
  }
  write (chunk, enc, cb) {
    return this[_head].write(chunk, enc, cb) &&
      (this.flowing || this.buffer.length === 0)
  }
  end (chunk, enc, cb) {
    this[_head].end(chunk, enc, cb)
    return this
  }
}

module.exports = Pipeline

@ -0,0 +1,29 @@
{
  "name": "minipass-pipeline",
  "version": "1.2.4",
  "description": "create a pipeline of streams using Minipass",
  "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
  "license": "ISC",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "check-coverage": true
  },
  "devDependencies": {
    "tap": "^14.6.9"
  },
  "dependencies": {
    "minipass": "^3.0.0"
  },
  "files": [
    "index.js"
  ],
  "engines": {
    "node": ">=8"
  }
}

@ -0,0 +1,22 @@
# ignore most things, include some others
/*
/.*

!bin/
!lib/
!docs/
!package.json
!package-lock.json
!README.md
!CONTRIBUTING.md
!LICENSE
!CHANGELOG.md
!example/
!scripts/
!tap-snapshots/
!test/
!.travis.yml
!.gitignore
!.gitattributes
!coverage-map.js
!index.js

@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

@ -0,0 +1,28 @@
# minipass-sized

A Minipass stream that raises an error if you get a different number of
bytes than expected.

## USAGE

Use just like any old [minipass](http://npm.im/minipass) stream, but
provide a `size` option to the constructor.

The `size` option must be a positive integer, smaller than
`Number.MAX_SAFE_INTEGER`.

```js
const MinipassSized = require('minipass-sized')
// figure out how much data you expect to get
const expectedSize = +headers['content-length']
const stream = new MinipassSized({ size: expectedSize })
stream.on('error', er => {
  // if it's the wrong size, then this will raise an error with
  // { found: <number>, expect: <number>, code: 'EBADSIZE' }
})
response.pipe(stream)
```

Caveats: this does not work with `objectMode` streams, and will throw a
`TypeError` from the constructor if the size argument is missing or
invalid.

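To round out the example above, here is the success path as a quick sketch,
using minipass's `concat()` helper to collect the output once the size
check passes:

```js
const MinipassSized = require('minipass-sized')

const stream = new MinipassSized({ size: 4 })
stream.write('as')
stream.end('df')
// concat() resolves with the full body once 'end' fires
stream.concat().then(data => console.log(data.toString())) // => 'asdf'
```
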
@ -0,0 +1,67 @@
const Minipass = require('minipass')

class SizeError extends Error {
  constructor (found, expect) {
    super(`Bad data size: expected ${expect} bytes, but got ${found}`)
    this.expect = expect
    this.found = found
    this.code = 'EBADSIZE'
    Error.captureStackTrace(this, this.constructor)
  }
  get name () {
    return 'SizeError'
  }
}

class MinipassSized extends Minipass {
  constructor (options = {}) {
    super(options)

    if (options.objectMode)
      throw new TypeError(`${
        this.constructor.name
      } streams only work with string and buffer data`)

    this.found = 0
    this.expect = options.size
    if (typeof this.expect !== 'number' ||
        this.expect > Number.MAX_SAFE_INTEGER ||
        isNaN(this.expect) ||
        this.expect < 0 ||
        !isFinite(this.expect) ||
        this.expect !== Math.floor(this.expect))
      throw new Error('invalid expected size: ' + this.expect)
  }

  write (chunk, encoding, cb) {
    const buffer = Buffer.isBuffer(chunk) ? chunk
      : typeof chunk === 'string' ?
        Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8')
      : chunk

    if (!Buffer.isBuffer(buffer)) {
      this.emit('error', new TypeError(`${
        this.constructor.name
      } streams only work with string and buffer data`))
      return false
    }

    this.found += buffer.length
    if (this.found > this.expect)
      this.emit('error', new SizeError(this.found, this.expect))

    return super.write(chunk, encoding, cb)
  }

  emit (ev, ...data) {
    if (ev === 'end') {
      if (this.found !== this.expect)
        this.emit('error', new SizeError(this.found, this.expect))
    }
    return super.emit(ev, ...data)
  }
}

MinipassSized.SizeError = SizeError

module.exports = MinipassSized

File diff suppressed because it is too large
@ -0,0 +1,39 @@
{
  "name": "minipass-sized",
  "version": "1.0.3",
  "description": "A Minipass stream that raises an error if you get a different number of bytes than expected",
  "author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
  "license": "ISC",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --follow-tags"
  },
  "tap": {
    "check-coverage": true
  },
  "devDependencies": {
    "tap": "^14.6.4"
  },
  "dependencies": {
    "minipass": "^3.0.0"
  },
  "main": "index.js",
  "keywords": [
    "minipass",
    "size",
    "length"
  ],
  "directories": {
    "test": "test"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minipass-sized.git"
  },
  "engines": {
    "node": ">=8"
  }
}

@ -0,0 +1,83 @@
const t = require('tap')
const MPS = require('../')

t.test('ok if size checks out', t => {
  const mps = new MPS({ size: 4 })

  mps.write(Buffer.from('a').toString('hex'), 'hex')
  mps.write(Buffer.from('sd'))
  mps.end('f')
  return mps.concat().then(data => t.equal(data.toString(), 'asdf'))
})

t.test('error if size exceeded', t => {
  const mps = new MPS({ size: 1 })
  mps.on('error', er => {
    t.match(er, {
      message: 'Bad data size: expected 1 bytes, but got 4',
      found: 4,
      expect: 1,
      code: 'EBADSIZE',
      name: 'SizeError',
    })
    t.end()
  })
  mps.write('asdf')
})

t.test('error if size is not met', t => {
  const mps = new MPS({ size: 999 })
  t.throws(() => mps.end(), {
    message: 'Bad data size: expected 999 bytes, but got 0',
    found: 0,
    name: 'SizeError',
    expect: 999,
    code: 'EBADSIZE',
  })
  t.end()
})

t.test('error if non-string/buffer is written', t => {
  const mps = new MPS({size:1})
  mps.on('error', er => {
    t.match(er, {
      message: 'MinipassSized streams only work with string and buffer data'
    })
    t.end()
  })
  mps.write({some:'object'})
})

t.test('projectiles', t => {
  t.throws(() => new MPS(), {
    message: 'invalid expected size: undefined'
  }, 'size is required')
  t.throws(() => new MPS({size: true}), {
    message: 'invalid expected size: true'
  }, 'size must be number')
  t.throws(() => new MPS({size: NaN}), {
    message: 'invalid expected size: NaN'
  }, 'size must not be NaN')
  t.throws(() => new MPS({size:1.2}), {
    message: 'invalid expected size: 1.2'
  }, 'size must be integer')
  t.throws(() => new MPS({size: Infinity}), {
    message: 'invalid expected size: Infinity'
  }, 'size must be finite')
  t.throws(() => new MPS({size: -1}), {
    message: 'invalid expected size: -1'
  }, 'size must be positive')
  t.throws(() => new MPS({objectMode: true}), {
    message: 'MinipassSized streams only work with string and buffer data'
  }, 'no objectMode')
  t.throws(() => new MPS({size: Number.MAX_SAFE_INTEGER + 1000000}), {
    message: 'invalid expected size: 9007199255740992'
  })
  t.end()
})

t.test('exports SizeError class', t => {
  t.isa(MPS.SizeError, 'function')
  t.isa(MPS.SizeError.prototype, Error)
  t.end()
})

@ -0,0 +1,4 @@
node_modules/
coverage/
.nyc_output/
nyc_output/

@ -0,0 +1,6 @@
language: node_js
sudo: false
node_js:
  - node
  - 12
  - 10

@ -0,0 +1,26 @@
Minizlib was created by Isaac Z. Schlueter.
It is a derivative work of the Node.js project.

"""
Copyright Isaac Z. Schlueter and Contributors
Copyright Node.js contributors. All rights reserved.
Copyright Joyent, Inc. and other Node contributors. All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""

@ -0,0 +1,60 @@
# minizlib

A fast zlib stream built on [minipass](http://npm.im/minipass) and
Node.js's zlib binding.

This module was created to serve the needs of
[node-tar](http://npm.im/tar) and
[minipass-fetch](http://npm.im/minipass-fetch).

Brotli is supported in versions of node with a Brotli binding.

## How does this differ from the streams in `require('zlib')`?

First, there are no convenience methods to compress or decompress a
buffer.  If you want those, use the built-in `zlib` module.  This is
only streams.  That being said, Minipass streams make it fairly easy to
use as one-liners: `new zlib.Deflate().end(data).read()` will return the
deflate compressed result.
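For instance, a minimal round-trip sketch using those one-liners (it works
synchronously, since minizlib does its work as data is written):

```js
const zlib = require('minizlib')

const data = Buffer.from('hello, world')
const compressed = new zlib.Deflate().end(data).read()
const roundTrip = new zlib.Inflate().end(compressed).read()
console.log(roundTrip.toString()) // => 'hello, world'
```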

This module compresses and decompresses the data as fast as you feed
it in.  It is synchronous, and runs on the main process thread.  Zlib
and Brotli operations can be high CPU, but they're very fast, and doing it
this way means much less bookkeeping and artificial deferral.

Node's built-in zlib streams are built on top of `stream.Transform`.
They do the maximally safe thing with respect to consistent
asynchrony, buffering, and backpressure.

See [Minipass](http://npm.im/minipass) for more on the differences between
Node.js core streams and Minipass streams, and the convenience methods
provided by that class.

## Classes

- Deflate
- Inflate
- Gzip
- Gunzip
- DeflateRaw
- InflateRaw
- Unzip
- BrotliCompress (Node v10 and higher)
- BrotliDecompress (Node v10 and higher)

## USAGE

```js
const zlib = require('minizlib')
const input = sourceOfCompressedData()
const decode = new zlib.BrotliDecompress()
const output = whereToWriteTheDecodedData()
input.pipe(decode).pipe(output)
```

## REPRODUCIBLE BUILDS

To create reproducible gzip compressed files across different operating
systems, set `portable: true` in the options.  This causes minizlib to set
the `OS` indicator in byte 9 of the extended gzip header to `0xFF` for
'unknown'.
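A short sketch of that option in use, writing a `.gz` file whose header
does not record the producing OS:

```js
const zlib = require('minizlib')
const fs = require('fs')

// portable: true pins the OS byte in the gzip header to 0xFF,
// so the same input yields the same bytes on any platform
const gzip = new zlib.Gzip({ portable: true })
fs.createReadStream('input.txt')
  .pipe(gzip)
  .pipe(fs.createWriteStream('input.txt.gz'))
```
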
@ -0,0 +1,110 @@
'use strict'
const core = require('zlib')
const miniz = require('minizlib')
const fs = require('fs')
const file = 'npm-5-8x.tgz'
const data = fs.readFileSync(file)

const N = +process.argv[2] || 10
let C = +process.argv[3] || 3

let n = 0
const doMini = () => {
  const miniunz = new miniz.Unzip()
  miniunz.on('end', () => {
    console.timeEnd('mini')
    if (++n < N)
      doMini()
    else {
      n = 0
      doCore()
    }
  })

  console.time('mini')
  miniunz.end(data)
  miniunz.resume()
}

const doCore = () => {
  const coreunz = new core.Unzip()
  coreunz.on('end', () => {
    console.timeEnd('core')
    if (++n < N)
      doCore()
    else if (--C > 0) {
      n = 0
      doMini()
    }
  })

  console.time('core')
  coreunz.end(data)
  coreunz.resume()
}

doMini()

/*
$ node bench.js
mini: 1062.121ms
mini: 992.747ms
mini: 981.529ms
mini: 939.813ms
mini: 1009.037ms
mini: 969.063ms
mini: 961.559ms
mini: 952.462ms
mini: 931.309ms
mini: 942.898ms
core: 1133.598ms
core: 1112.883ms
core: 1086.734ms
core: 1073.089ms
core: 1048.197ms
core: 1072.097ms
core: 1073.972ms
core: 1053.326ms
core: 1053.606ms
core: 1052.969ms
mini: 906.290ms
mini: 1001.500ms
mini: 1035.073ms
mini: 963.583ms
mini: 922.108ms
mini: 935.533ms
mini: 877.866ms
mini: 914.190ms
mini: 908.777ms
mini: 889.769ms
core: 1103.496ms
core: 1049.253ms
core: 1136.523ms
core: 1066.346ms
core: 1085.796ms
core: 1062.242ms
core: 1071.801ms
core: 1078.519ms
core: 1077.774ms
core: 1104.796ms
mini: 934.895ms
mini: 973.971ms
mini: 938.026ms
mini: 971.475ms
mini: 946.436ms
mini: 966.129ms
mini: 943.973ms
mini: 961.074ms
mini: 966.523ms
mini: 993.003ms
core: 1107.929ms
core: 1080.664ms
core: 1075.637ms
core: 1084.507ms
core: 1071.859ms
core: 1049.318ms
core: 1054.679ms
core: 1055.525ms
core: 1060.224ms
core: 1056.568ms
*/

@ -0,0 +1,115 @@
// Update with any zlib constants that are added or changed in the future.
// Node v6 didn't export this, so we just hard code the version and rely
// on all the other hard-coded values from zlib v4736.  When node v6
// support drops, we can just export the realZlibConstants object.
const realZlibConstants = require('zlib').constants ||
  /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }

module.exports = Object.freeze(Object.assign(Object.create(null), {
  Z_NO_FLUSH: 0,
  Z_PARTIAL_FLUSH: 1,
  Z_SYNC_FLUSH: 2,
  Z_FULL_FLUSH: 3,
  Z_FINISH: 4,
  Z_BLOCK: 5,
  Z_OK: 0,
  Z_STREAM_END: 1,
  Z_NEED_DICT: 2,
  Z_ERRNO: -1,
  Z_STREAM_ERROR: -2,
  Z_DATA_ERROR: -3,
  Z_MEM_ERROR: -4,
  Z_BUF_ERROR: -5,
  Z_VERSION_ERROR: -6,
  Z_NO_COMPRESSION: 0,
  Z_BEST_SPEED: 1,
  Z_BEST_COMPRESSION: 9,
  Z_DEFAULT_COMPRESSION: -1,
  Z_FILTERED: 1,
  Z_HUFFMAN_ONLY: 2,
  Z_RLE: 3,
  Z_FIXED: 4,
  Z_DEFAULT_STRATEGY: 0,
  DEFLATE: 1,
  INFLATE: 2,
  GZIP: 3,
  GUNZIP: 4,
  DEFLATERAW: 5,
  INFLATERAW: 6,
  UNZIP: 7,
  BROTLI_DECODE: 8,
  BROTLI_ENCODE: 9,
  Z_MIN_WINDOWBITS: 8,
  Z_MAX_WINDOWBITS: 15,
  Z_DEFAULT_WINDOWBITS: 15,
  Z_MIN_CHUNK: 64,
  Z_MAX_CHUNK: Infinity,
  Z_DEFAULT_CHUNK: 16384,
  Z_MIN_MEMLEVEL: 1,
  Z_MAX_MEMLEVEL: 9,
  Z_DEFAULT_MEMLEVEL: 8,
  Z_MIN_LEVEL: -1,
  Z_MAX_LEVEL: 9,
  Z_DEFAULT_LEVEL: -1,
  BROTLI_OPERATION_PROCESS: 0,
  BROTLI_OPERATION_FLUSH: 1,
  BROTLI_OPERATION_FINISH: 2,
  BROTLI_OPERATION_EMIT_METADATA: 3,
  BROTLI_MODE_GENERIC: 0,
  BROTLI_MODE_TEXT: 1,
  BROTLI_MODE_FONT: 2,
  BROTLI_DEFAULT_MODE: 0,
  BROTLI_MIN_QUALITY: 0,
  BROTLI_MAX_QUALITY: 11,
  BROTLI_DEFAULT_QUALITY: 11,
  BROTLI_MIN_WINDOW_BITS: 10,
  BROTLI_MAX_WINDOW_BITS: 24,
  BROTLI_LARGE_MAX_WINDOW_BITS: 30,
  BROTLI_DEFAULT_WINDOW: 22,
  BROTLI_MIN_INPUT_BLOCK_BITS: 16,
  BROTLI_MAX_INPUT_BLOCK_BITS: 24,
  BROTLI_PARAM_MODE: 0,
  BROTLI_PARAM_QUALITY: 1,
  BROTLI_PARAM_LGWIN: 2,
  BROTLI_PARAM_LGBLOCK: 3,
  BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
  BROTLI_PARAM_SIZE_HINT: 5,
  BROTLI_PARAM_LARGE_WINDOW: 6,
  BROTLI_PARAM_NPOSTFIX: 7,
  BROTLI_PARAM_NDIRECT: 8,
  BROTLI_DECODER_RESULT_ERROR: 0,
  BROTLI_DECODER_RESULT_SUCCESS: 1,
  BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
  BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
  BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
  BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
  BROTLI_DECODER_NO_ERROR: 0,
  BROTLI_DECODER_SUCCESS: 1,
  BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
  BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
  BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
  BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
  BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
  BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
  BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
  BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
  BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
  BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
  BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
  BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
  BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
  BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
  BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
  BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
  BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))

@ -0,0 +1,348 @@
|
|||
'use strict'
|
||||
|
||||
const assert = require('assert')
|
||||
const Buffer = require('buffer').Buffer
|
||||
const realZlib = require('zlib')
|
||||
|
||||
const constants = exports.constants = require('./constants.js')
|
||||
const Minipass = require('minipass')
|
||||
|
||||
const OriginalBufferConcat = Buffer.concat
|
||||
|
||||
const _superWrite = Symbol('_superWrite')
|
||||
class ZlibError extends Error {
|
||||
constructor (err) {
|
||||
super('zlib: ' + err.message)
|
||||
this.code = err.code
|
||||
this.errno = err.errno
|
||||
/* istanbul ignore if */
|
||||
if (!this.code)
|
||||
this.code = 'ZLIB_ERROR'
|
||||
|
||||
this.message = 'zlib: ' + err.message
|
||||
Error.captureStackTrace(this, this.constructor)
|
||||
}
|
||||
|
||||
get name () {
|
||||
return 'ZlibError'
|
||||
}
|
||||
}
|
||||
|
||||
// the Zlib class they all inherit from
|
||||
// This thing manages the queue of requests, and returns
|
||||
// true or false if there is anything in the queue when
|
||||
// you call the .write() method.
|
||||
const _opts = Symbol('opts')
|
||||
const _flushFlag = Symbol('flushFlag')
|
||||
const _finishFlushFlag = Symbol('finishFlushFlag')
|
||||
const _fullFlushFlag = Symbol('fullFlushFlag')
|
||||
const _handle = Symbol('handle')
|
||||
const _onError = Symbol('onError')
|
||||
const _sawError = Symbol('sawError')
|
||||
const _level = Symbol('level')
|
||||
const _strategy = Symbol('strategy')
|
||||
const _ended = Symbol('ended')
|
||||
const _defaultFullFlush = Symbol('_defaultFullFlush')
|
||||
|
||||
class ZlibBase extends Minipass {
|
||||
constructor (opts, mode) {
|
||||
if (!opts || typeof opts !== 'object')
|
||||
throw new TypeError('invalid options for ZlibBase constructor')
|
||||
|
||||
super(opts)
|
||||
this[_sawError] = false
|
||||
this[_ended] = false
|
||||
this[_opts] = opts
|
||||
|
||||
this[_flushFlag] = opts.flush
|
||||
this[_finishFlushFlag] = opts.finishFlush
|
||||
// this will throw if any options are invalid for the class selected
|
||||
try {
|
||||
this[_handle] = new realZlib[mode](opts)
|
||||
} catch (er) {
|
||||
// make sure that all errors get decorated properly
|
||||
throw new ZlibError(er)
|
||||
}
|
||||
|
||||
this[_onError] = (err) => {
|
||||
// no sense raising multiple errors, since we abort on the first one.
|
||||
if (this[_sawError])
|
||||
return
|
||||
|
||||
this[_sawError] = true
|
||||
|
||||
// there is no way to cleanly recover.
|
||||
// continuing only obscures problems.
|
||||
this.close()
|
||||
this.emit('error', err)
|
||||
}
|
||||
|
||||
this[_handle].on('error', er => this[_onError](new ZlibError(er)))
|
||||
this.once('end', () => this.close)
|
||||
}
|
||||
|
||||
  close () {
    if (this[_handle]) {
      this[_handle].close()
      this[_handle] = null
      this.emit('close')
    }
  }

  reset () {
    if (!this[_sawError]) {
      assert(this[_handle], 'zlib binding closed')
      return this[_handle].reset()
    }
  }

  flush (flushFlag) {
    if (this.ended)
      return

    if (typeof flushFlag !== 'number')
      flushFlag = this[_fullFlushFlag]
    this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
  }

  end (chunk, encoding, cb) {
    if (chunk)
      this.write(chunk, encoding)
    this.flush(this[_finishFlushFlag])
    this[_ended] = true
    return super.end(null, null, cb)
  }

  get ended () {
    return this[_ended]
  }

  write (chunk, encoding, cb) {
    // process the chunk using the sync process
    // then super.write() all the outputted chunks
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (typeof chunk === 'string')
      chunk = Buffer.from(chunk, encoding)

    if (this[_sawError])
      return
    assert(this[_handle], 'zlib binding closed')

    // _processChunk tries to .close() the native handle after it's done, so we
    // intercept that by temporarily making it a no-op.
    const nativeHandle = this[_handle]._handle
    const originalNativeClose = nativeHandle.close
    nativeHandle.close = () => {}
    const originalClose = this[_handle].close
    this[_handle].close = () => {}
    // It also calls `Buffer.concat()` at the end, which may be convenient
    // for some, but which we are not interested in as it slows us down.
    Buffer.concat = (args) => args
    let result
    try {
      const flushFlag = typeof chunk[_flushFlag] === 'number'
        ? chunk[_flushFlag] : this[_flushFlag]
      result = this[_handle]._processChunk(chunk, flushFlag)
      // if we don't throw, reset it back how it was
      Buffer.concat = OriginalBufferConcat
    } catch (err) {
      // or if we do, put Buffer.concat() back before we emit error
      // Error events call into user code, which may call Buffer.concat()
      Buffer.concat = OriginalBufferConcat
      this[_onError](new ZlibError(err))
    } finally {
      if (this[_handle]) {
        // Core zlib resets `_handle` to null after attempting to close the
        // native handle. Our no-op handler prevented actual closure, but we
        // need to restore the `._handle` property.
        this[_handle]._handle = nativeHandle
        nativeHandle.close = originalNativeClose
        this[_handle].close = originalClose
        // `_processChunk()` adds an 'error' listener. If we don't remove it
        // after each call, these handlers start piling up.
        this[_handle].removeAllListeners('error')
        // make sure OUR error listener is still attached, though
      }
    }

    if (this[_handle])
      this[_handle].on('error', er => this[_onError](new ZlibError(er)))

    let writeReturn
    if (result) {
      if (Array.isArray(result) && result.length > 0) {
        // The first buffer is always `handle._outBuffer`, which would be
        // re-used for later invocations; so, we always have to copy that one.
        writeReturn = this[_superWrite](Buffer.from(result[0]))
        for (let i = 1; i < result.length; i++) {
          writeReturn = this[_superWrite](result[i])
        }
      } else {
        writeReturn = this[_superWrite](Buffer.from(result))
      }
    }

    if (cb)
      cb()
    return writeReturn
  }

  [_superWrite] (data) {
    return super.write(data)
  }
}

class Zlib extends ZlibBase {
  constructor (opts, mode) {
    opts = opts || {}

    opts.flush = opts.flush || constants.Z_NO_FLUSH
    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
    super(opts, mode)

    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
    this[_level] = opts.level
    this[_strategy] = opts.strategy
  }

  params (level, strategy) {
    if (this[_sawError])
      return

    if (!this[_handle])
      throw new Error('cannot switch params when binding is closed')

    // no way to test this without also not supporting params at all
    /* istanbul ignore if */
    if (!this[_handle].params)
      throw new Error('not supported in this implementation')

    if (this[_level] !== level || this[_strategy] !== strategy) {
      this.flush(constants.Z_SYNC_FLUSH)
      assert(this[_handle], 'zlib binding closed')
      // .params() calls .flush(), but the latter is always async in the
      // core zlib. We override .flush() temporarily to intercept that and
      // flush synchronously.
      const origFlush = this[_handle].flush
      this[_handle].flush = (flushFlag, cb) => {
        this.flush(flushFlag)
        cb()
      }
      try {
        this[_handle].params(level, strategy)
      } finally {
        this[_handle].flush = origFlush
      }
      /* istanbul ignore else */
      if (this[_handle]) {
        this[_level] = level
        this[_strategy] = strategy
      }
    }
  }
}

// minimal 2-byte header
class Deflate extends Zlib {
  constructor (opts) {
    super(opts, 'Deflate')
  }
}

class Inflate extends Zlib {
  constructor (opts) {
    super(opts, 'Inflate')
  }
}

// gzip - bigger header, same deflate compression
const _portable = Symbol('_portable')
class Gzip extends Zlib {
  constructor (opts) {
    super(opts, 'Gzip')
    this[_portable] = opts && !!opts.portable
  }

  [_superWrite] (data) {
    if (!this[_portable])
      return super[_superWrite](data)

    // we'll always get the header emitted in one first chunk.
    // overwrite the OS indicator byte with 0xFF
    this[_portable] = false
    data[9] = 255
    return super[_superWrite](data)
  }
}

class Gunzip extends Zlib {
  constructor (opts) {
    super(opts, 'Gunzip')
  }
}

// raw - no header
class DeflateRaw extends Zlib {
  constructor (opts) {
    super(opts, 'DeflateRaw')
  }
}

class InflateRaw extends Zlib {
  constructor (opts) {
    super(opts, 'InflateRaw')
  }
}

// auto-detect header.
class Unzip extends Zlib {
  constructor (opts) {
    super(opts, 'Unzip')
  }
}

class Brotli extends ZlibBase {
  constructor (opts, mode) {
    opts = opts || {}

    opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
    opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH

    super(opts, mode)

    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
  }
}

class BrotliCompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliCompress')
  }
}

class BrotliDecompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliDecompress')
  }
}

exports.Deflate = Deflate
exports.Inflate = Inflate
exports.Gzip = Gzip
exports.Gunzip = Gunzip
exports.DeflateRaw = DeflateRaw
exports.InflateRaw = InflateRaw
exports.Unzip = Unzip
/* istanbul ignore else */
if (typeof realZlib.BrotliCompress === 'function') {
  exports.BrotliCompress = BrotliCompress
  exports.BrotliDecompress = BrotliDecompress
} else {
  exports.BrotliCompress = exports.BrotliDecompress = class {
    constructor () {
      throw new Error('Brotli is not supported in this version of Node.js')
    }
  }
}
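Taken together, the classes above compose into one-liners thanks to the Minipass `.concat()` collector (used the same way in the tests below). A minimal consumer sketch, assuming the package is installed as `minizlib`; it is not part of the package itself:

// Sketch: compress a buffer with Gzip, then recover it with Gunzip.
const zlib = require('minizlib')
const input = Buffer.from('hello, hello, hello, hello')

new zlib.Gzip({ portable: true }).end(input).concat().then(zipped =>
  new zlib.Gunzip().end(zipped).concat().then(unzipped => {
    console.log(unzipped.equals(input)) // true
  }))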
File diff suppressed because it is too large
@ -0,0 +1,42 @@
{
  "name": "minizlib",
  "version": "2.1.2",
  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
  "main": "index.js",
  "dependencies": {
    "minipass": "^3.0.0",
    "yallist": "^4.0.0"
  },
  "scripts": {
    "test": "tap test/*.js --100 -J",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --all; git push origin --tags"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minizlib.git"
  },
  "keywords": [
    "zlib",
    "gzip",
    "gunzip",
    "deflate",
    "inflate",
    "compression",
    "zip",
    "unzip"
  ],
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "MIT",
  "devDependencies": {
    "tap": "^14.6.9"
  },
  "files": [
    "index.js",
    "constants.js"
  ],
  "engines": {
    "node": ">= 8"
  }
}
@ -0,0 +1,23 @@
'use strict'
const t = require('tap')
if (!require('zlib').BrotliDecompress) {
  t.plan(0, 'brotli not supported')
  process.exit(0)
}
const zlib = require('../')
const {resolve} = require('path')
const fixture = resolve(__dirname, 'fixtures/person.jpg')
const fs = require('fs')
const file = fs.readFileSync(fixture)
const chunkSize = 16
const deflater = new zlib.BrotliCompress()

const chunk = file.slice(0, chunkSize)
const expectedFull = Buffer.from('iweA/9j/4AAQSkZJRgABAQEASA==', 'base64')

deflater.write(chunk)
deflater.flush()
const bufs = []
deflater.on('data', b => bufs.push(b))
const actualFull = Buffer.concat(bufs)
t.deepEqual(actualFull, expectedFull)
@ -0,0 +1,35 @@
'use strict'
// Test unzipping a file that was created with a non-node brotli lib,
// piped in as fast as possible.

const t = require('tap')
if (!require('zlib').BrotliDecompress) {
  t.plan(0, 'brotli not supported')
  process.exit(0)
}
const zlib = require('../')
const {resolve, basename} = require('path')
const {sync: mkdirp} = require('mkdirp')
const {sync: rimraf} = require('rimraf')
const tmpdir = resolve(__dirname, basename(__filename, '.js'))
mkdirp(tmpdir)
t.teardown(() => rimraf(tmpdir))

const decompress = new zlib.BrotliDecompress()

const fs = require('fs')

const fixture = resolve(__dirname, 'fixtures/person.jpg.br')
const unzippedFixture = resolve(__dirname, 'fixtures/person.jpg')
const outputFile = resolve(tmpdir, 'person.jpg')
const expect = fs.readFileSync(unzippedFixture)
const inp = fs.createReadStream(fixture)
const out = fs.createWriteStream(outputFile)

t.test('decompress and test output', t => {
  inp.pipe(decompress).pipe(out).on('close', () => {
    const actual = fs.readFileSync(outputFile)
    t.deepEqual(actual, expect)
    t.end()
  })
})
@ -0,0 +1,41 @@
'use strict'
// Test compressing and uncompressing a string with brotli

const t = require('tap')
if (!require('zlib').BrotliDecompress) {
  t.plan(0, 'brotli not supported')
  process.exit(0)
}
const zlib = require('../')

const inputString = 'ΩΩLorem ipsum dolor sit amet, consectetur adipiscing eli' +
                    't. Morbi faucibus, purus at gravida dictum, libero arcu ' +
                    'convallis lacus, in commodo libero metus eu nisi. Nullam' +
                    ' commodo, neque nec porta placerat, nisi est fermentum a' +
                    'ugue, vitae gravida tellus sapien sit amet tellus. Aenea' +
                    'n non diam orci. Proin quis elit turpis. Suspendisse non' +
                    ' diam ipsum. Suspendisse nec ullamcorper odio. Vestibulu' +
                    'm arcu mi, sodales non suscipit id, ultrices ut massa. S' +
                    'ed ac sem sit amet arcu malesuada fermentum. Nunc sed. '
const compressedString = 'G/gBQBwHdky2aHV5KK9Snf05//1pPdmNw/7232fnIm1IB' +
                         'K1AA8RsN8OB8Nb7Lpgk3UWWUlzQXZyHQeBBbXMTQXC1j7' +
                         'wg3LJs9LqOGHRH2bj/a2iCTLLx8hBOyTqgoVuD1e+Qqdn' +
                         'f1rkUNyrWq6LtOhWgxP3QUwdhKGdZm3rJWaDDBV7+pDk1' +
                         'MIkrmjp4ma2xVi5MsgJScA3tP1I7mXeby6MELozrwoBQD' +
                         'mVTnEAicZNj4lkGqntJe2qSnGyeMmcFgraK94vCg/4iLu' +
                         'Tw5RhKhnVY++dZ6niUBmRqIutsjf5TzwF5iAg8a9UkjF5' +
                         '2eZ0tB2vo6v8SqVfNMkBmmhxr0NT9LkYF69aEjlYzj7IE' +
                         'KmEUQf1HBogRYhFIt4ymRNEgHAIzOyNEsQM='

t.test('compress then decompress', t =>
  new zlib.BrotliCompress().end(inputString).concat().then(buffer => {
    t.ok(inputString.length > buffer.length, 'buffer is shorter than input')

    return new zlib.BrotliDecompress().end(buffer).concat().then(buffer =>
      t.equal(buffer.toString(), inputString))
  }))

t.test('decompress then check', t =>
  new zlib.BrotliDecompress({ encoding: 'utf8' })
    .end(compressedString, 'base64').concat().then(result =>
      t.equal(result, inputString)))
@ -0,0 +1,78 @@
const t = require('tap')
if (!require('zlib').BrotliDecompress) {
  t.plan(0, 'brotli not supported')
  process.exit(0)
}
const zlib = require('../')
const fs = require('fs')
const {resolve} = require('path')
const fixture = resolve(__dirname, 'fixtures/pss-vectors.json')
const sampleBuffer = fs.readFileSync(fixture)

// Test some brotli-specific properties of the brotli streams that can not
// be easily covered through expanding zlib-only tests.

t.test('set quality param at stream creation', t => {
  // Test setting the quality parameter at stream creation:
  const sizes = []
  for (let quality = zlib.constants.BROTLI_MIN_QUALITY;
       quality <= zlib.constants.BROTLI_MAX_QUALITY;
       quality++) {
    const encoded = new zlib.BrotliCompress({
      params: {
        [zlib.constants.BROTLI_PARAM_QUALITY]: quality
      }
    }).end(sampleBuffer).read()
    sizes.push(encoded.length)
  }

  // Increasing quality should roughly correspond to decreasing compressed size:
  for (let i = 0; i < sizes.length - 1; i++) {
    t.ok(sizes[i + 1] <= sizes[i] * 1.05, // 5% margin of error
      `size at quality ${i + 1} should not exceed size at quality ${i}`)
  }
  t.ok(sizes[0] > sizes[sizes.length - 1], 'first size larger than last')

  t.end()
})

t.test('setting out-of-bounds option values or keys fails', t => {
  // Test that setting out-of-bounds option values or keys fails.
  t.throws(() => {
    new zlib.BrotliCompress({
      params: {
        10000: 0
      }
    })
  }, {
    code: 'ERR_BROTLI_INVALID_PARAM',
    message: '10000 is not a valid Brotli parameter',
  })

  // Test that accidentally using duplicate keys fails.
  t.throws(() => {
    new zlib.BrotliCompress({
      params: {
        '0': 0,
        '00': 0
      }
    })
  }, {
    code: 'ERR_BROTLI_INVALID_PARAM',
    message: '00 is not a valid Brotli parameter'
  })

  t.throws(() => {
    new zlib.BrotliCompress({
      params: {
        // This is a boolean flag
        [zlib.constants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: 42
      }
    })
  }, {
    code: 'ERR_ZLIB_INITIALIZATION_FAILED',
    message: 'Initialization failed'
  })

  t.end()
})
@ -0,0 +1,7 @@
const t = require('tap')
const zlib = require('../')

t.equal(zlib.constants.Z_OK, 0, 'Z_OK should be 0')
zlib.constants.Z_OK = 1
t.equal(zlib.constants.Z_OK, 0, 'Z_OK should still be 0')
t.ok(Object.isFrozen(zlib.constants), 'zlib.constants should be frozen')
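The silent write-through above happens because the test file is not in strict mode; under `'use strict'` the same assignment to a frozen object throws. A small sketch of assumed consumer code, not part of the test suite:

'use strict'
const zlib = require('minizlib')
try {
  zlib.constants.Z_OK = 1 // frozen object: throws in strict mode
} catch (er) {
  console.log(er instanceof TypeError) // true
}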
@ -0,0 +1,45 @@
'use strict'

const zlib = require('../')
const t = require('tap')

// Throws if `opts.chunkSize` is invalid
t.throws(_ => new zlib.Deflate({chunkSize: -Infinity}))

// Confirm that maximum chunk size cannot be exceeded because it is `Infinity`.
t.equal(zlib.constants.Z_MAX_CHUNK, Infinity)

// Throws if `opts.windowBits` is invalid
t.throws(_ => new zlib.Deflate({windowBits: -Infinity, chunkSize: 12345}))
t.throws(_ => new zlib.Deflate({windowBits: Infinity}))

// Throws if `opts.level` is invalid
t.throws(_ => new zlib.Deflate({level: -Infinity}))
t.throws(_ => new zlib.Deflate({level: Infinity}))

// Throws a RangeError if `level` is invalid in `Deflate.prototype.params()`
t.throws(_ => new zlib.Deflate().params(-Infinity))
t.throws(_ => new zlib.Deflate().params(Infinity))

// Throws if `opts.memLevel` is invalid
t.throws(_ => new zlib.Deflate({memLevel: -Infinity}))
t.throws(_ => new zlib.Deflate({memLevel: Infinity}))

// Does not throw if `opts.strategy` is valid
t.doesNotThrow(_ => new zlib.Deflate({strategy: zlib.constants.Z_FILTERED}))
t.doesNotThrow(_ => new zlib.Deflate({strategy: zlib.constants.Z_HUFFMAN_ONLY}))
t.doesNotThrow(_ => new zlib.Deflate({strategy: zlib.constants.Z_RLE}))
t.doesNotThrow(_ => new zlib.Deflate({strategy: zlib.constants.Z_FIXED}))
t.doesNotThrow(_ => new zlib.Deflate({strategy: zlib.constants.Z_DEFAULT_STRATEGY}))

// Throws if `opts.strategy` is the wrong type.
t.throws(_ => new zlib.Deflate({strategy: '' + zlib.constants.Z_RLE}))

// Throws if `opts.strategy` is invalid
t.throws(_ => new zlib.Deflate({strategy: 'this is a bogus strategy'}))

// Throws a TypeError if `strategy` is invalid in `Deflate.prototype.params()`
t.throws(_ => new zlib.Deflate().params(0, 'I am an invalid strategy'))

// Throws if `opts.dictionary` is not a Buffer
t.throws(_ => new zlib.Deflate({dictionary: 'not a buffer'}))
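For contrast with the failure cases above, a sketch of valid option usage and a mid-stream `params()` switch; this is assumed consumer code, not part of the suite, using standard zlib constant names:

const zlib = require('minizlib')
const d = new zlib.Deflate({
  level: zlib.constants.Z_BEST_SPEED,
  strategy: zlib.constants.Z_DEFAULT_STRATEGY,
})
d.write('some data')
// switch settings mid-stream; minizlib flushes synchronously first
d.params(zlib.constants.Z_BEST_COMPRESSION, zlib.constants.Z_FILTERED)
d.end('more data')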
@ -0,0 +1,47 @@
'use strict'
const t = require('tap')
const zlib = require('../')

// String "test" encoded with dictionary "dict".
const input = Buffer.from([0x78, 0xBB, 0x04, 0x09, 0x01, 0xA5])

{
  const stream = new zlib.Inflate()

  stream.on('error', err =>
    t.match(err, {
      message: 'zlib: Missing dictionary',
      errno: 2,
      code: 'Z_NEED_DICT',
    }))

  stream.write(input)
}

{
  const stream = new zlib.Inflate({ dictionary: Buffer.from('fail') })

  stream.on('error', err =>
    t.match(err, {
      message: 'zlib: Bad dictionary',
      errno: 2,
      code: 'Z_NEED_DICT',
    }))

  stream.write(input)
}

{
  const stream = new zlib.InflateRaw({ dictionary: Buffer.from('fail') })

  // It's not possible to separate an invalid dictionary from invalid data
  // when using the raw format.
  stream.on('error', err =>
    t.match(err, {
      message: 'zlib: invalid stored block lengths',
      errno: -3,
      code: 'Z_DATA_ERROR',
    }))

  stream.write(input)
}
@ -0,0 +1,108 @@
'use strict'
// test compression/decompression with dictionary

const t = require('tap')
const zlib = require('../')

const spdyDict = Buffer.from([
  'optionsgetheadpostputdeletetraceacceptaccept-charsetaccept-encodingaccept-',
  'languageauthorizationexpectfromhostif-modified-sinceif-matchif-none-matchi',
  'f-rangeif-unmodifiedsincemax-forwardsproxy-authorizationrangerefererteuser',
  '-agent10010120020120220320420520630030130230330430530630740040140240340440',
  '5406407408409410411412413414415416417500501502503504505accept-rangesageeta',
  'glocationproxy-authenticatepublicretry-afterservervarywarningwww-authentic',
  'ateallowcontent-basecontent-encodingcache-controlconnectiondatetrailertran',
  'sfer-encodingupgradeviawarningcontent-languagecontent-lengthcontent-locati',
  'oncontent-md5content-rangecontent-typeetagexpireslast-modifiedset-cookieMo',
  'ndayTuesdayWednesdayThursdayFridaySaturdaySundayJanFebMarAprMayJunJulAugSe',
  'pOctNovDecchunkedtext/htmlimage/pngimage/jpgimage/gifapplication/xmlapplic',
  'ation/xhtmltext/plainpublicmax-agecharset=iso-8859-1utf-8gzipdeflateHTTP/1',
  '.1statusversionurl\0'
].join(''))

const input = [
  'HTTP/1.1 200 Ok',
  'Server: node.js',
  'Content-Length: 0',
  ''
].join('\r\n')

t.test('basic dictionary test', t => {
  t.plan(1)
  let output = ''
  const deflate = new zlib.Deflate({ dictionary: spdyDict })
  const inflate = new zlib.Inflate({ dictionary: spdyDict })
  inflate.setEncoding('utf-8')

  deflate.on('data', chunk => inflate.write(chunk))
  inflate.on('data', chunk => output += chunk)
  deflate.on('end', _ => inflate.end())
  inflate.on('end', _ => t.equal(input, output))

  deflate.write(input)
  deflate.end()
})

t.test('deflate reset dictionary test', t => {
  t.plan(1)
  let doneReset = false
  let output = ''
  const deflate = new zlib.Deflate({ dictionary: spdyDict })
  const inflate = new zlib.Inflate({ dictionary: spdyDict })
  inflate.setEncoding('utf-8')

  deflate.on('data', chunk => {
    if (doneReset)
      inflate.write(chunk)
  })
  inflate.on('data', chunk => output += chunk)
  deflate.on('end', _ => inflate.end())
  inflate.on('end', _ => t.equal(input, output))

  deflate.write(input)
  deflate.flush()
  deflate.reset()
  doneReset = true
  deflate.write(input)
  deflate.end()
})

t.test('raw dictionary test', t => {
  t.plan(1)
  let output = ''
  const deflate = new zlib.DeflateRaw({ dictionary: spdyDict })
  const inflate = new zlib.InflateRaw({ dictionary: spdyDict })
  inflate.setEncoding('utf-8')

  deflate.on('data', chunk => inflate.write(chunk))
  inflate.on('data', chunk => output += chunk)
  deflate.on('end', _ => inflate.end())
  inflate.on('end', _ => t.equal(input, output))

  deflate.write(input)
  deflate.end()
})

t.test('deflate raw reset dictionary test', t => {
  t.plan(1)
  let doneReset = false
  let output = ''
  const deflate = new zlib.DeflateRaw({ dictionary: spdyDict })
  const inflate = new zlib.InflateRaw({ dictionary: spdyDict })
  inflate.setEncoding('utf-8')

  deflate.on('data', chunk => {
    if (doneReset)
      inflate.write(chunk)
  })
  inflate.on('data', chunk => output += chunk)
  deflate.on('end', _ => inflate.end())
  inflate.on('end', _ => t.equal(input, output))

  deflate.write(input)
  deflate.flush()
  deflate.reset()
  doneReset = true
  deflate.write(input)
  deflate.end()
})
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.