New upstream version 17.0.3+~cs10.3.7
parent a082893e1f
commit 1d7d8a3275
@@ -0,0 +1,10 @@
/* This file is automatically added by @npmcli/template-oss. Do not edit. */

module.exports = {
  extends: ['@commitlint/config-conventional'],
  rules: {
    'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'deps', 'chore']],
    'header-max-length': [2, 'always', 80],
    'subject-case': [0, 'always', ['lower-case', 'sentence-case', 'start-case']],
  },
}

@@ -0,0 +1,17 @@
/* This file is automatically added by @npmcli/template-oss. Do not edit. */

'use strict'

const { readdirSync: readdir } = require('fs')

const localConfigs = readdir(__dirname)
  .filter((file) => file.startsWith('.eslintrc.local.'))
  .map((file) => `./${file}`)

module.exports = {
  root: true,
  extends: [
    '@npmcli',
    ...localConfigs,
  ],
}

@@ -0,0 +1,8 @@
'use strict'

module.exports = {
  rules: {
    strict: 'error',
    'no-shadow': 0, // XXX: fix this later
  },
}

@@ -1 +1,3 @@
* @npm/cli-team
# This file is automatically added by @npmcli/template-oss. Do not edit.

* @npm/cli-team

@@ -0,0 +1,54 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: Bug
description: File a bug/issue
title: "[BUG] <title>"
labels: [ Bug, Needs Triage ]

body:
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please [search here](./issues) to see if an issue already exists for your problem.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Current Behavior
      description: A clear & concise description of what you're experiencing.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Expected Behavior
      description: A clear & concise description of what you expected to happen.
    validations:
      required: false
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Steps to reproduce the behavior.
      value: |
        1. In this environment...
        2. With this config...
        3. Run '...'
        4. See error...
    validations:
      required: false
  - type: textarea
    attributes:
      label: Environment
      description: |
        examples:
          - **npm**: 7.6.3
          - **Node**: 13.14.0
          - **OS**: Ubuntu 20.04
          - **platform**: Macbook Pro
      value: |
        - npm:
        - Node:
        - OS:
        - platform:
    validations:
      required: false

@@ -0,0 +1,3 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

blank_issues_enabled: true

@@ -0,0 +1,17 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

version: 2

updates:
  - package-ecosystem: npm
    directory: /
    schedule:
      interval: daily
    allow:
      - dependency-type: direct
    versioning-strategy: increase-if-necessary
    commit-message:
      prefix: deps
      prefix-development: chore
    labels:
      - "Dependencies"

@@ -0,0 +1,32 @@
{
  "//@npmcli/template-oss": "This file is automatically added by @npmcli/template-oss. Do not edit.",
  "problemMatcher": [
    {
      "owner": "tap",
      "pattern": [
        {
          "regexp": "^\\s*not ok \\d+ - (.*)",
          "message": 1
        },
        {
          "regexp": "^\\s*---"
        },
        {
          "regexp": "^\\s*at:"
        },
        {
          "regexp": "^\\s*line:\\s*(\\d+)",
          "line": 1
        },
        {
          "regexp": "^\\s*column:\\s*(\\d+)",
          "column": 1
        },
        {
          "regexp": "^\\s*file:\\s*(.*)",
          "file": 1
        }
      ]
    }
  ]
}

@@ -1,2 +1,2 @@
---
_extends: 'open-source-project-boilerplate'
_extends: '.github:npm-cli/settings.yml'

@@ -0,0 +1,39 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: Audit

on:
  workflow_dispatch:
  schedule:
    # "At 08:00 UTC (01:00 PT) on Monday" https://crontab.guru/#0_8_*_*_1
    - cron: "0 8 * * 1"

jobs:
  audit:
    name: Audit Dependencies
    if: github.repository_owner == 'npm'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund --package-lock
      - name: Run Production Audit
        run: npm audit --omit=dev
      - name: Run Full Audit
        run: npm audit --audit-level=none

@@ -0,0 +1,213 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: CI - Release

on:
  workflow_dispatch:
    inputs:
      ref:
        required: true
        type: string
        default: main
  workflow_call:
    inputs:
      ref:
        required: true
        type: string
      check-sha:
        required: true
        type: string

jobs:
  lint-all:
    name: Lint All
    if: github.repository_owner == 'npm'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Get Workflow Job
        uses: actions/github-script@v6
        if: inputs.check-sha
        id: check-output
        env:
          JOB_NAME: "Lint All"
          MATRIX_NAME: ""
        with:
          script: |
            const { owner, repo } = context.repo

            const { data } = await github.rest.actions.listJobsForWorkflowRun({
              owner,
              repo,
              run_id: context.runId,
              per_page: 100
            })

            const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
            const job = data.jobs.find(j => j.name.endsWith(jobName))
            const jobUrl = job?.html_url

            const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`

            let summary = `This check is assosciated with ${shaUrl}\n\n`

            if (jobUrl) {
              summary += `For run logs, click here: ${jobUrl}`
            } else {
              summary += `Run logs could not be found for a job with name: "${jobName}"`
            }

            return { summary }
      - name: Create Check
        uses: LouisBrunner/checks-action@v1.3.1
        id: check
        if: inputs.check-sha
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: in_progress
          name: Lint All
          sha: ${{ inputs.check-sha }}
          output: ${{ steps.check-output.outputs.result }}
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.ref }}
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Lint
        run: npm run lint --ignore-scripts
      - name: Post Lint
        run: npm run postlint --ignore-scripts
      - name: Conclude Check
        uses: LouisBrunner/checks-action@v1.3.1
        if: steps.check.outputs.check_id && always()
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          conclusion: ${{ job.status }}
          check_id: ${{ steps.check.outputs.check_id }}

  test-all:
    name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
    if: github.repository_owner == 'npm'
    strategy:
      fail-fast: false
      matrix:
        platform:
          - name: Linux
            os: ubuntu-latest
            shell: bash
          - name: macOS
            os: macos-latest
            shell: bash
        node-version:
          - 14.17.0
          - 14.x
          - 16.13.0
          - 16.x
          - 18.0.0
          - 18.x
    runs-on: ${{ matrix.platform.os }}
    defaults:
      run:
        shell: ${{ matrix.platform.shell }}
    steps:
      - name: Get Workflow Job
        uses: actions/github-script@v6
        if: inputs.check-sha
        id: check-output
        env:
          JOB_NAME: "Test All"
          MATRIX_NAME: " - ${{ matrix.platform.name }} - ${{ matrix.node-version }}"
        with:
          script: |
            const { owner, repo } = context.repo

            const { data } = await github.rest.actions.listJobsForWorkflowRun({
              owner,
              repo,
              run_id: context.runId,
              per_page: 100
            })

            const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
            const job = data.jobs.find(j => j.name.endsWith(jobName))
            const jobUrl = job?.html_url

            const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ inputs.check-sha }}`

            let summary = `This check is assosciated with ${shaUrl}\n\n`

            if (jobUrl) {
              summary += `For run logs, click here: ${jobUrl}`
            } else {
              summary += `Run logs could not be found for a job with name: "${jobName}"`
            }

            return { summary }
      - name: Create Check
        uses: LouisBrunner/checks-action@v1.3.1
        id: check
        if: inputs.check-sha
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: in_progress
          name: Test All - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
          sha: ${{ inputs.check-sha }}
          output: ${{ steps.check-output.outputs.result }}
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.ref }}
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - name: Update Windows npm
        # node 12 and 14 ship with npm@6, which is known to fail when updating itself in windows
        if: matrix.platform.os == 'windows-latest' && (startsWith(matrix.node-version, '12.') || startsWith(matrix.node-version, '14.'))
        run: |
          curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz
          tar xf npm-7.5.4.tgz
          cd package
          node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz
          cd ..
          rmdir /s /q package
      - name: Install npm@7
        if: startsWith(matrix.node-version, '10.')
        run: npm i --prefer-online --no-fund --no-audit -g npm@7
      - name: Install npm@latest
        if: ${{ !startsWith(matrix.node-version, '10.') }}
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Add Problem Matcher
        run: echo "::add-matcher::.github/matchers/tap.json"
      - name: Test
        run: npm test --ignore-scripts
      - name: Conclude Check
        uses: LouisBrunner/checks-action@v1.3.1
        if: steps.check.outputs.check_id && always()
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          conclusion: ${{ job.status }}
          check_id: ${{ steps.check.outputs.check_id }}

@@ -1,95 +1,104 @@
---
################################################################################
# Template - Node CI
#
# Description:
#   This contains the basic information to: install dependencies, run tests,
#   get coverage, and run linting on a nodejs project. This template will run
#   over the MxN matrix of all operating systems, and all current LTS versions
#   of NodeJS.
#
# Dependencies:
#   This template assumes that your project is using the `tap` module for
#   testing. If you're not using this module, then the step that runs your
#   coverage will need to be adjusted.
#
################################################################################
name: Node CI
# This file is automatically added by @npmcli/template-oss. Do not edit.

on: [push, pull_request]
name: CI

on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      - main
      - latest
  schedule:
    # "At 09:00 UTC (02:00 PT) on Monday" https://crontab.guru/#0_9_*_*_1
    - cron: "0 9 * * 1"

jobs:
  build:
  lint:
    name: Lint
    if: github.repository_owner == 'npm'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Lint
        run: npm run lint --ignore-scripts
      - name: Post Lint
        run: npm run postlint --ignore-scripts

  test:
    name: Test - ${{ matrix.platform.name }} - ${{ matrix.node-version }}
    if: github.repository_owner == 'npm'
    strategy:
      fail-fast: false
      matrix:
        node-version: [10.x, 12.x, 13.x]
        os: [ubuntu-latest, macOS-latest]

    runs-on: ${{ matrix.os }}

        platform:
          - name: Linux
            os: ubuntu-latest
            shell: bash
          - name: macOS
            os: macos-latest
            shell: bash
        node-version:
          - 14.17.0
          - 14.x
          - 16.13.0
          - 16.x
          - 18.0.0
          - 18.x
    runs-on: ${{ matrix.platform.os }}
    defaults:
      run:
        shell: ${{ matrix.platform.shell }}
    steps:
      # Checkout the repository
      - uses: actions/checkout@v2
      # Installs the specific version of Node.js
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v1
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}

      ################################################################################
      # Install Dependencies
      #
      # ASSUMPTIONS:
      # - The project has a package-lock.json file
      #
      # Simply run the tests for the project.
      ################################################################################
      - name: Install dependencies
        run: npm ci

      ################################################################################
      # Run Testing
      #
      # ASSUMPTIONS:
      # - The project has `tap` as a devDependency
      # - There is a script called "test" in the package.json
      #
      # Simply run the tests for the project.
      ################################################################################
      - name: Run tests
        run: npm test -- --no-coverage

      ################################################################################
      # Run coverage check
      #
      # ASSUMPTIONS:
      # - The project has `tap` as a devDependency
      # - There is a script called "coverage" in the package.json
      #
      # Coverage should only be posted once, we are choosing the latest LTS of
      # node, and ubuntu as the matrix point to post coverage from. We limit
      # to the 'push' event so that coverage ins't posted twice from the
      # pull-request event, and push event (line 3).
      ################################################################################
      - name: Run coverage report
        if: github.event_name == 'push' && matrix.node-version == '12.x' && matrix.os == 'ubuntu-latest'
        run: npm run coverage
        env:
          # The environment variable name is leveraged by `tap`
          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}

      ################################################################################
      # Run linting
      #
      # ASSUMPTIONS:
      # - There is a script called "lint" in the package.json
      #
      # We run linting AFTER we run testing and coverage checks, because if a step
      # fails in an GitHub Action, all other steps are not run. We don't want to
      # fail to run tests or coverage because of linting. It should be the lowest
      # priority of all the steps.
      ################################################################################
      - name: Run linter
        if: github.event_name == 'push' && matrix.node-version == '12.x' && matrix.os == 'ubuntu-latest'
        run: npm run lint
      - name: Update Windows npm
        # node 12 and 14 ship with npm@6, which is known to fail when updating itself in windows
        if: matrix.platform.os == 'windows-latest' && (startsWith(matrix.node-version, '12.') || startsWith(matrix.node-version, '14.'))
        run: |
          curl -sO https://registry.npmjs.org/npm/-/npm-7.5.4.tgz
          tar xf npm-7.5.4.tgz
          cd package
          node lib/npm.js install --no-fund --no-audit -g ..\npm-7.5.4.tgz
          cd ..
          rmdir /s /q package
      - name: Install npm@7
        if: startsWith(matrix.node-version, '10.')
        run: npm i --prefer-online --no-fund --no-audit -g npm@7
      - name: Install npm@latest
        if: ${{ !startsWith(matrix.node-version, '10.') }}
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Add Problem Matcher
        run: echo "::add-matcher::.github/matchers/tap.json"
      - name: Test
        run: npm test --ignore-scripts

@@ -0,0 +1,38 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: CodeQL

on:
  push:
    branches:
      - main
      - latest
  pull_request:
    branches:
      - main
      - latest
  schedule:
    # "At 10:00 UTC (03:00 PT) on Monday" https://crontab.guru/#0_10_*_*_1
    - cron: "0 10 * * 1"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: javascript
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2

@@ -0,0 +1,121 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: Post Dependabot

on: pull_request

permissions:
  contents: write

jobs:
  template-oss:
    name: template-oss
    if: github.repository_owner == 'npm' && github.actor == 'dependabot[bot]'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Fetch Dependabot Metadata
        id: metadata
        uses: dependabot/fetch-metadata@v1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      # Dependabot can update multiple directories so we output which directory
      # it is acting on so we can run the command for the correct root or workspace
      - name: Get Dependabot Directory
        if: contains(steps.metadata.outputs.dependency-names, '@npmcli/template-oss')
        id: flags
        run: |
          dependabot_dir="${{ steps.metadata.outputs.directory }}"
          if [[ "$dependabot_dir" == "/" ]]; then
            echo "::set-output name=workspace::-iwr"
          else
            # strip leading slash from directory so it works as a
            # a path to the workspace flag
            echo "::set-output name=workspace::-w ${dependabot_dir#/}"
          fi

      - name: Apply Changes
        if: steps.flags.outputs.workspace
        id: apply
        run: |
          npm run template-oss-apply ${{ steps.flags.outputs.workspace }}
          if [[ `git status --porcelain` ]]; then
            echo "::set-output name=changes::true"
          fi
          # This only sets the conventional commit prefix. This workflow can't reliably determine
          # what the breaking change is though. If a BREAKING CHANGE message is required then
          # this PR check will fail and the commit will be amended with stafftools
          if [[ "${{ steps.metadata.outputs.update-type }}" == "version-update:semver-major" ]]; then
            prefix='feat!'
          else
            prefix='chore'
          fi
          echo "::set-output name=message::$prefix: postinstall for dependabot template-oss PR"

      # This step will fail if template-oss has made any workflow updates. It is impossible
      # for a workflow to update other workflows. In the case it does fail, we continue
      # and then try to apply only a portion of the changes in the next step
      - name: Push All Changes
        if: steps.apply.outputs.changes
        id: push
        continue-on-error: true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          git commit -am "${{ steps.apply.outputs.message }}"
          git push

      # If the previous step failed, then reset the commit and remove any workflow changes
      # and attempt to commit and push again. This is helpful because we will have a commit
      # with the correct prefix that we can then --amend with @npmcli/stafftools later.
      - name: Push All Changes Except Workflows
        if: steps.apply.outputs.changes && steps.push.outcome == 'failure'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          git reset HEAD~
          git checkout HEAD -- .github/workflows/
          git clean -fd .github/workflows/
          git commit -am "${{ steps.apply.outputs.message }}"
          git push

      # Check if all the necessary template-oss changes were applied. Since we continued
      # on errors in one of the previous steps, this check will fail if our follow up
      # only applied a portion of the changes and we need to followup manually.
      #
      # Note that this used to run `lint` and `postlint` but that will fail this action
      # if we've also shipped any linting changes separate from template-oss. We do
      # linting in another action, so we want to fail this one only if there are
      # template-oss changes that could not be applied.
      - name: Check Changes
        if: steps.apply.outputs.changes
        run: |
          npm exec --offline ${{ steps.flags.outputs.workspace }} -- template-oss-check

      - name: Fail on Breaking Change
        if: steps.apply.outputs.changes && startsWith(steps.apply.outputs.message, 'feat!')
        run: |
          echo "This PR has a breaking change. Run 'npx -p @npmcli/stafftools gh template-oss-fix'"
          echo "for more information on how to fix this with a BREAKING CHANGE footer."
          exit 1

@@ -0,0 +1,48 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: Pull Request

on:
  pull_request:
    types:
      - opened
      - reopened
      - edited
      - synchronize

jobs:
  commitlint:
    name: Lint Commits
    if: github.repository_owner == 'npm'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Run Commitlint on Commits
        id: commit
        continue-on-error: true
        run: |
          npx --offline commitlint -V --from origin/${{ github.base_ref }} --to ${{ github.event.pull_request.head.sha }}
      - name: Run Commitlint on PR Title
        if: steps.commit.outcome == 'failure'
        run: |
          echo ${{ github.event.pull_request.title }} | npx --offline commitlint -V

@@ -0,0 +1,299 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

name: Release

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - latest
      - release/v*

permissions:
  contents: write
  pull-requests: write
  checks: write

jobs:
  release:
    outputs:
      pr: ${{ steps.release.outputs.pr }}
      releases: ${{ steps.release.outputs.releases }}
      release-flags: ${{ steps.release.outputs.release-flags }}
      branch: ${{ steps.release.outputs.pr-branch }}
      pr-number: ${{ steps.release.outputs.pr-number }}
      comment-id: ${{ steps.pr-comment.outputs.result }}
      check-id: ${{ steps.check.outputs.check_id }}
    name: Release
    if: github.repository_owner == 'npm'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Release Please
        id: release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          npx --offline template-oss-release-please ${{ github.ref_name }} ${{ github.event_name }}
      - name: Post Pull Request Comment
        if: steps.release.outputs.pr-number
        uses: actions/github-script@v6
        id: pr-comment
        env:
          PR_NUMBER: ${{ steps.release.outputs.pr-number }}
          REF_NAME: ${{ github.ref_name }}
        with:
          script: |
            const { REF_NAME, PR_NUMBER } = process.env
            const repo = { owner: context.repo.owner, repo: context.repo.repo }
            const issue = { ...repo, issue_number: PR_NUMBER }

            const { data: workflow } = await github.rest.actions.getWorkflowRun({ ...repo, run_id: context.runId })

            let body = '## Release Manager\n\n'

            const comments = await github.paginate(github.rest.issues.listComments, issue)
            let commentId = comments?.find(c => c.user.login === 'github-actions[bot]' && c.body.startsWith(body))?.id

            body += `Release workflow run: ${workflow.html_url}\n\n#### Force CI to Update This Release\n\n`
            body += `This PR will be updated and CI will run for every non-\`chore:\` commit that is pushed to \`main\`. `
            body += `To force CI to update this PR, run this command:\n\n`
            body += `\`\`\`\ngh workflow run release.yml -r ${REF_NAME}\n\`\`\``

            if (commentId) {
              await github.rest.issues.updateComment({ ...repo, comment_id: commentId, body })
            } else {
              const { data: comment } = await github.rest.issues.createComment({ ...issue, body })
              commentId = comment?.id
            }

            return commentId
      - name: Get Workflow Job
        uses: actions/github-script@v6
        if: steps.release.outputs.pr-sha
        id: check-output
        env:
          JOB_NAME: "Release"
          MATRIX_NAME: ""
        with:
          script: |
            const { owner, repo } = context.repo

            const { data } = await github.rest.actions.listJobsForWorkflowRun({
              owner,
              repo,
              run_id: context.runId,
              per_page: 100
            })

            const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
            const job = data.jobs.find(j => j.name.endsWith(jobName))
            const jobUrl = job?.html_url

            const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.release.outputs.pr-sha }}`

            let summary = `This check is assosciated with ${shaUrl}\n\n`

            if (jobUrl) {
              summary += `For run logs, click here: ${jobUrl}`
            } else {
              summary += `Run logs could not be found for a job with name: "${jobName}"`
            }

            return { summary }
      - name: Create Check
        uses: LouisBrunner/checks-action@v1.3.1
        id: check
        if: steps.release.outputs.pr-sha
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: in_progress
          name: Release
          sha: ${{ steps.release.outputs.pr-sha }}
          output: ${{ steps.check-output.outputs.result }}

  update:
    needs: release
    outputs:
      sha: ${{ steps.commit.outputs.sha }}
      check-id: ${{ steps.check.outputs.check_id }}
    name: Update - Release
    if: github.repository_owner == 'npm' && needs.release.outputs.pr
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ needs.release.outputs.branch }}
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Run Post Pull Request Actions
        env:
          RELEASE_PR_NUMBER: ${{ needs.release.outputs.pr-number }}
          RELEASE_COMMENT_ID: ${{ needs.release.outputs.comment-id }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          npm exec --offline -- template-oss-release-manager --lockfile=false
          npm run rp-pull-request --ignore-scripts --if-present
      - name: Commit
        id: commit
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          git commit --all --amend --no-edit || true
          git push --force-with-lease
          echo "::set-output name=sha::$(git rev-parse HEAD)"
      - name: Get Workflow Job
        uses: actions/github-script@v6
        if: steps.commit.outputs.sha
        id: check-output
        env:
          JOB_NAME: "Update - Release"
          MATRIX_NAME: ""
        with:
          script: |
            const { owner, repo } = context.repo

            const { data } = await github.rest.actions.listJobsForWorkflowRun({
              owner,
              repo,
              run_id: context.runId,
              per_page: 100
            })

            const jobName = process.env.JOB_NAME + process.env.MATRIX_NAME
            const job = data.jobs.find(j => j.name.endsWith(jobName))
            const jobUrl = job?.html_url

            const shaUrl = `${context.serverUrl}/${owner}/${repo}/commit/${{ steps.commit.outputs.sha }}`

            let summary = `This check is assosciated with ${shaUrl}\n\n`

            if (jobUrl) {
              summary += `For run logs, click here: ${jobUrl}`
            } else {
              summary += `Run logs could not be found for a job with name: "${jobName}"`
            }

            return { summary }
      - name: Create Check
        uses: LouisBrunner/checks-action@v1.3.1
        id: check
        if: steps.commit.outputs.sha
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          status: in_progress
          name: Release
          sha: ${{ steps.commit.outputs.sha }}
          output: ${{ steps.check-output.outputs.result }}
      - name: Conclude Check
        uses: LouisBrunner/checks-action@v1.3.1
        if: needs.release.outputs.check-id && always()
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          conclusion: ${{ job.status }}
          check_id: ${{ needs.release.outputs.check-id }}

  ci:
    name: CI - Release
    needs: [ release, update ]
    if: needs.release.outputs.pr
    uses: ./.github/workflows/ci-release.yml
    with:
      ref: ${{ needs.release.outputs.branch }}
      check-sha: ${{ needs.update.outputs.sha }}

  post-ci:
    needs: [ release, update, ci ]
    name: Post CI - Release
    if: github.repository_owner == 'npm' && needs.release.outputs.pr && always()
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Get Needs Result
        id: needs-result
        run: |
          result=""
          if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" ]]; then
            result="failure"
          elif [[ "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
            result="cancelled"
          else
            result="success"
          fi
          echo "::set-output name=result::$result"
      - name: Conclude Check
        uses: LouisBrunner/checks-action@v1.3.1
        if: needs.update.outputs.check-id && always()
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          conclusion: ${{ steps.needs-result.outputs.result }}
          check_id: ${{ needs.update.outputs.check-id }}

  post-release:
    needs: release
    name: Post Release - Release
    if: github.repository_owner == 'npm' && needs.release.outputs.releases
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Git User
        run: |
          git config --global user.email "npm-cli+bot@github.com"
          git config --global user.name "npm CLI robot"
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
      - name: Install npm@latest
        run: npm i --prefer-online --no-fund --no-audit -g npm@latest
      - name: npm Version
        run: npm -v
      - name: Install Dependencies
        run: npm i --ignore-scripts --no-audit --no-fund
      - name: Run Post Release Actions
        env:
          RELEASES: ${{ needs.release.outputs.releases }}
        run: |
          npm run rp-release --ignore-scripts --if-present ${{ join(fromJSON(needs.release.outputs.release-flags), ' ') }}

@@ -0,0 +1,28 @@
# This file is automatically added by @npmcli/template-oss. Do not edit.

# ignore everything in the root
/*

# keep these
!**/.gitignore
!/.commitlintrc.js
!/.eslintrc.js
!/.eslintrc.local.*
!/.github/
!/.gitignore
!/.npmrc
!/.release-please-manifest.json
!/bin/
!/CHANGELOG*
!/CODE_OF_CONDUCT.md
!/docs/
!/lib/
!/LICENSE*
!/map.js
!/package.json
!/README*
!/release-please-config.json
!/scripts/
!/SECURITY.md
!/tap-snapshots/
!/test/

@@ -0,0 +1,3 @@
; This file is automatically added by @npmcli/template-oss. Do not edit.

package-lock=false

@@ -0,0 +1,3 @@
{
  ".": "17.0.3"
}

CHANGELOG.md (158 changed lines)
@@ -1,6 +1,162 @@
# Changelog

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

## [17.0.3](https://github.com/npm/cacache/compare/v17.0.2...v17.0.3) (2022-12-07)

### Dependencies

* [`0dc98f7`](https://github.com/npm/cacache/commit/0dc98f7ca0940ea010ef3ba5257887e36083b3a2) [#156](https://github.com/npm/cacache/pull/156) bump minipass from 3.3.6 to 4.0.0

## [17.0.2](https://github.com/npm/cacache/compare/v17.0.1...v17.0.2) (2022-11-04)

### Bug Fixes

* [`4a7382f`](https://github.com/npm/cacache/commit/4a7382f5e6c72c59587d45167346c1b6e81a3cde) [#152](https://github.com/npm/cacache/pull/152) replace @npmcli/move-file with @npmcli/fs (@lukekarrys)

## [17.0.1](https://github.com/npm/cacache/compare/v17.0.0...v17.0.1) (2022-10-17)

### Dependencies

* [`d3515de`](https://github.com/npm/cacache/commit/d3515dec8ee6305d564389f5e52363637666f718) [#146](https://github.com/npm/cacache/pull/146) bump unique-filename from 2.0.1 to 3.0.0
* [`e57ebd9`](https://github.com/npm/cacache/commit/e57ebd9edcd4ac93df7ccbe1eee66a7a2c41c0a7) [#143](https://github.com/npm/cacache/pull/143) bump ssri from 9.0.1 to 10.0.0
* [`9dd537a`](https://github.com/npm/cacache/commit/9dd537a5ab53f5f84e16ff9e69ebd9f28e3f8c54) [#144](https://github.com/npm/cacache/pull/144) bump @npmcli/move-file from 2.0.1 to 3.0.0

## [17.0.0](https://github.com/npm/cacache/compare/v16.1.3...v17.0.0) (2022-10-13)

### ⚠️ BREAKING CHANGES

* this module no longer attempts to change file ownership automatically
* this package is now async only, all synchronous methods have been removed
* `cacache` is now compatible with the following semver range for node: `^14.17.0 || ^16.13.0 || >=18.0.0`

### Features

* [`479b135`](https://github.com/npm/cacache/commit/479b1352a72ea3a6bc403545e269d3add985c6ee) [#141](https://github.com/npm/cacache/pull/141) do not alter file ownership (#141) (@nlf)
* [`f57bb4d`](https://github.com/npm/cacache/commit/f57bb4d3ec6147843fac673c6578c3d231f336df) [#140](https://github.com/npm/cacache/pull/140) remove sync methods (#140) (@nlf)
* [`cfebcde`](https://github.com/npm/cacache/commit/cfebcdea59e3fc1ff33fbe4b3fa6f05aa765326f) [#133](https://github.com/npm/cacache/pull/133) postinstall for dependabot template-oss PR (@lukekarrys)

## [16.1.3](https://github.com/npm/cacache/compare/v16.1.2...v16.1.3) (2022-08-23)

### Dependencies

* bump unique-filename from 1.1.1 to 2.0.0 ([#123](https://github.com/npm/cacache/issues/123)) ([6235554](https://github.com/npm/cacache/commit/6235554e46d19a9d9af25f87aa797fb85efc9519))

## [16.1.2](https://github.com/npm/cacache/compare/v16.1.1...v16.1.2) (2022-08-15)

### Bug Fixes

* linting ([#121](https://github.com/npm/cacache/issues/121)) ([a683cff](https://github.com/npm/cacache/commit/a683cffdfd956e1a4ac0e5ccbfa30615192e1ea0))

## [16.1.1](https://github.com/npm/cacache/compare/v16.1.0...v16.1.1) (2022-06-02)

### Bug Fixes

* **read:** change lstat to stat to correctly evaluate file size ([#114](https://github.com/npm/cacache/issues/114)) ([e3a2928](https://github.com/npm/cacache/commit/e3a2928e053e19fb6e8e73946ffe3d212e402ba7))

## [16.1.0](https://github.com/npm/cacache/compare/v16.0.7...v16.1.0) (2022-05-17)

### Features

* allow external integrity/size source ([#110](https://github.com/npm/cacache/issues/110)) ([61785e1](https://github.com/npm/cacache/commit/61785e106765f4b44041de318f6e387d93759e60))

### Bug Fixes

* move to async functions where possible ([#106](https://github.com/npm/cacache/issues/106)) ([71d4389](https://github.com/npm/cacache/commit/71d4389ff8a35330c3fedce97761094e243d4faf))

### [16.0.7](https://github.com/npm/cacache/compare/v16.0.6...v16.0.7) (2022-04-27)

### Bug Fixes

* **put:** don't flush if an error happened ([e870016](https://github.com/npm/cacache/commit/e8700167e036f392e5554af2d582caa17e4e7237))
* remove disposer ([76ab648](https://github.com/npm/cacache/commit/76ab64857b6874bc54d542ddd483c526434c0b9b))
* remove fs.copyFile checks ([90776fd](https://github.com/npm/cacache/commit/90776fd4a6c5362ea56a979b9611bdf4391e1fd8))

### [16.0.6](https://github.com/npm/cacache/compare/v16.0.5...v16.0.6) (2022-04-21)

### Bug Fixes

* normalize win32 paths before globbing ([4bdd5d5](https://github.com/npm/cacache/commit/4bdd5d5ce21147d67a46b6d2e1ec65007b31705c))

### [16.0.5](https://github.com/npm/cacache/compare/v16.0.4...v16.0.5) (2022-04-20)

### Dependencies

* bump glob from 7.2.0 to 8.0.1 ([#98](https://github.com/npm/cacache/issues/98)) ([74a11f9](https://github.com/npm/cacache/commit/74a11f9f5a1543d593217078a5357707680e2bb1))

### [16.0.4](https://github.com/npm/cacache/compare/v16.0.3...v16.0.4) (2022-04-05)

### Dependencies

* bump @npmcli/move-file from 1.1.2 to 2.0.0 ([#94](https://github.com/npm/cacache/issues/94)) ([f3d64f6](https://github.com/npm/cacache/commit/f3d64f6c238f99433df260fe52081177bdedee86))
* bump ssri from 8.0.1 to 9.0.0 ([#95](https://github.com/npm/cacache/issues/95)) ([fb44f5f](https://github.com/npm/cacache/commit/fb44f5f6bf85f9ee45ba52eb63088b108eca076d))

### [16.0.3](https://github.com/npm/cacache/compare/v16.0.2...v16.0.3) (2022-03-22)

### Dependencies

* bump @npmcli/fs from 1.1.1 to 2.1.0 ([#88](https://github.com/npm/cacache/issues/88)) ([9c9c91c](https://github.com/npm/cacache/commit/9c9c91ce13b941a12c73b95940c5d0b4f4dbf3d0))
* update lru-cache requirement from ^7.5.1 to ^7.7.1 ([#87](https://github.com/npm/cacache/issues/87)) ([800079f](https://github.com/npm/cacache/commit/800079fc5fd18e624bcc53dae3b5f432033e1096))

### [16.0.2](https://www.github.com/npm/cacache/compare/v16.0.1...v16.0.2) (2022-03-16)

### Bug Fixes

* use lru-cache.clear ([#80](https://www.github.com/npm/cacache/issues/80)) ([a48e020](https://www.github.com/npm/cacache/commit/a48e020ca86d28a569578617cc4c7efb76aa8194))

### [16.0.1](https://www.github.com/npm/cacache/compare/v16.0.0...v16.0.1) (2022-03-15)

### Dependencies

* bump lru-cache from 6.0.0 to 7.5.1 ([#77](https://www.github.com/npm/cacache/issues/77)) ([6a3a886](https://www.github.com/npm/cacache/commit/6a3a8863f079aaccb623c4f8d933c485b82e0671))
* update glob requirement from ^7.1.4 to ^7.2.0 ([#74](https://www.github.com/npm/cacache/issues/74)) ([27f1a63](https://www.github.com/npm/cacache/commit/27f1a63cc14de34585330c7ad50f5ae00b3b5b54))
* update minipass requirement from ^3.1.1 to ^3.1.6 ([#76](https://www.github.com/npm/cacache/issues/76)) ([954a430](https://www.github.com/npm/cacache/commit/954a43056fd01ca3a359581dfe32cdfd0ada5f8d))

## [16.0.0](https://www.github.com/npm/cacache/compare/v15.3.0...v16.0.0) (2022-03-14)

### ⚠ BREAKING CHANGES

* this drops support for node10 and non-LTS versions of node12 and node14.

### Bug Fixes

* move files to lib ([cfa4a79](https://www.github.com/npm/cacache/commit/cfa4a7974e1a2b4c4d00613afe20b1925fbe639a))

### Dependencies

* @npmcli/template-oss@2.9.2 ([6e051a7](https://www.github.com/npm/cacache/commit/6e051a782e18288c51914562ae93b4ce52a81ad1))
* update @npmcli/move-file requirement from ^1.0.1 to ^1.1.2 ([#70](https://www.github.com/npm/cacache/issues/70)) ([ddf797a](https://www.github.com/npm/cacache/commit/ddf797a1906e2b285165e544d5ae29a4bb1514ef))
* update fs-minipass requirement from ^2.0.0 to ^2.1.0 ([#72](https://www.github.com/npm/cacache/issues/72)) ([07a5aa1](https://www.github.com/npm/cacache/commit/07a5aa17a8d2245d69f613f9351d4cf84865ade8))
* update minipass-pipeline requirement from ^1.2.2 to ^1.2.4 ([#69](https://www.github.com/npm/cacache/issues/69)) ([372d1a1](https://www.github.com/npm/cacache/commit/372d1a1533aaa8ea37c4cb98f99f40461c9bddac))
* update mkdirp requirement from ^1.0.3 to ^1.0.4 ([#73](https://www.github.com/npm/cacache/issues/73)) ([5fbd50f](https://www.github.com/npm/cacache/commit/5fbd50f1000e8065d754a7c8c89c1c9747532618))
* update tar requirement from ^6.0.2 to ^6.1.11 ([#71](https://www.github.com/npm/cacache/issues/71)) ([4d35625](https://www.github.com/npm/cacache/commit/4d3562565dc52fe51cc5de2fbffceddb63f65118))

## [15.2.0](https://github.com/npm/cacache/releases/v15.2.0) (2021-05-25)

* [8892a92](https://github.com/npm/cacache/commit/8892a92) add a validateEntry option to compact
* [460b951](https://github.com/npm/cacache/commit/460b951) allow fully deleting indexes

## [15.1.0](https://github.com/npm/cacache/compare/v15.0.6...v15.1.0) (2021-05-19)

### Features

* allow formatEntry to keep entries with no integrity value ([930f531](https://github.com/npm/cacache/commit/930f5313825a84277c531defe53696b8c9f4ef70)), closes [#53](https://github.com/npm/cacache/issues/53)
* expose index.insert, implement and expose index.compact ([c4efb74](https://github.com/npm/cacache/commit/c4efb7427cd40694933a46ef3eb59d32ce4d0eed))

### [15.0.6](https://github.com/npm/cacache/compare/v15.0.5...v15.0.6) (2021-03-22)

### [15.0.5](https://github.com/npm/cacache/compare/v15.0.4...v15.0.5) (2020-07-11)

@@ -0,0 +1,7 @@
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->

All interactions in this repo are covered by the [npm Code of
Conduct](https://docs.npmjs.com/policies/conduct)

The npm cli team may, at its own discretion, moderate, remove, or edit
any interactions such as pull requests, issues, and comments.
README.md (51 changed lines)
@@ -36,6 +36,8 @@ just as easily be used on its own.
  * [`rm.all`](#rm-all)
  * [`rm.entry`](#rm-entry)
  * [`rm.content`](#rm-content)
  * [`index.compact`](#index-compact)
  * [`index.insert`](#index-insert)
* Utilities
  * [`clearMemoized`](#clear-memoized)
  * [`tmp.mkdir`](#tmp-mkdir)

@@ -411,6 +413,19 @@ with an `EINTEGRITY` error.

`algorithms` has no effect if this option is present.

##### `opts.integrityEmitter`

*Streaming only* If present, uses the provided event emitter as a source of
truth for both integrity and size. This allows use cases where integrity is
already being calculated outside of cacache to reuse that data instead of
calculating it a second time.

The emitter must emit both the `'integrity'` and `'size'` events.

NOTE: If this option is provided, you must verify that you receive the correct
integrity value yourself and emit an `'error'` event if there is a mismatch.
[ssri Integrity Streams](https://github.com/npm/ssri#integrity-stream) do this for you when given an expected integrity.

##### `opts.algorithms`

Default: ['sha512']

@@ -456,13 +471,17 @@ cacache.rm.all(cachePath).then(() => {
})
```

#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`
#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key, [opts]) -> Promise`

Alias: `cacache.rm`

Removes the index entry for `key`. Content will still be accessible if
requested directly by content address ([`get.stream.byDigest`](#get-stream)).

By default, this appends a new entry to the index with an integrity of `null`.
If `opts.removeFully` is set to `true` then the index file itself will be
physically deleted rather than appending a `null`.

To remove the content itself (which might still be used by other entries), use
[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
[`verify`](#verify).
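
A hedged usage sketch of the new `[opts]` argument (`cachePath` and the key are placeholders):

```javascript
// Default behavior: append a tombstone entry with a `null` integrity.
await cacache.rm.entry(cachePath, 'my-thing')

// With `removeFully`, the index file for the key is physically deleted instead.
await cacache.rm.entry(cachePath, 'my-thing', { removeFully: true })
```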
@@ -489,6 +508,34 @@ cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
})
```

#### <a name="index-compact"></a> `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`

Uses `matchFn`, which must be a synchronous function that accepts two entries
and returns a boolean indicating whether or not the two entries match, to
deduplicate all entries in the cache for the given `key`.

If `opts.validateEntry` is provided, it will be called as a function with the
only parameter being a single index entry. The function must return a Boolean;
if it returns `true` the entry is considered valid and will be kept in the index,
and if it returns `false` the entry will be removed from the index.

If `opts.validateEntry` is not provided, however, every entry in the index will
be deduplicated and kept until the first `null` integrity is reached, removing
all entries that were written before the `null`.

The deduplicated list of entries is both written to the index, replacing the
existing content, and returned in the Promise.
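
As an illustrative sketch only (the `matchFn` and `validateEntry` shown are example policies, and `cachePath` is a placeholder):

```javascript
// Treat two entries as duplicates when they point at the same content.
const matchFn = (a, b) => a.integrity === b.integrity

// Keep only entries that still carry an integrity value.
const validateEntry = (entry) => entry.integrity !== null

const entries = await cacache.index.compact(cachePath, 'my-thing', matchFn, { validateEntry })
console.log(`kept ${entries.length} deduplicated entries`)
```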
#### <a name="index-insert"></a> `> cacache.index.insert(cache, key, integrity, opts) -> Promise`

Writes an index entry to the cache for the given `key` without writing content.

It is assumed that if you are using this method, you have already stored the content
some other way and you only wish to add a new index to that content. The `metadata`
and `size` properties are read from `opts` and used as part of the index entry.

Returns a Promise resolving to the newly added entry.
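
A hedged sketch, where `integrity` and `size` are assumed to come from wherever the content was actually stored and `cachePath` is a placeholder:

```javascript
// Content already exists elsewhere; only record an index entry that points at it.
const entry = await cacache.index.insert(cachePath, 'my-thing', integrity, {
  metadata: { source: 'externally-stored' },
  size,
})
```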
#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`

Completely resets the in-memory entry cache.

@@ -554,7 +601,7 @@ See: [options](#tmp-options)

```javascript
cacache.tmp.withTmp(cache, dir => {
  return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
  return fs.writeFile(path.join(dir, 'blablabla'), 'blabla contents', { encoding: 'utf8' })
}).then(() => {
  // `dir` no longer exists
})

@@ -0,0 +1,14 @@
<!-- This file is automatically added by @npmcli/template-oss. Do not edit. -->

GitHub takes the security of our software products and services seriously, including the open source code repositories managed through our GitHub organizations, such as [GitHub](https://github.com/GitHub).

If you believe you have found a security vulnerability in this GitHub-owned open source repository, you can report it to us in one of two ways.

If the vulnerability you have found is *not* [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) or if you do not wish to be considered for a bounty reward, please report the issue to us directly using [private vulnerability reporting](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).

If the vulnerability you have found is [in scope for the GitHub Bug Bounty Program](https://bounty.github.com/#scope) and you would like for your finding to be considered for a bounty reward, please submit the vulnerability to us through [HackerOne](https://hackerone.com/github) in order to be eligible to receive a bounty award.

**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**

Thanks for helping make GitHub safe for everyone.

@@ -1,156 +0,0 @@
# Change Log

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

<a name="3.5.2"></a>
## [3.5.2](https://github.com/npm/figgy-pudding/compare/v3.5.1...v3.5.2) (2020-03-24)

<a name="3.5.1"></a>
## [3.5.1](https://github.com/npm/figgy-pudding/compare/v3.5.0...v3.5.1) (2018-08-25)

<a name="3.5.0"></a>
# [3.5.0](https://github.com/npm/figgy-pudding/compare/v3.4.1...v3.5.0) (2018-08-25)

### Bug Fixes

* **node:** get rid of Object.entries to add node6 support back ([074f779](https://github.com/npm/figgy-pudding/commit/074f779))

### Features

* **node:** add node@10 to CI config ([78b8937](https://github.com/npm/figgy-pudding/commit/78b8937))

<a name="3.4.1"></a>
## [3.4.1](https://github.com/npm/figgy-pudding/compare/v3.4.0...v3.4.1) (2018-08-16)

### Bug Fixes

* **forEach:** get forEach to behave like a normal forEach ([c064755](https://github.com/npm/figgy-pudding/commit/c064755))
* **has:** get `in` keyword working right ([fafc5a8](https://github.com/npm/figgy-pudding/commit/fafc5a8))
* **iteration:** fix and test iteration of opts.other keys ([7a76217](https://github.com/npm/figgy-pudding/commit/7a76217))
* **iteration:** use proper args for forEach/toJSON ([974e879](https://github.com/npm/figgy-pudding/commit/974e879))
* **proxy:** make sure proxy corner-cases work ok ([8c66e45](https://github.com/npm/figgy-pudding/commit/8c66e45))
* **set:** fix and test the exceptions to writing ([206793b](https://github.com/npm/figgy-pudding/commit/206793b))

<a name="3.4.0"></a>
# [3.4.0](https://github.com/npm/figgy-pudding/compare/v3.3.0...v3.4.0) (2018-08-16)

### Features

* **iterator:** allow iteration over "other" keys ([3c53323](https://github.com/npm/figgy-pudding/commit/3c53323))

<a name="3.3.0"></a>
# [3.3.0](https://github.com/npm/figgy-pudding/compare/v3.2.1...v3.3.0) (2018-08-16)

### Bug Fixes

* **props:** allow symbols to pass through ([97b3464](https://github.com/npm/figgy-pudding/commit/97b3464))

### Features

* **pudding:** iteration and serialization support ([0aaa50d](https://github.com/npm/figgy-pudding/commit/0aaa50d))

<a name="3.2.1"></a>
## [3.2.1](https://github.com/npm/figgy-pudding/compare/v3.2.0...v3.2.1) (2018-08-15)

### Bug Fixes

* **aliases:** make reverse aliases work correctly ([76a255e](https://github.com/npm/figgy-pudding/commit/76a255e))

<a name="3.2.0"></a>
# [3.2.0](https://github.com/npm/figgy-pudding/compare/v3.1.0...v3.2.0) (2018-07-26)

### Bug Fixes

* **concat:** have concat spit out a proxy, too ([64e3495](https://github.com/npm/figgy-pudding/commit/64e3495))

### Features

* **default:** pass the pudding itself to default fns ([d9d9e09](https://github.com/npm/figgy-pudding/commit/d9d9e09))

<a name="3.1.0"></a>
# [3.1.0](https://github.com/npm/figgy-pudding/compare/v3.0.0...v3.1.0) (2018-04-08)

### Features

* **opts:** allow direct option fetching without .get() ([ca77aad](https://github.com/npm/figgy-pudding/commit/ca77aad))

<a name="3.0.0"></a>
# [3.0.0](https://github.com/npm/figgy-pudding/compare/v2.0.1...v3.0.0) (2018-04-06)

### Bug Fixes

* **ci:** oops -- forgot to update CI config ([7a40563](https://github.com/npm/figgy-pudding/commit/7a40563))
* **get:** make provider lookup order like Object.assign ([33ff89b](https://github.com/npm/figgy-pudding/commit/33ff89b))

### Features

* **concat:** add .concat() method to opts ([d310fce](https://github.com/npm/figgy-pudding/commit/d310fce))

### meta

* drop support for node@4 and node@7 ([9f8a61c](https://github.com/npm/figgy-pudding/commit/9f8a61c))

### BREAKING CHANGES

* node@4 and node@7 are no longer supported
* **get:** shadow order for properties in providers is reversed

<a name="2.0.1"></a>
## [2.0.1](https://github.com/npm/figgy-pudding/compare/v2.0.0...v2.0.1) (2018-03-16)

### Bug Fixes

* **opts:** ignore non-object providers ([7b9c0f8](https://github.com/npm/figgy-pudding/commit/7b9c0f8))

<a name="2.0.0"></a>
# [2.0.0](https://github.com/npm/figgy-pudding/compare/v1.0.0...v2.0.0) (2018-03-16)

### Features

* **api:** overhauled API with new opt handling concept ([e6cc929](https://github.com/npm/figgy-pudding/commit/e6cc929))
* **license:** relicense to ISC ([87479aa](https://github.com/npm/figgy-pudding/commit/87479aa))

### BREAKING CHANGES

* **license:** the license has been changed from CC0-1.0 to ISC.
* **api:** this is a completely different approach than previously
used by this library. See the readme for the new API and an explanation.

@ -1,16 +0,0 @@
|
|||
ISC License
|
||||
|
||||
Copyright (c) npm, Inc.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for
|
||||
any purpose with or without fee is hereby granted, provided that the
|
||||
above copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
|
||||
ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
|
||||
COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
|
||||
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
|
||||
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
|
||||
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
|
||||
USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,268 +0,0 @@
|
|||
# Note: pending imminent deprecation
|
||||
|
||||
**This module will be deprecated once npm v7 is released. Please do not rely
|
||||
on it more than absolutely necessary (ie, only if you are depending on
|
||||
it for use with npm v6 internal dependencies).**
|
||||
|
||||
----
|
||||
|
||||
# figgy-pudding [![npm version](https://img.shields.io/npm/v/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![license](https://img.shields.io/npm/l/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![Travis](https://img.shields.io/travis/npm/figgy-pudding.svg)](https://travis-ci.org/npm/figgy-pudding) [![Coverage Status](https://coveralls.io/repos/github/npm/figgy-pudding/badge.svg?branch=latest)](https://coveralls.io/github/npm/figgy-pudding?branch=latest)
|
||||
|
||||
[`figgy-pudding`](https://github.com/npm/figgy-pudding) is a small JavaScript
|
||||
library for managing and composing cascading options objects -- hiding what
|
||||
needs to be hidden from each layer, without having to do a lot of manual munging
|
||||
and passing of options.
|
||||
|
||||
### The God Object is Dead!
|
||||
### Now Bring Us Some Figgy Pudding!
|
||||
|
||||
## Install
|
||||
|
||||
`$ npm install figgy-pudding`
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [Example](#example)
|
||||
* [Features](#features)
|
||||
* [API](#api)
|
||||
* [`figgyPudding(spec)`](#figgy-pudding)
|
||||
* [`PuddingFactory(values)`](#pudding-factory)
|
||||
* [`opts.get()`](#opts-get)
|
||||
* [`opts.concat()`](#opts-concat)
|
||||
* [`opts.toJSON()`](#opts-to-json)
|
||||
* [`opts.forEach()`](#opts-for-each)
|
||||
* [`opts[Symbol.iterator]()`](#opts-symbol-iterator)
|
||||
* [`opts.entries()`](#opts-entries)
|
||||
* [`opts.keys()`](#opts-keys)
|
||||
* [`opts.values()`](#opts-values)
|
||||
|
||||
### Example
|
||||
|
||||
```javascript
|
||||
// print-package.js
|
||||
const fetch = require('./fetch.js')
|
||||
const puddin = require('figgy-pudding')
|
||||
|
||||
const PrintOpts = puddin({
|
||||
json: { default: false }
|
||||
})
|
||||
|
||||
async function printPkg (name, opts) {
|
||||
// Expected pattern is to call this in every interface function. If `opts` is
|
||||
// not passed in, it will automatically create an (empty) object for it.
|
||||
opts = PrintOpts(opts)
|
||||
const uri = `https://registry.npmjs.com/${name}`
|
||||
const res = await fetch(uri, opts.concat({
|
||||
// Add or override any passed-in configs and pass them down.
|
||||
log: customLogger
|
||||
}))
|
||||
// The following would throw an error, because it's not in PrintOpts:
|
||||
// console.log(opts.log)
|
||||
if (opts.json) {
|
||||
return res.json()
|
||||
} else {
|
||||
return res.text()
|
||||
}
|
||||
}
|
||||
|
||||
console.log(await printPkg('figgy', {
|
||||
// Pass in *all* configs at the toplevel, as a regular object.
|
||||
json: true,
|
||||
cache: './tmp-cache'
|
||||
}))
|
||||
```
|
||||
|
||||
```javascript
|
||||
// fetch.js
|
||||
const puddin = require('figgy-pudding')
|
||||
|
||||
const FetchOpts = puddin({
|
||||
log: { default: require('npmlog') },
|
||||
cache: {}
|
||||
})
|
||||
|
||||
module.exports = async function (..., opts) {
|
||||
opts = FetchOpts(opts)
|
||||
}
|
||||
```
|
||||
|
||||
### Features
|
||||
|
||||
* hide options from layers that didn't ask for them
|
||||
* shared multi-layer options
|
||||
* make sure `opts` argument is available
|
||||
* transparent key access, like normal object keys, through a Proxy. No need for `.get()`!
|
||||
* default values
|
||||
* key aliases
|
||||
* arbitrary key filter functions
|
||||
* key/value iteration
|
||||
* serialization
|
||||
* 100% test coverage using `tap --100`
|
||||
|
||||
### API
|
||||
|
||||
#### <a name="figgy-pudding"></a> `> figgyPudding({ key: { default: val } | String }, [opts]) -> PuddingFactory`
|
||||
|
||||
Defines an Options constructor that can be used to collect only the needed
|
||||
options.
|
||||
|
||||
An optional `default` property for specs can be used to specify default values
|
||||
if nothing was passed in.
|
||||
|
||||
If the value for a spec is a string, it will be treated as an alias to that
|
||||
other key.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
const MyAppOpts = figgyPudding({
|
||||
lg: 'log',
|
||||
log: {
|
||||
default: () => require('npmlog')
|
||||
},
|
||||
cache: {}
|
||||
})
|
||||
```
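
As a rough usage sketch (not part of the original README; `someLogger` is just a stand-in object, and `MyAppOpts` is the factory defined above), the alias lets callers pass either name while code reads through the canonical key:

```javascript
const someLogger = { info () {} } // stand-in for any logger object

const opts = MyAppOpts({ lg: someLogger })

opts.log === someLogger // => true ('lg' is resolved as an alias of 'log')
opts.lg === someLogger  // => true (the alias key itself stays readable)
opts.cache              // => undefined (declared, but no value and no default)
```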
|
||||
|
||||
#### <a name="pudding-factory"></a> `> PuddingFactory(...providers) -> FiggyPudding{}`
|
||||
|
||||
Instantiates an options object defined by `figgyPudding()`, which uses
|
||||
`providers`, in order, to find requested properties.
|
||||
|
||||
Each provider can be either a plain object, a `Map`-like object (that is, one
|
||||
with a `.get()` method) or another figgyPudding `Opts` object.
|
||||
|
||||
When nesting `Opts` objects, their properties will not become available to the
|
||||
new object, but any further nested `Opts` that reference that property _will_ be
|
||||
able to read from their grandparent, as long as they define that key. Default
|
||||
values for nested `Opts` parents will be used, if found.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
const ReqOpts = figgyPudding({
|
||||
follow: {}
|
||||
})
|
||||
|
||||
const opts = ReqOpts({
|
||||
follow: true,
|
||||
log: require('npmlog')
|
||||
})
|
||||
|
||||
opts.follow // => true
|
||||
opts.log // => Error: ReqOpts does not define `log`
|
||||
|
||||
const MoreOpts = figgyPudding({
|
||||
log: {}
|
||||
})
|
||||
MoreOpts(opts).log // => npmlog object (passed in from original plain obj)
|
||||
MoreOpts(opts).follow // => Error: MoreOpts does not define `follow`
|
||||
```
|
||||
|
||||
#### <a name="opts-get"></a> `> opts.get(key) -> Value`
|
||||
|
||||
Gets a value from the options object.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts(config)
|
||||
opts.get('foo') // value of `foo`
|
||||
opts.foo // Proxy-based access through `.get()`
|
||||
```
|
||||
|
||||
#### <a name="opts-concat"></a> `> opts.concat(...moreProviders) -> FiggyPudding{}`
|
||||
|
||||
Creates a new opts object of the same type as `opts` with additional providers.
|
||||
Providers further to the right shadow providers to the left, with properties in
|
||||
the original `opts` being shadowed by the new providers.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1})
|
||||
opts.get('x') // 1
|
||||
opts.concat({x: 2}).get('x') // 2
|
||||
opts.get('x') // 1 (original opts object left intact)
|
||||
```
|
||||
|
||||
#### <a name="opts-to-json"></a> `> opts.toJSON() -> Value`
|
||||
|
||||
Converts `opts` to a plain, JSON-stringifiable JavaScript value. Used internally
|
||||
by `JSON.stringify()` when serializing the object.
|
||||
|
||||
Only keys that are readable by the current pudding type will be serialized.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1})
|
||||
opts.toJSON() // {x: 1}
|
||||
JSON.stringify(opts) // '{"x":1}'
|
||||
```
|
||||
|
||||
#### <a name="opts-for-each"></a> `> opts.forEach((value, key, opts) => {}, thisArg) -> undefined`
|
||||
|
||||
Iterates over the values of `opts`, limited to the keys readable by the current
|
||||
pudding type. `thisArg` will be used to set the `this` argument when calling the
|
||||
`fn`.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1, y: 2})
|
||||
opts.forEach((value, key) => console.log(key, '=', value))
|
||||
```
|
||||
|
||||
#### <a name="opts-entries"></a> `> opts.entries() -> Iterator<[[key, value], ...]>`
|
||||
|
||||
Returns an iterator that iterates over the keys and values in `opts`, limited to
|
||||
the keys readable by the current pudding type. Each iteration returns an array
|
||||
of `[key, value]`.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1, y: 2})
|
||||
[...opts.entries()] // [['x', 1], ['y', 2]]
|
||||
```
|
||||
|
||||
#### <a name="opts-symbol-iterator"></a> `> opts[Symbol.iterator]() -> Iterator<[[key, value], ...]>`
|
||||
|
||||
Returns an iterator that iterates over the keys and values in `opts`, limited to
|
||||
the keys readable by the current pudding type. Each iteration returns an array
|
||||
of `[key, value]`. Makes puddings work natively with JS iteration mechanisms.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1, y: 2})
|
||||
[...opts] // [['x', 1], ['y', 2]]
|
||||
for (let [key, value] of opts) {
|
||||
console.log(key, '=', value)
|
||||
}
|
||||
```
|
||||
|
||||
#### <a name="opts-keys"></a> `> opts.keys() -> Iterator<[key, ...]>`
|
||||
|
||||
Returns an iterator that iterates over the keys in `opts`, limited to the keys
|
||||
readable by the current pudding type.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1, y: 2})
|
||||
[...opts.keys()] // ['x', 'y']
|
||||
```
|
||||
|
||||
#### <a name="opts-values"></a> `> opts.values() -> Iterator<[value, ...]>`
|
||||
|
||||
Returns an iterator that iterates over the values in `opts`, limited to the keys
|
||||
readable by the current pudding type.
|
||||
|
||||
##### Example
|
||||
|
||||
```js
|
||||
const opts = MyOpts({x: 1, y: 2})
|
||||
[...opts.values()] // [1, 2]
|
||||
```
|
|
@ -1,197 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
class FiggyPudding {
|
||||
constructor (specs, opts, providers) {
|
||||
this.__specs = specs || {}
|
||||
Object.keys(this.__specs).forEach(alias => {
|
||||
if (typeof this.__specs[alias] === 'string') {
|
||||
const key = this.__specs[alias]
|
||||
const realSpec = this.__specs[key]
|
||||
if (realSpec) {
|
||||
const aliasArr = realSpec.aliases || []
|
||||
aliasArr.push(alias, key)
|
||||
realSpec.aliases = [...(new Set(aliasArr))]
|
||||
this.__specs[alias] = realSpec
|
||||
} else {
|
||||
throw new Error(`Alias refers to invalid key: ${key} -> ${alias}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
this.__opts = opts || {}
|
||||
this.__providers = reverse((providers).filter(
|
||||
x => x != null && typeof x === 'object'
|
||||
))
|
||||
this.__isFiggyPudding = true
|
||||
}
|
||||
get (key) {
|
||||
return pudGet(this, key, true)
|
||||
}
|
||||
get [Symbol.toStringTag] () { return 'FiggyPudding' }
|
||||
forEach (fn, thisArg = this) {
|
||||
for (let [key, value] of this.entries()) {
|
||||
fn.call(thisArg, value, key, this)
|
||||
}
|
||||
}
|
||||
toJSON () {
|
||||
const obj = {}
|
||||
this.forEach((val, key) => {
|
||||
obj[key] = val
|
||||
})
|
||||
return obj
|
||||
}
|
||||
* entries (_matcher) {
|
||||
for (let key of Object.keys(this.__specs)) {
|
||||
yield [key, this.get(key)]
|
||||
}
|
||||
const matcher = _matcher || this.__opts.other
|
||||
if (matcher) {
|
||||
const seen = new Set()
|
||||
for (let p of this.__providers) {
|
||||
const iter = p.entries ? p.entries(matcher) : entries(p)
|
||||
for (let [key, val] of iter) {
|
||||
if (matcher(key) && !seen.has(key)) {
|
||||
seen.add(key)
|
||||
yield [key, val]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
* [Symbol.iterator] () {
|
||||
for (let [key, value] of this.entries()) {
|
||||
yield [key, value]
|
||||
}
|
||||
}
|
||||
* keys () {
|
||||
for (let [key] of this.entries()) {
|
||||
yield key
|
||||
}
|
||||
}
|
||||
* values () {
|
||||
for (let [, value] of this.entries()) {
|
||||
yield value
|
||||
}
|
||||
}
|
||||
concat (...moreConfig) {
|
||||
return new Proxy(new FiggyPudding(
|
||||
this.__specs,
|
||||
this.__opts,
|
||||
reverse(this.__providers).concat(moreConfig)
|
||||
), proxyHandler)
|
||||
}
|
||||
}
|
||||
try {
|
||||
const util = require('util')
|
||||
FiggyPudding.prototype[util.inspect.custom] = function (depth, opts) {
|
||||
return (
|
||||
this[Symbol.toStringTag] + ' '
|
||||
) + util.inspect(this.toJSON(), opts)
|
||||
}
|
||||
} catch (e) {}
|
||||
|
||||
function BadKeyError (key) {
|
||||
throw Object.assign(new Error(
|
||||
`invalid config key requested: ${key}`
|
||||
), {code: 'EBADKEY'})
|
||||
}
|
||||
|
||||
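// Resolve `key` for a pudding: check each provider (most recently supplied
// first), fall back to any declared aliases, then to the spec's `default`
// (called with the pudding when it is a function). With `validate` set,
// keys not declared in the spec and not matched by `opts.other` throw an
// EBADKEY error.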
function pudGet (pud, key, validate) {
|
||||
let spec = pud.__specs[key]
|
||||
if (validate && !spec && (!pud.__opts.other || !pud.__opts.other(key))) {
|
||||
BadKeyError(key)
|
||||
} else {
|
||||
if (!spec) { spec = {} }
|
||||
let ret
|
||||
for (let p of pud.__providers) {
|
||||
ret = tryGet(key, p)
|
||||
if (ret === undefined && spec.aliases && spec.aliases.length) {
|
||||
for (let alias of spec.aliases) {
|
||||
if (alias === key) { continue }
|
||||
ret = tryGet(alias, p)
|
||||
if (ret !== undefined) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if (ret !== undefined) {
|
||||
break
|
||||
}
|
||||
}
|
||||
if (ret === undefined && spec.default !== undefined) {
|
||||
if (typeof spec.default === 'function') {
|
||||
return spec.default(pud)
|
||||
} else {
|
||||
return spec.default
|
||||
}
|
||||
} else {
|
||||
return ret
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
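// Read `key` from a single provider, which may be another pudding, a
// Map-like object with a .get() method, or a plain object.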
function tryGet (key, p) {
|
||||
let ret
|
||||
if (p.__isFiggyPudding) {
|
||||
ret = pudGet(p, key, false)
|
||||
} else if (typeof p.get === 'function') {
|
||||
ret = p.get(key)
|
||||
} else {
|
||||
ret = p[key]
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
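// Proxy traps that let a pudding be used like a plain object: property reads
// are routed through .get(); writing or deleting config keys throws (use
// .concat() to layer new values instead).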
const proxyHandler = {
|
||||
has (obj, prop) {
|
||||
return prop in obj.__specs && pudGet(obj, prop, false) !== undefined
|
||||
},
|
||||
ownKeys (obj) {
|
||||
return Object.keys(obj.__specs)
|
||||
},
|
||||
get (obj, prop) {
|
||||
if (
|
||||
typeof prop === 'symbol' ||
|
||||
prop.slice(0, 2) === '__' ||
|
||||
prop in FiggyPudding.prototype
|
||||
) {
|
||||
return obj[prop]
|
||||
}
|
||||
return obj.get(prop)
|
||||
},
|
||||
set (obj, prop, value) {
|
||||
if (
|
||||
typeof prop === 'symbol' ||
|
||||
prop.slice(0, 2) === '__'
|
||||
) {
|
||||
obj[prop] = value
|
||||
return true
|
||||
} else {
|
||||
throw new Error('figgyPudding options cannot be modified. Use .concat() instead.')
|
||||
}
|
||||
},
|
||||
deleteProperty () {
|
||||
throw new Error('figgyPudding options cannot be deleted. Use .concat() and shadow them instead.')
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = figgyPudding
|
||||
function figgyPudding (specs, opts) {
|
||||
function factory (...providers) {
|
||||
return new Proxy(new FiggyPudding(
|
||||
specs,
|
||||
opts,
|
||||
providers
|
||||
), proxyHandler)
|
||||
}
|
||||
return factory
|
||||
}
|
||||
|
||||
function reverse (arr) {
|
||||
const ret = []
|
||||
arr.forEach(x => ret.unshift(x))
|
||||
return ret
|
||||
}
|
||||
|
||||
function entries (obj) {
|
||||
return Object.keys(obj).map(k => [k, obj[k]])
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
{
|
||||
"name": "figgy-pudding",
|
||||
"version": "3.5.2",
|
||||
"description": "Delicious, festive, cascading config/opts definitions",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"*.js",
|
||||
"lib"
|
||||
],
|
||||
"scripts": {
|
||||
"prerelease": "npm t",
|
||||
"postrelease": "npm publish && git push --follow-tags",
|
||||
"pretest": "standard",
|
||||
"release": "standard-version -s",
|
||||
"test": "tap -J --100 --coverage test/*.js"
|
||||
},
|
||||
"repository": "https://github.com/npm/figgy-pudding",
|
||||
"keywords": [
|
||||
"config",
|
||||
"options",
|
||||
"yummy"
|
||||
],
|
||||
"author": "Kat Marchán <kzm@sykosomatic.org>",
|
||||
"license": "ISC",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"standard": "^11.0.1",
|
||||
"standard-version": "^4.4.0",
|
||||
"tap": "^12.0.1"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: npm
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
allow:
|
||||
- dependency-type: direct
|
||||
versioning-strategy: increase-if-necessary
|
||||
commit-message:
|
||||
prefix: deps
|
||||
prefix-development: chore
|
||||
labels:
|
||||
- "dependencies"
|
|
@ -0,0 +1,36 @@
|
|||
name: CI
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# every sunday at noon
|
||||
- cron: 0 12 * * 0
|
||||
push:
|
||||
branches: [ main ]
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- run: npm i --prefer-online -g npm@latest
|
||||
- run: npm i --prefer-online
|
||||
- run: npm run lint
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [12.x, 14.x, 16.x]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- run: npm i --prefer-online -g npm@latest
|
||||
- run: npm i --prefer-online
|
||||
- run: npm test
|
|
@ -0,0 +1,2 @@
|
|||
node_modules
|
||||
package-lock.json
|
|
@ -0,0 +1,10 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright © 2020-2022 Michael Garvin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
# @gar/promisify
|
||||
|
||||
### Promisify an entire object or class instance
|
||||
|
||||
This module leverages ES6 Proxy and Reflect to promisify every function in an
|
||||
object or class instance.
|
||||
|
||||
It assumes the callback that the function is expecting is the last
|
||||
parameter, and that it is an error-first callback with only one value,
|
||||
i.e. `(err, value) => ...`. This mirrors node's `util.promisify` method.
|
||||
|
||||
In order that you can use it as a one-stop-shop for all your promisify
|
||||
needs, you can also pass it a function. That function will be
|
||||
promisified as normal using node's built-in `util.promisify` method.
|
||||
|
||||
[node's custom promisified
|
||||
functions](https://nodejs.org/api/util.html#util_custom_promisified_functions)
|
||||
will also be mirrored, further allowing this to be a drop-in replacement
|
||||
for the built-in `util.promisify`.
|
||||
|
||||
### Examples
|
||||
|
||||
Promisify an entire object
|
||||
|
||||
```javascript
|
||||
|
||||
const promisify = require('@gar/promisify')
|
||||
|
||||
class Foo {
|
||||
constructor (attr) {
|
||||
this.attr = attr
|
||||
}
|
||||
|
||||
double (input, cb) {
|
||||
cb(null, input * 2)
|
||||
}
|
||||
|
||||
}

const foo = new Foo('baz')
|
||||
const promisified = promisify(foo)
|
||||
|
||||
console.log(promisified.attr)
|
||||
console.log(await promisified.double(1024))
|
||||
```
|
||||
|
||||
Promisify a function
|
||||
|
||||
```javascript
|
||||
|
||||
const promisify = require('@gar/promisify')
|
||||
|
||||
function foo (a, cb) {
|
||||
if (a !== 'bad') {
|
||||
return cb(null, 'ok')
|
||||
}
|
||||
return cb('not ok')
|
||||
}
|
||||
|
||||
const promisified = promisify(foo)
|
||||
|
||||
// This will resolve to 'ok'
|
||||
promisified('good')
|
||||
|
||||
// this will reject
|
||||
promisified('bad')
|
||||
```
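
Mirror a custom promisified implementation (a sketch that is not part of the original README; the `timers` object and its timings are made up here):

```javascript

const util = require('util')
const promisify = require('@gar/promisify')

const timers = {
  delay (ms, cb) {
    setTimeout(() => cb(null, `callback after ${ms}ms`), ms)
  }
}

// attach a hand-written promise implementation to the method
timers.delay[util.promisify.custom] = (ms) =>
  new Promise((resolve) => setTimeout(() => resolve(`custom after ${ms}ms`), ms))

const promisified = promisify(timers)

// resolves to 'custom after 5ms' because the custom implementation is used
promisified.delay(5).then(console.log)
```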
|
|
@ -0,0 +1,36 @@
|
|||
'use strict'
|
||||
|
||||
const { promisify } = require('util')
|
||||
|
||||
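// Proxy handler: non-function properties pass through unchanged; functions
// are wrapped so their trailing error-first callback becomes a returned
// Promise, honoring util.promisify.custom when a method provides one.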
const handler = {
|
||||
get: function (target, prop, receiver) {
|
||||
if (typeof target[prop] !== 'function') {
|
||||
return target[prop]
|
||||
}
|
||||
if (target[prop][promisify.custom]) {
|
||||
return function () {
|
||||
return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments)
|
||||
}
|
||||
}
|
||||
return function () {
|
||||
return new Promise((resolve, reject) => {
|
||||
Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) {
|
||||
if (err) {
|
||||
return reject(err)
|
||||
}
|
||||
resolve(result)
|
||||
}])
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function (thingToPromisify) {
|
||||
if (typeof thingToPromisify === 'function') {
|
||||
return promisify(thingToPromisify)
|
||||
}
|
||||
if (typeof thingToPromisify === 'object') {
|
||||
return new Proxy(thingToPromisify, handler)
|
||||
}
|
||||
throw new TypeError('Can only promisify functions or objects')
|
||||
}
|
|
@ -0,0 +1,32 @@
|
|||
{
|
||||
"name": "@gar/promisify",
|
||||
"version": "1.1.3",
|
||||
"description": "Promisify an entire class or object",
|
||||
"main": "index.js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/wraithgar/gar-promisify.git"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"lint:fix": "standard --fix",
|
||||
"test": "lab -a @hapi/code -t 100",
|
||||
"posttest": "npm run lint"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"promisify",
|
||||
"all",
|
||||
"class",
|
||||
"object"
|
||||
],
|
||||
"author": "Gar <gar+npm@danger.computer>",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@hapi/code": "^8.0.1",
|
||||
"@hapi/lab": "^24.1.0",
|
||||
"standard": "^16.0.3"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,83 @@
|
|||
'use strict'
|
||||
|
||||
const lab = (exports.lab = require('@hapi/lab').script())
|
||||
const { describe, it } = lab
|
||||
const { expect } = require('@hapi/code')
|
||||
const util = require('util')
|
||||
|
||||
const promisify = require('../')
|
||||
|
||||
class Fixture {
|
||||
constructor (attr) {
|
||||
this.attr = attr
|
||||
this.custom[util.promisify.custom] = function (input1, input2) {
|
||||
return Promise.resolve([this.attr, input1, input2])
|
||||
}
|
||||
}
|
||||
|
||||
single (input, cb) {
|
||||
cb(null, [this.attr, input])
|
||||
}
|
||||
|
||||
custom (input1, input2, cb) {
|
||||
cb(null, this.attr, input1, input2)
|
||||
}
|
||||
|
||||
error (input, cb) {
|
||||
cb(new Error(input))
|
||||
}
|
||||
}
|
||||
|
||||
it('requires a function or object', () => {
|
||||
const throws = () => {
|
||||
promisify('string')
|
||||
}
|
||||
expect(throws).to.throw(TypeError)
|
||||
})
|
||||
|
||||
describe('promisify object', () => {
|
||||
it('non function attribute', () => {
|
||||
const instance = new Fixture('test')
|
||||
const promisified = promisify(instance)
|
||||
expect(promisified.attr).to.equal('test')
|
||||
})
|
||||
|
||||
it('custom promisify', async () => {
|
||||
const instance = new Fixture('test')
|
||||
const promisified = promisify(instance)
|
||||
const custom = await promisified.custom('test one', 'test two')
|
||||
expect(custom).to.equal(['test', 'test one', 'test two'])
|
||||
})
|
||||
|
||||
it('callback success', async () => {
|
||||
const instance = new Fixture('test')
|
||||
const promisified = promisify(instance)
|
||||
const single = await promisified.single('test single')
|
||||
expect(single).to.equal(['test', 'test single'])
|
||||
})
|
||||
|
||||
it('callback success', async () => {
|
||||
const instance = new Fixture('test')
|
||||
const promisified = promisify(instance)
|
||||
const rejects = function () {
|
||||
return promisified.error('test error')
|
||||
}
|
||||
expect(rejects()).to.reject(Error, 'test error')
|
||||
})
|
||||
})
|
||||
|
||||
describe('promisify function', () => {
|
||||
it('promisifies a function', async () => {
|
||||
const fn = (a, cb) => cb(null, a)
|
||||
const promisified = promisify(fn)
|
||||
const result = await promisified('test')
|
||||
expect(result).to.equal('test')
|
||||
})
|
||||
|
||||
it('assumes error first callback', async () => {
|
||||
const fn = (a, cb) => cb(new Error('test error'), a)
|
||||
const promisified = promisify(fn)
|
||||
const result = promisified('test')
|
||||
expect(result).to.reject('test error')
|
||||
})
|
||||
})
|
255
get.js
|
@ -1,255 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
const fs = require('fs')
|
||||
const index = require('./lib/entry-index')
|
||||
const memo = require('./lib/memoization')
|
||||
const read = require('./lib/content/read')
|
||||
|
||||
const Minipass = require('minipass')
|
||||
const Collect = require('minipass-collect')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
|
||||
module.exports = function get (cache, key, opts) {
|
||||
return getData(false, cache, key, opts)
|
||||
}
|
||||
module.exports.byDigest = function getByDigest (cache, digest, opts) {
|
||||
return getData(true, cache, digest, opts)
|
||||
}
|
||||
|
||||
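// Shared implementation for get() and get.byDigest(): return memoized data
// when allowed, otherwise look up the index entry (skipped for byDigest),
// read the content from the cache, and optionally memoize the result.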
function getData (byDigest, cache, key, opts = {}) {
|
||||
const { integrity, memoize, size } = opts
|
||||
const memoized = byDigest
|
||||
? memo.get.byDigest(cache, key, opts)
|
||||
: memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return Promise.resolve(
|
||||
byDigest
|
||||
? memoized
|
||||
: {
|
||||
metadata: memoized.entry.metadata,
|
||||
data: memoized.data,
|
||||
integrity: memoized.entry.integrity,
|
||||
size: memoized.entry.size
|
||||
}
|
||||
)
|
||||
}
|
||||
return (byDigest ? Promise.resolve(null) : index.find(cache, key, opts)).then(
|
||||
(entry) => {
|
||||
if (!entry && !byDigest) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
return read(cache, byDigest ? key : entry.integrity, {
|
||||
integrity,
|
||||
size
|
||||
})
|
||||
.then((data) =>
|
||||
byDigest
|
||||
? data
|
||||
: {
|
||||
data,
|
||||
metadata: entry.metadata,
|
||||
size: entry.size,
|
||||
integrity: entry.integrity
|
||||
}
|
||||
)
|
||||
.then((res) => {
|
||||
if (memoize && byDigest) {
|
||||
memo.put.byDigest(cache, key, res, opts)
|
||||
} else if (memoize) {
|
||||
memo.put(cache, entry, res.data, opts)
|
||||
}
|
||||
return res
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
module.exports.sync = function get (cache, key, opts) {
|
||||
return getDataSync(false, cache, key, opts)
|
||||
}
|
||||
module.exports.sync.byDigest = function getByDigest (cache, digest, opts) {
|
||||
return getDataSync(true, cache, digest, opts)
|
||||
}
|
||||
|
||||
function getDataSync (byDigest, cache, key, opts = {}) {
|
||||
const { integrity, memoize, size } = opts
|
||||
const memoized = byDigest
|
||||
? memo.get.byDigest(cache, key, opts)
|
||||
: memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return byDigest
|
||||
? memoized
|
||||
: {
|
||||
metadata: memoized.entry.metadata,
|
||||
data: memoized.data,
|
||||
integrity: memoized.entry.integrity,
|
||||
size: memoized.entry.size
|
||||
}
|
||||
}
|
||||
const entry = !byDigest && index.find.sync(cache, key, opts)
|
||||
if (!entry && !byDigest) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
const data = read.sync(cache, byDigest ? key : entry.integrity, {
|
||||
integrity: integrity,
|
||||
size: size
|
||||
})
|
||||
const res = byDigest
|
||||
? data
|
||||
: {
|
||||
metadata: entry.metadata,
|
||||
data: data,
|
||||
size: entry.size,
|
||||
integrity: entry.integrity
|
||||
}
|
||||
if (memoize && byDigest) {
|
||||
memo.put.byDigest(cache, key, res, opts)
|
||||
} else if (memoize) {
|
||||
memo.put(cache, entry, res.data, opts)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
module.exports.stream = getStream
|
||||
|
||||
const getMemoizedStream = (memoized) => {
|
||||
const stream = new Minipass()
|
||||
stream.on('newListener', function (ev, cb) {
|
||||
ev === 'metadata' && cb(memoized.entry.metadata)
|
||||
ev === 'integrity' && cb(memoized.entry.integrity)
|
||||
ev === 'size' && cb(memoized.entry.size)
|
||||
})
|
||||
stream.end(memoized.data)
|
||||
return stream
|
||||
}
|
||||
|
||||
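// Streaming variant of get(): emits 'metadata', 'integrity', and 'size'
// (replayed for listeners attached later) and pipes the cached content
// through, optionally collecting it into the memoization cache.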
function getStream (cache, key, opts = {}) {
|
||||
const { memoize, size } = opts
|
||||
const memoized = memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return getMemoizedStream(memoized)
|
||||
}
|
||||
|
||||
const stream = new Pipeline()
|
||||
index
|
||||
.find(cache, key)
|
||||
.then((entry) => {
|
||||
if (!entry) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
stream.emit('metadata', entry.metadata)
|
||||
stream.emit('integrity', entry.integrity)
|
||||
stream.emit('size', entry.size)
|
||||
stream.on('newListener', function (ev, cb) {
|
||||
ev === 'metadata' && cb(entry.metadata)
|
||||
ev === 'integrity' && cb(entry.integrity)
|
||||
ev === 'size' && cb(entry.size)
|
||||
})
|
||||
|
||||
const src = read.readStream(
|
||||
cache,
|
||||
entry.integrity,
|
||||
{ ...opts, size: typeof size !== 'number' ? entry.size : size }
|
||||
)
|
||||
|
||||
if (memoize) {
|
||||
const memoStream = new Collect.PassThrough()
|
||||
memoStream.on('collect', data => memo.put(cache, entry, data, opts))
|
||||
stream.unshift(memoStream)
|
||||
}
|
||||
stream.unshift(src)
|
||||
})
|
||||
.catch((err) => stream.emit('error', err))
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
module.exports.stream.byDigest = getStreamDigest
|
||||
|
||||
function getStreamDigest (cache, integrity, opts = {}) {
|
||||
const { memoize } = opts
|
||||
const memoized = memo.get.byDigest(cache, integrity, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
const stream = new Minipass()
|
||||
stream.end(memoized)
|
||||
return stream
|
||||
} else {
|
||||
const stream = read.readStream(cache, integrity, opts)
|
||||
if (!memoize) {
|
||||
return stream
|
||||
}
|
||||
const memoStream = new Collect.PassThrough()
|
||||
memoStream.on('collect', data => memo.put.byDigest(
|
||||
cache,
|
||||
integrity,
|
||||
data,
|
||||
opts
|
||||
))
|
||||
return new Pipeline(stream, memoStream)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.info = info
|
||||
|
||||
function info (cache, key, opts = {}) {
|
||||
const { memoize } = opts
|
||||
const memoized = memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return Promise.resolve(memoized.entry)
|
||||
} else {
|
||||
return index.find(cache, key)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.hasContent = read.hasContent
|
||||
|
||||
function cp (cache, key, dest, opts) {
|
||||
return copy(false, cache, key, dest, opts)
|
||||
}
|
||||
|
||||
module.exports.copy = cp
|
||||
|
||||
function cpDigest (cache, digest, dest, opts) {
|
||||
return copy(true, cache, digest, dest, opts)
|
||||
}
|
||||
|
||||
module.exports.copy.byDigest = cpDigest
|
||||
|
||||
function copy (byDigest, cache, key, dest, opts = {}) {
|
||||
if (read.copy) {
|
||||
return (byDigest
|
||||
? Promise.resolve(null)
|
||||
: index.find(cache, key, opts)
|
||||
).then((entry) => {
|
||||
if (!entry && !byDigest) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
return read
|
||||
.copy(cache, byDigest ? key : entry.integrity, dest, opts)
|
||||
.then(() => {
|
||||
return byDigest
|
||||
? key
|
||||
: {
|
||||
metadata: entry.metadata,
|
||||
size: entry.size,
|
||||
integrity: entry.integrity
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
return getData(byDigest, cache, key, opts).then((res) => {
|
||||
return writeFile(dest, byDigest ? res : res.data).then(() => {
|
||||
return byDigest
|
||||
? key
|
||||
: {
|
||||
metadata: res.metadata,
|
||||
size: res.size,
|
||||
integrity: res.integrity
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
|
@ -1,123 +1,99 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const fs = require('fs')
|
||||
const fs = require('fs/promises')
|
||||
const fsm = require('fs-minipass')
|
||||
const ssri = require('ssri')
|
||||
const contentPath = require('./path')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
const lstat = util.promisify(fs.lstat)
|
||||
const readFile = util.promisify(fs.readFile)
|
||||
|
||||
module.exports = read
|
||||
|
||||
const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
|
||||
function read (cache, integrity, opts = {}) {
|
||||
async function read (cache, integrity, opts = {}) {
|
||||
const { size } = opts
|
||||
return withContentSri(cache, integrity, (cpath, sri) => {
|
||||
const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
|
||||
// get size
|
||||
return lstat(cpath).then(stat => ({ stat, cpath, sri }))
|
||||
}).then(({ stat, cpath, sri }) => {
|
||||
if (typeof size === 'number' && stat.size !== size) {
|
||||
throw sizeError(size, stat.size)
|
||||
}
|
||||
if (stat.size > MAX_SINGLE_READ_SIZE) {
|
||||
return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
|
||||
}
|
||||
|
||||
return readFile(cpath, null).then((data) => {
|
||||
if (!ssri.checkData(data, sri)) {
|
||||
throw integrityError(sri, cpath)
|
||||
}
|
||||
return data
|
||||
})
|
||||
const stat = await fs.stat(cpath)
|
||||
return { stat, cpath, sri }
|
||||
})
|
||||
if (typeof size === 'number' && stat.size !== size) {
|
||||
throw sizeError(size, stat.size)
|
||||
}
|
||||
|
||||
if (stat.size > MAX_SINGLE_READ_SIZE) {
|
||||
return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
|
||||
}
|
||||
|
||||
const data = await fs.readFile(cpath, { encoding: null })
|
||||
if (!ssri.checkData(data, sri)) {
|
||||
throw integrityError(sri, cpath)
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
const readPipeline = (cpath, size, sri, stream) => {
|
||||
stream.push(
|
||||
new fsm.ReadStream(cpath, {
|
||||
size,
|
||||
readSize: MAX_SINGLE_READ_SIZE
|
||||
readSize: MAX_SINGLE_READ_SIZE,
|
||||
}),
|
||||
ssri.integrityStream({
|
||||
integrity: sri,
|
||||
size
|
||||
size,
|
||||
})
|
||||
)
|
||||
return stream
|
||||
}
|
||||
|
||||
module.exports.sync = readSync
|
||||
|
||||
function readSync (cache, integrity, opts = {}) {
|
||||
const { size } = opts
|
||||
return withContentSriSync(cache, integrity, (cpath, sri) => {
|
||||
const data = fs.readFileSync(cpath)
|
||||
if (typeof size === 'number' && size !== data.length) {
|
||||
throw sizeError(size, data.length)
|
||||
}
|
||||
|
||||
if (ssri.checkData(data, sri)) {
|
||||
return data
|
||||
}
|
||||
|
||||
throw integrityError(sri, cpath)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.stream = readStream
|
||||
module.exports.readStream = readStream
|
||||
|
||||
function readStream (cache, integrity, opts = {}) {
|
||||
const { size } = opts
|
||||
const stream = new Pipeline()
|
||||
withContentSri(cache, integrity, (cpath, sri) => {
|
||||
// just lstat to ensure it exists
|
||||
return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
|
||||
}).then(({ stat, cpath, sri }) => {
|
||||
// Set all this up to run on the stream and then just return the stream
|
||||
Promise.resolve().then(async () => {
|
||||
const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
|
||||
// just stat to ensure it exists
|
||||
const stat = await fs.stat(cpath)
|
||||
return { stat, cpath, sri }
|
||||
})
|
||||
if (typeof size === 'number' && size !== stat.size) {
|
||||
return stream.emit('error', sizeError(size, stat.size))
|
||||
}
|
||||
readPipeline(cpath, stat.size, sri, stream)
|
||||
}, er => stream.emit('error', er))
|
||||
|
||||
return readPipeline(cpath, stat.size, sri, stream)
|
||||
}).catch(err => stream.emit('error', err))
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
let copyFile
|
||||
if (fs.copyFile) {
|
||||
module.exports.copy = copy
|
||||
module.exports.copy.sync = copySync
|
||||
copyFile = util.promisify(fs.copyFile)
|
||||
}
|
||||
module.exports.copy = copy
|
||||
|
||||
function copy (cache, integrity, dest) {
|
||||
return withContentSri(cache, integrity, (cpath, sri) => {
|
||||
return copyFile(cpath, dest)
|
||||
})
|
||||
}
|
||||
|
||||
function copySync (cache, integrity, dest) {
|
||||
return withContentSriSync(cache, integrity, (cpath, sri) => {
|
||||
return fs.copyFileSync(cpath, dest)
|
||||
return fs.copyFile(cpath, dest)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.hasContent = hasContent
|
||||
|
||||
function hasContent (cache, integrity) {
|
||||
async function hasContent (cache, integrity) {
|
||||
if (!integrity) {
|
||||
return Promise.resolve(false)
|
||||
return false
|
||||
}
|
||||
return withContentSri(cache, integrity, (cpath, sri) => {
|
||||
return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
|
||||
}).catch((err) => {
|
||||
|
||||
try {
|
||||
return await withContentSri(cache, integrity, async (cpath, sri) => {
|
||||
const stat = await fs.stat(cpath)
|
||||
return { size: stat.size, sri, stat }
|
||||
})
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (err.code === 'EPERM') {
|
||||
/* istanbul ignore else */
|
||||
if (process.platform !== 'win32') {
|
||||
|
@ -126,114 +102,54 @@ function hasContent (cache, integrity) {
|
|||
return false
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.hasContent.sync = hasContentSync
|
||||
|
||||
function hasContentSync (cache, integrity) {
|
||||
if (!integrity) {
|
||||
return false
|
||||
}
|
||||
return withContentSriSync(cache, integrity, (cpath, sri) => {
|
||||
try {
|
||||
const stat = fs.lstatSync(cpath)
|
||||
return { size: stat.size, sri, stat }
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return false
|
||||
}
|
||||
if (err.code === 'EPERM') {
|
||||
/* istanbul ignore else */
|
||||
if (process.platform !== 'win32') {
|
||||
throw err
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function withContentSri (cache, integrity, fn) {
|
||||
const tryFn = () => {
|
||||
const sri = ssri.parse(integrity)
|
||||
// If `integrity` has multiple entries, pick the first digest
|
||||
// with available local data.
|
||||
const algo = sri.pickAlgorithm()
|
||||
const digests = sri[algo]
|
||||
|
||||
if (digests.length <= 1) {
|
||||
const cpath = contentPath(cache, digests[0])
|
||||
return fn(cpath, digests[0])
|
||||
} else {
|
||||
// Can't use race here because a generic error can happen before an ENOENT error, and can happen before a valid result
|
||||
return Promise
|
||||
.all(digests.map((meta) => {
|
||||
return withContentSri(cache, meta, fn)
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return Object.assign(
|
||||
new Error('No matching content found for ' + sri.toString()),
|
||||
{ code: 'ENOENT' }
|
||||
)
|
||||
}
|
||||
return err
|
||||
})
|
||||
}))
|
||||
.then((results) => {
|
||||
// Return the first non error if it is found
|
||||
const result = results.find((r) => !(r instanceof Error))
|
||||
if (result) {
|
||||
return result
|
||||
}
|
||||
|
||||
// Throw the No matching content found error
|
||||
const enoentError = results.find((r) => r.code === 'ENOENT')
|
||||
if (enoentError) {
|
||||
throw enoentError
|
||||
}
|
||||
|
||||
// Throw generic error
|
||||
throw results.find((r) => r instanceof Error)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
tryFn()
|
||||
.then(resolve)
|
||||
.catch(reject)
|
||||
} catch (err) {
|
||||
reject(err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function withContentSriSync (cache, integrity, fn) {
|
||||
async function withContentSri (cache, integrity, fn) {
|
||||
const sri = ssri.parse(integrity)
|
||||
// If `integrity` has multiple entries, pick the first digest
|
||||
// with available local data.
|
||||
const algo = sri.pickAlgorithm()
|
||||
const digests = sri[algo]
|
||||
|
||||
if (digests.length <= 1) {
|
||||
const cpath = contentPath(cache, digests[0])
|
||||
return fn(cpath, digests[0])
|
||||
} else {
|
||||
let lastErr = null
|
||||
for (const meta of digests) {
|
||||
// Can't use race here because a generic error can happen before
|
||||
// an ENOENT error, and can happen before a valid result
|
||||
const results = await Promise.all(digests.map(async (meta) => {
|
||||
try {
|
||||
return withContentSriSync(cache, meta, fn)
|
||||
return await withContentSri(cache, meta, fn)
|
||||
} catch (err) {
|
||||
lastErr = err
|
||||
if (err.code === 'ENOENT') {
|
||||
return Object.assign(
|
||||
new Error('No matching content found for ' + sri.toString()),
|
||||
{ code: 'ENOENT' }
|
||||
)
|
||||
}
|
||||
return err
|
||||
}
|
||||
}))
|
||||
// Return the first non error if it is found
|
||||
const result = results.find((r) => !(r instanceof Error))
|
||||
if (result) {
|
||||
return result
|
||||
}
|
||||
throw lastErr
|
||||
|
||||
// Throw the No matching content found error
|
||||
const enoentError = results.find((r) => r.code === 'ENOENT')
|
||||
if (enoentError) {
|
||||
throw enoentError
|
||||
}
|
||||
|
||||
// Throw generic error
|
||||
throw results.find((r) => r instanceof Error)
|
||||
}
|
||||
}
|
||||
|
||||
function sizeError (expected, found) {
|
||||
/* eslint-disable-next-line max-len */
|
||||
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
|
||||
err.expected = expected
|
||||
err.found = found
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const fs = require('fs/promises')
|
||||
const contentPath = require('./path')
|
||||
const { hasContent } = require('./read')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
|
||||
module.exports = rm
|
||||
|
||||
function rm (cache, integrity) {
|
||||
return hasContent(cache, integrity).then((content) => {
|
||||
// ~pretty~ sure we can't end up with a content lacking sri, but be safe
|
||||
if (content && content.sri) {
|
||||
return rimraf(contentPath(cache, content.sri)).then(() => true)
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
})
|
||||
async function rm (cache, integrity) {
|
||||
const content = await hasContent(cache, integrity)
|
||||
// ~pretty~ sure we can't end up with a content lacking sri, but be safe
|
||||
if (content && content.sri) {
|
||||
await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,44 +1,45 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
const events = require('events')
|
||||
|
||||
const contentPath = require('./path')
|
||||
const fixOwner = require('../util/fix-owner')
|
||||
const fs = require('fs')
|
||||
const fs = require('fs/promises')
|
||||
const moveFile = require('../util/move-file')
|
||||
const Minipass = require('minipass')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
const Flush = require('minipass-flush')
|
||||
const path = require('path')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
const ssri = require('ssri')
|
||||
const uniqueFilename = require('unique-filename')
|
||||
const { disposer } = require('./../util/disposer')
|
||||
const fsm = require('fs-minipass')
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
|
||||
module.exports = write
|
||||
|
||||
function write (cache, data, opts = {}) {
|
||||
async function write (cache, data, opts = {}) {
|
||||
const { algorithms, size, integrity } = opts
|
||||
if (algorithms && algorithms.length > 1) {
|
||||
throw new Error('opts.algorithms only supports a single algorithm for now')
|
||||
}
|
||||
|
||||
if (typeof size === 'number' && data.length !== size) {
|
||||
return Promise.reject(sizeError(size, data.length))
|
||||
}
|
||||
const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
|
||||
if (integrity && !ssri.checkData(data, integrity, opts)) {
|
||||
return Promise.reject(checksumError(integrity, sri))
|
||||
throw sizeError(size, data.length)
|
||||
}
|
||||
|
||||
return disposer(makeTmp(cache, opts), makeTmpDisposer,
|
||||
(tmp) => {
|
||||
return writeFile(tmp.target, data, { flag: 'wx' })
|
||||
.then(() => moveToDestination(tmp, cache, sri, opts))
|
||||
})
|
||||
.then(() => ({ integrity: sri, size: data.length }))
|
||||
const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
|
||||
if (integrity && !ssri.checkData(data, integrity, opts)) {
|
||||
throw checksumError(integrity, sri)
|
||||
}
|
||||
|
||||
const tmp = await makeTmp(cache, opts)
|
||||
try {
|
||||
await fs.writeFile(tmp.target, data, { flag: 'wx' })
|
||||
await moveToDestination(tmp, cache, sri, opts)
|
||||
return { integrity: sri, size: data.length }
|
||||
} finally {
|
||||
if (!tmp.moved) {
|
||||
await fs.rm(tmp.target, { recursive: true, force: true })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.stream = writeStream
|
||||
|
@ -76,9 +77,11 @@ class CacacheWriteStream extends Flush {
|
|||
// defer this one tick by rejecting a promise on it.
|
||||
return Promise.reject(e).catch(cb)
|
||||
}
|
||||
// eslint-disable-next-line promise/catch-or-return
|
||||
this.handleContentP.then(
|
||||
(res) => {
|
||||
res.integrity && this.emit('integrity', res.integrity)
|
||||
// eslint-disable-next-line promise/always-return
|
||||
res.size !== null && this.emit('size', res.size)
|
||||
cb()
|
||||
},
|
||||
|
@ -92,80 +95,78 @@ function writeStream (cache, opts = {}) {
|
|||
return new CacacheWriteStream(cache, opts)
|
||||
}
|
||||
|
||||
function handleContent (inputStream, cache, opts) {
|
||||
return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => {
|
||||
return pipeToTmp(inputStream, cache, tmp.target, opts)
|
||||
.then((res) => {
|
||||
return moveToDestination(
|
||||
tmp,
|
||||
cache,
|
||||
res.integrity,
|
||||
opts
|
||||
).then(() => res)
|
||||
})
|
||||
})
|
||||
async function handleContent (inputStream, cache, opts) {
|
||||
const tmp = await makeTmp(cache, opts)
|
||||
try {
|
||||
const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
|
||||
await moveToDestination(
|
||||
tmp,
|
||||
cache,
|
||||
res.integrity,
|
||||
opts
|
||||
)
|
||||
return res
|
||||
} finally {
|
||||
if (!tmp.moved) {
|
||||
await fs.rm(tmp.target, { recursive: true, force: true })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function pipeToTmp (inputStream, cache, tmpTarget, opts) {
|
||||
async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
|
||||
const outStream = new fsm.WriteStream(tmpTarget, {
|
||||
flags: 'wx',
|
||||
})
|
||||
|
||||
if (opts.integrityEmitter) {
|
||||
// we need to create these all simultaneously since they can fire in any order
|
||||
const [integrity, size] = await Promise.all([
|
||||
events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
|
||||
events.once(opts.integrityEmitter, 'size').then(res => res[0]),
|
||||
new Pipeline(inputStream, outStream).promise(),
|
||||
])
|
||||
return { integrity, size }
|
||||
}
|
||||
|
||||
let integrity
|
||||
let size
|
||||
const hashStream = ssri.integrityStream({
|
||||
integrity: opts.integrity,
|
||||
algorithms: opts.algorithms,
|
||||
size: opts.size
|
||||
size: opts.size,
|
||||
})
|
||||
hashStream.on('integrity', i => { integrity = i })
|
||||
hashStream.on('size', s => { size = s })
|
||||
|
||||
const outStream = new fsm.WriteStream(tmpTarget, {
|
||||
flags: 'wx'
|
||||
hashStream.on('integrity', i => {
|
||||
integrity = i
|
||||
})
|
||||
hashStream.on('size', s => {
|
||||
size = s
|
||||
})
|
||||
|
||||
// NB: this can throw if the hashStream has a problem with
|
||||
// it, and the data is fully written. but pipeToTmp is only
|
||||
// called in promisory contexts where that is handled.
|
||||
const pipeline = new Pipeline(
|
||||
inputStream,
|
||||
hashStream,
|
||||
outStream
|
||||
)
|
||||
|
||||
return pipeline.promise()
|
||||
.then(() => ({ integrity, size }))
|
||||
.catch(er => rimraf(tmpTarget).then(() => { throw er }))
|
||||
const pipeline = new Pipeline(inputStream, hashStream, outStream)
|
||||
await pipeline.promise()
|
||||
return { integrity, size }
|
||||
}
|
||||
|
||||
function makeTmp (cache, opts) {
|
||||
async function makeTmp (cache, opts) {
|
||||
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
|
||||
return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({
|
||||
await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
|
||||
return {
|
||||
target: tmpTarget,
|
||||
moved: false
|
||||
}))
|
||||
}
|
||||
|
||||
function makeTmpDisposer (tmp) {
|
||||
if (tmp.moved) {
|
||||
return Promise.resolve()
|
||||
moved: false,
|
||||
}
|
||||
return rimraf(tmp.target)
|
||||
}
|
||||
|
||||
function moveToDestination (tmp, cache, sri, opts) {
|
||||
async function moveToDestination (tmp, cache, sri, opts) {
|
||||
const destination = contentPath(cache, sri)
|
||||
const destDir = path.dirname(destination)
|
||||
|
||||
return fixOwner
|
||||
.mkdirfix(cache, destDir)
|
||||
.then(() => {
|
||||
return moveFile(tmp.target, destination)
|
||||
})
|
||||
.then(() => {
|
||||
tmp.moved = true
|
||||
return fixOwner.chownr(cache, destination)
|
||||
})
|
||||
await fs.mkdir(destDir, { recursive: true })
|
||||
await moveFile(tmp.target, destination)
|
||||
tmp.moved = true
|
||||
}
|
||||
|
||||
function sizeError (expected, found) {
|
||||
/* eslint-disable-next-line max-len */
|
||||
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
|
||||
err.expected = expected
|
||||
err.found = found
|
||||
|
|
|
@ -1,20 +1,23 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const crypto = require('crypto')
|
||||
const fs = require('fs')
|
||||
const {
|
||||
appendFile,
|
||||
mkdir,
|
||||
readFile,
|
||||
readdir,
|
||||
rm,
|
||||
writeFile,
|
||||
} = require('fs/promises')
|
||||
const Minipass = require('minipass')
|
||||
const path = require('path')
|
||||
const ssri = require('ssri')
|
||||
const uniqueFilename = require('unique-filename')
|
||||
|
||||
const contentPath = require('./content/path')
|
||||
const fixOwner = require('./util/fix-owner')
|
||||
const hashToSegments = require('./util/hash-to-segments')
|
||||
const indexV = require('../package.json')['cache-version'].index
|
||||
|
||||
const appendFile = util.promisify(fs.appendFile)
|
||||
const readFile = util.promisify(fs.readFile)
|
||||
const readdir = util.promisify(fs.readdir)
|
||||
const { moveFile } = require('@npmcli/fs')
|
||||
|
||||
module.exports.NotFoundError = class NotFoundError extends Error {
|
||||
constructor (cache, key) {
|
||||
|
@ -25,51 +28,87 @@ module.exports.NotFoundError = class NotFoundError extends Error {
|
|||
}
|
||||
}
|
||||
|
||||
module.exports.insert = insert
|
||||
module.exports.compact = compact
|
||||
|
||||
function insert (cache, key, integrity, opts = {}) {
|
||||
const { metadata, size } = opts
|
||||
async function compact (cache, key, matchFn, opts = {}) {
|
||||
const bucket = bucketPath(cache, key)
|
||||
const entry = {
|
||||
key,
|
||||
integrity: integrity && ssri.stringify(integrity),
|
||||
time: Date.now(),
|
||||
size,
|
||||
metadata
|
||||
const entries = await bucketEntries(bucket)
|
||||
const newEntries = []
|
||||
// we loop backwards because the bottom-most result is the newest
|
||||
// since we add new entries with appendFile
|
||||
for (let i = entries.length - 1; i >= 0; --i) {
|
||||
const entry = entries[i]
|
||||
// a null integrity could mean either a delete was appended
|
||||
// or the user has simply stored an index that does not map
|
||||
// to any content. we determine if the user wants to keep the
|
||||
// null integrity based on the validateEntry function passed in options.
|
||||
// if the integrity is null and no validateEntry is provided, we break
|
||||
// as we consider the null integrity to be a deletion of everything
|
||||
// that came before it.
|
||||
if (entry.integrity === null && !opts.validateEntry) {
|
||||
break
|
||||
}
|
||||
|
||||
// if this entry is valid, and it is either the first entry or
|
||||
// the newEntries array doesn't already include an entry that
|
||||
// matches this one based on the provided matchFn, then we add
|
||||
// it to the beginning of our list
|
||||
if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
|
||||
(newEntries.length === 0 ||
|
||||
!newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
|
||||
newEntries.unshift(entry)
|
||||
}
|
||||
}
|
||||
return fixOwner
|
||||
.mkdirfix(cache, path.dirname(bucket))
|
||||
.then(() => {
|
||||
const stringified = JSON.stringify(entry)
|
||||
// NOTE - Cleverness ahoy!
|
||||
//
|
||||
// This works because it's tremendously unlikely for an entry to corrupt
|
||||
// another while still preserving the string length of the JSON in
|
||||
// question. So, we just slap the length in there and verify it on read.
|
||||
//
|
||||
// Thanks to @isaacs for the whiteboarding session that ended up with this.
|
||||
return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
|
||||
})
|
||||
.then(() => fixOwner.chownr(cache, bucket))
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return undefined
|
||||
}
|
||||
throw err
|
||||
// There's a class of race conditions that happen when things get deleted
|
||||
// during fixOwner, or between the two mkdirfix/chownr calls.
|
||||
//
|
||||
// It's perfectly fine to just not bother in those cases and lie
|
||||
// that the index entry was written. Because it's a cache.
|
||||
})
|
||||
.then(() => {
|
||||
return formatEntry(cache, entry)
|
||||
})
|
||||
|
||||
const newIndex = '\n' + newEntries.map((entry) => {
|
||||
const stringified = JSON.stringify(entry)
|
||||
const hash = hashEntry(stringified)
|
||||
return `${hash}\t${stringified}`
|
||||
}).join('\n')
|
||||
|
||||
const setup = async () => {
|
||||
const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
|
||||
await mkdir(path.dirname(target), { recursive: true })
|
||||
return {
|
||||
target,
|
||||
moved: false,
|
||||
}
|
||||
}
|
||||
|
||||
const teardown = async (tmp) => {
|
||||
if (!tmp.moved) {
|
||||
return rm(tmp.target, { recursive: true, force: true })
|
||||
}
|
||||
}
|
||||
|
||||
const write = async (tmp) => {
|
||||
await writeFile(tmp.target, newIndex, { flag: 'wx' })
|
||||
await mkdir(path.dirname(bucket), { recursive: true })
|
||||
// we use @npmcli/move-file directly here because we
|
||||
// want to overwrite the existing file
|
||||
await moveFile(tmp.target, bucket)
|
||||
tmp.moved = true
|
||||
}
|
||||
|
||||
// write the file atomically
|
||||
const tmp = await setup()
|
||||
try {
|
||||
await write(tmp)
|
||||
} finally {
|
||||
await teardown(tmp)
|
||||
}
|
||||
|
||||
// we reverse the list we generated such that the newest
|
||||
// entries come first in order to make looping through them easier
|
||||
// the true passed to formatEntry tells it to keep null
|
||||
// integrity values, if they made it this far it's because
|
||||
// validateEntry returned true, and as such we should return it
|
||||
return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
|
||||
}
|
||||
|
||||
module.exports.insert.sync = insertSync
|
||||
module.exports.insert = insert
|
||||
|
||||
function insertSync (cache, key, integrity, opts = {}) {
|
||||
async function insert (cache, key, integrity, opts = {}) {
|
||||
const { metadata, size } = opts
|
||||
const bucket = bucketPath(cache, key)
|
||||
const entry = {
|
||||
|
@ -77,50 +116,37 @@ function insertSync (cache, key, integrity, opts = {}) {
|
|||
integrity: integrity && ssri.stringify(integrity),
|
||||
time: Date.now(),
|
||||
size,
|
||||
metadata
|
||||
metadata,
|
||||
}
|
||||
fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
|
||||
const stringified = JSON.stringify(entry)
|
||||
fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
|
||||
try {
|
||||
fixOwner.chownr.sync(cache, bucket)
|
||||
await mkdir(path.dirname(bucket), { recursive: true })
|
||||
const stringified = JSON.stringify(entry)
|
||||
// NOTE - Cleverness ahoy!
|
||||
//
|
||||
// This works because it's tremendously unlikely for an entry to corrupt
|
||||
// another while still preserving the string length of the JSON in
|
||||
// question. So, we just slap the length in there and verify it on read.
|
||||
//
|
||||
// Thanks to @isaacs for the whiteboarding session that ended up with
|
||||
// this.
|
||||
await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
|
||||
} catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
throw err
|
||||
if (err.code === 'ENOENT') {
|
||||
return undefined
|
||||
}
|
||||
|
||||
throw err
|
||||
}
|
||||
return formatEntry(cache, entry)
|
||||
}
|
||||
|
||||
module.exports.find = find
|
||||
|
||||
function find (cache, key) {
|
||||
const bucket = bucketPath(cache, key)
|
||||
return bucketEntries(bucket)
|
||||
.then((entries) => {
|
||||
return entries.reduce((latest, next) => {
|
||||
if (next && next.key === key) {
|
||||
return formatEntry(cache, next)
|
||||
} else {
|
||||
return latest
|
||||
}
|
||||
}, null)
|
||||
})
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return null
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.find.sync = findSync
|
||||
|
||||
function findSync (cache, key) {
|
||||
async function find (cache, key) {
|
||||
const bucket = bucketPath(cache, key)
|
||||
try {
|
||||
return bucketEntriesSync(bucket).reduce((latest, next) => {
|
||||
const entries = await bucketEntries(bucket)
|
||||
return entries.reduce((latest, next) => {
|
||||
if (next && next.key === key) {
|
||||
return formatEntry(cache, next)
|
||||
} else {
|
||||
|
@ -138,14 +164,13 @@ function findSync (cache, key) {
|
|||
|
||||
module.exports.delete = del
|
||||
|
||||
function del (cache, key, opts) {
|
||||
return insert(cache, key, null, opts)
|
||||
}
|
||||
function del (cache, key, opts = {}) {
|
||||
if (!opts.removeFully) {
|
||||
return insert(cache, key, null, opts)
|
||||
}
|
||||
|
||||
module.exports.delete.sync = delSync
|
||||
|
||||
function delSync (cache, key, opts) {
|
||||
return insertSync(cache, key, null, opts)
|
||||
const bucket = bucketPath(cache, key)
|
||||
return rm(bucket, { recursive: true, force: true })
|
||||
}
|
||||
|
||||
module.exports.lsStream = lsStream
|
||||
|
@ -154,67 +179,64 @@ function lsStream (cache) {
|
|||
const indexDir = bucketDir(cache)
|
||||
const stream = new Minipass({ objectMode: true })
|
||||
|
||||
readdirOrEmpty(indexDir).then(buckets => Promise.all(
|
||||
buckets.map(bucket => {
|
||||
// Set all this up to run on the stream and then just return the stream
|
||||
Promise.resolve().then(async () => {
|
||||
const buckets = await readdirOrEmpty(indexDir)
|
||||
await Promise.all(buckets.map(async (bucket) => {
|
||||
const bucketPath = path.join(indexDir, bucket)
|
||||
return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all(
|
||||
subbuckets.map(subbucket => {
|
||||
const subbucketPath = path.join(bucketPath, subbucket)
|
||||
const subbuckets = await readdirOrEmpty(bucketPath)
|
||||
await Promise.all(subbuckets.map(async (subbucket) => {
|
||||
const subbucketPath = path.join(bucketPath, subbucket)
|
||||
|
||||
// "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
|
||||
return readdirOrEmpty(subbucketPath).then(entries => Promise.all(
|
||||
entries.map(entry => {
|
||||
const entryPath = path.join(subbucketPath, entry)
|
||||
return bucketEntries(entryPath).then(entries =>
|
||||
// using a Map here prevents duplicate keys from
|
||||
// showing up twice, I guess?
|
||||
entries.reduce((acc, entry) => {
|
||||
acc.set(entry.key, entry)
|
||||
return acc
|
||||
}, new Map())
|
||||
).then(reduced => {
|
||||
// reduced is a map of key => entry
|
||||
for (const entry of reduced.values()) {
|
||||
const formatted = formatEntry(cache, entry)
|
||||
if (formatted) {
|
||||
stream.write(formatted)
|
||||
}
|
||||
}
|
||||
}).catch(err => {
|
||||
if (err.code === 'ENOENT') { return undefined }
|
||||
throw err
|
||||
})
|
||||
})
|
||||
))
|
||||
})
|
||||
))
|
||||
})
|
||||
))
|
||||
.then(
|
||||
() => stream.end(),
|
||||
err => stream.emit('error', err)
|
||||
)
|
||||
// "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
|
||||
const subbucketEntries = await readdirOrEmpty(subbucketPath)
|
||||
await Promise.all(subbucketEntries.map(async (entry) => {
|
||||
const entryPath = path.join(subbucketPath, entry)
|
||||
try {
|
||||
const entries = await bucketEntries(entryPath)
|
||||
// using a Map here prevents duplicate keys from showing up
|
||||
// twice, I guess?
|
||||
const reduced = entries.reduce((acc, entry) => {
|
||||
acc.set(entry.key, entry)
|
||||
return acc
|
||||
}, new Map())
|
||||
// reduced is a map of key => entry
|
||||
for (const entry of reduced.values()) {
|
||||
const formatted = formatEntry(cache, entry)
|
||||
if (formatted) {
|
||||
stream.write(formatted)
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return undefined
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}))
|
||||
}))
|
||||
}))
|
||||
stream.end()
|
||||
return stream
|
||||
}).catch(err => stream.emit('error', err))
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
module.exports.ls = ls
|
||||
|
||||
function ls (cache) {
|
||||
return lsStream(cache).collect().then(entries =>
|
||||
entries.reduce((acc, xs) => {
|
||||
acc[xs.key] = xs
|
||||
return acc
|
||||
}, {})
|
||||
)
|
||||
async function ls (cache) {
|
||||
const entries = await lsStream(cache).collect()
|
||||
return entries.reduce((acc, xs) => {
|
||||
acc[xs.key] = xs
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
|
||||
function bucketEntries (bucket, filter) {
|
||||
return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter))
|
||||
}
|
||||
module.exports.bucketEntries = bucketEntries
|
||||
|
||||
function bucketEntriesSync (bucket, filter) {
|
||||
const data = fs.readFileSync(bucket, 'utf8')
|
||||
async function bucketEntries (bucket, filter) {
|
||||
const data = await readFile(bucket, 'utf8')
|
||||
return _bucketEntries(data, filter)
|
||||
}
|
||||
|
||||
|
@ -224,6 +246,7 @@ function _bucketEntries (data, filter) {
|
|||
if (!entry) {
|
||||
return
|
||||
}
|
||||
|
||||
const pieces = entry.split('\t')
|
||||
if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
|
||||
// Hash is no good! Corruption or malice? Doesn't matter!
|
||||
|
@ -233,10 +256,11 @@ function _bucketEntries (data, filter) {
|
|||
let obj
|
||||
try {
|
||||
obj = JSON.parse(pieces[1])
|
||||
} catch (e) {
|
||||
// Entry is corrupted!
|
||||
return
|
||||
} catch (_) {
|
||||
// eslint-ignore-next-line no-empty-block
|
||||
}
|
||||
// coverage disabled here, no need to test with an entry that parses to something falsey
|
||||
// istanbul ignore else
|
||||
if (obj) {
|
||||
entries.push(obj)
|
||||
}
|
||||
|
@ -279,18 +303,19 @@ function hash (str, digest) {
|
|||
.digest('hex')
|
||||
}
|
||||
|
||||
function formatEntry (cache, entry) {
|
||||
function formatEntry (cache, entry, keepAll) {
|
||||
// Treat null digests as deletions. They'll shadow any previous entries.
|
||||
if (!entry.integrity) {
|
||||
if (!entry.integrity && !keepAll) {
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
key: entry.key,
|
||||
integrity: entry.integrity,
|
||||
path: contentPath(cache, entry.integrity),
|
||||
path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
|
||||
size: entry.size,
|
||||
time: entry.time,
|
||||
metadata: entry.metadata
|
||||
metadata: entry.metadata,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
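The "Cleverness ahoy" comments earlier in this entry-index.js diff describe the append-only bucket format that both `insert` and `compact` rely on: each entry becomes one line holding a digest of the stringified JSON, a tab, then the JSON itself, and readers silently drop any line whose digest no longer matches its payload. A minimal round-trip sketch of that layout, with hypothetical helper names and the digest algorithm chosen only for illustration:

```js
'use strict'

const crypto = require('crypto')

// Illustrative stand-ins for the hashEntry/formatEntry helpers in entry-index.js.
const hashLine = (str, algo = 'sha1') =>
  crypto.createHash(algo).update(str).digest('hex')

const serializeEntry = (entry) => {
  const stringified = JSON.stringify(entry)
  return `\n${hashLine(stringified)}\t${stringified}`
}

const parseLine = (line) => {
  const [digest, json] = line.split('\t')
  // a digest mismatch means the line was torn by a concurrent append: skip it
  if (!json || hashLine(json) !== digest) {
    return null
  }
  try {
    return JSON.parse(json)
  } catch {
    return null
  }
}

// round trip with a made-up entry
const appended = serializeEntry({ key: 'my-key', integrity: 'sha512-deadbeef', time: Date.now() })
console.log(parseLine(appended.trim()))
```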
@ -0,0 +1,170 @@
|
|||
'use strict'
|
||||
|
||||
const Collect = require('minipass-collect')
|
||||
const Minipass = require('minipass')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
const index = require('./entry-index')
|
||||
const memo = require('./memoization')
|
||||
const read = require('./content/read')
|
||||
|
||||
async function getData (cache, key, opts = {}) {
|
||||
const { integrity, memoize, size } = opts
|
||||
const memoized = memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return {
|
||||
metadata: memoized.entry.metadata,
|
||||
data: memoized.data,
|
||||
integrity: memoized.entry.integrity,
|
||||
size: memoized.entry.size,
|
||||
}
|
||||
}
|
||||
|
||||
const entry = await index.find(cache, key, opts)
|
||||
if (!entry) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
const data = await read(cache, entry.integrity, { integrity, size })
|
||||
if (memoize) {
|
||||
memo.put(cache, entry, data, opts)
|
||||
}
|
||||
|
||||
return {
|
||||
data,
|
||||
metadata: entry.metadata,
|
||||
size: entry.size,
|
||||
integrity: entry.integrity,
|
||||
}
|
||||
}
|
||||
module.exports = getData
|
||||
|
||||
async function getDataByDigest (cache, key, opts = {}) {
|
||||
const { integrity, memoize, size } = opts
|
||||
const memoized = memo.get.byDigest(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return memoized
|
||||
}
|
||||
|
||||
const res = await read(cache, key, { integrity, size })
|
||||
if (memoize) {
|
||||
memo.put.byDigest(cache, key, res, opts)
|
||||
}
|
||||
return res
|
||||
}
|
||||
module.exports.byDigest = getDataByDigest
|
||||
|
||||
const getMemoizedStream = (memoized) => {
|
||||
const stream = new Minipass()
|
||||
stream.on('newListener', function (ev, cb) {
|
||||
ev === 'metadata' && cb(memoized.entry.metadata)
|
||||
ev === 'integrity' && cb(memoized.entry.integrity)
|
||||
ev === 'size' && cb(memoized.entry.size)
|
||||
})
|
||||
stream.end(memoized.data)
|
||||
return stream
|
||||
}
|
||||
|
||||
function getStream (cache, key, opts = {}) {
|
||||
const { memoize, size } = opts
|
||||
const memoized = memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return getMemoizedStream(memoized)
|
||||
}
|
||||
|
||||
const stream = new Pipeline()
|
||||
// Set all this up to run on the stream and then just return the stream
|
||||
Promise.resolve().then(async () => {
|
||||
const entry = await index.find(cache, key)
|
||||
if (!entry) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
|
||||
stream.emit('metadata', entry.metadata)
|
||||
stream.emit('integrity', entry.integrity)
|
||||
stream.emit('size', entry.size)
|
||||
stream.on('newListener', function (ev, cb) {
|
||||
ev === 'metadata' && cb(entry.metadata)
|
||||
ev === 'integrity' && cb(entry.integrity)
|
||||
ev === 'size' && cb(entry.size)
|
||||
})
|
||||
|
||||
const src = read.readStream(
|
||||
cache,
|
||||
entry.integrity,
|
||||
{ ...opts, size: typeof size !== 'number' ? entry.size : size }
|
||||
)
|
||||
|
||||
if (memoize) {
|
||||
const memoStream = new Collect.PassThrough()
|
||||
memoStream.on('collect', data => memo.put(cache, entry, data, opts))
|
||||
stream.unshift(memoStream)
|
||||
}
|
||||
stream.unshift(src)
|
||||
return stream
|
||||
}).catch((err) => stream.emit('error', err))
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
module.exports.stream = getStream
|
||||
|
||||
function getStreamDigest (cache, integrity, opts = {}) {
|
||||
const { memoize } = opts
|
||||
const memoized = memo.get.byDigest(cache, integrity, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
const stream = new Minipass()
|
||||
stream.end(memoized)
|
||||
return stream
|
||||
} else {
|
||||
const stream = read.readStream(cache, integrity, opts)
|
||||
if (!memoize) {
|
||||
return stream
|
||||
}
|
||||
|
||||
const memoStream = new Collect.PassThrough()
|
||||
memoStream.on('collect', data => memo.put.byDigest(
|
||||
cache,
|
||||
integrity,
|
||||
data,
|
||||
opts
|
||||
))
|
||||
return new Pipeline(stream, memoStream)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.stream.byDigest = getStreamDigest
|
||||
|
||||
function info (cache, key, opts = {}) {
|
||||
const { memoize } = opts
|
||||
const memoized = memo.get(cache, key, opts)
|
||||
if (memoized && memoize !== false) {
|
||||
return Promise.resolve(memoized.entry)
|
||||
} else {
|
||||
return index.find(cache, key)
|
||||
}
|
||||
}
|
||||
module.exports.info = info
|
||||
|
||||
async function copy (cache, key, dest, opts = {}) {
|
||||
const entry = await index.find(cache, key, opts)
|
||||
if (!entry) {
|
||||
throw new index.NotFoundError(cache, key)
|
||||
}
|
||||
await read.copy(cache, entry.integrity, dest, opts)
|
||||
return {
|
||||
metadata: entry.metadata,
|
||||
size: entry.size,
|
||||
integrity: entry.integrity,
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.copy = copy
|
||||
|
||||
async function copyByDigest (cache, key, dest, opts = {}) {
|
||||
await read.copy(cache, key, dest, opts)
|
||||
return key
|
||||
}
|
||||
|
||||
module.exports.copy.byDigest = copyByDigest
|
||||
|
||||
module.exports.hasContent = read.hasContent
|
|
@ -1,27 +1,28 @@
|
|||
'use strict'
|
||||
|
||||
const ls = require('./ls.js')
|
||||
const get = require('./get.js')
|
||||
const put = require('./put.js')
|
||||
const rm = require('./rm.js')
|
||||
const verify = require('./verify.js')
|
||||
const { clearMemoized } = require('./lib/memoization.js')
|
||||
const tmp = require('./lib/util/tmp.js')
|
||||
const { clearMemoized } = require('./memoization.js')
|
||||
const tmp = require('./util/tmp.js')
|
||||
const index = require('./entry-index.js')
|
||||
|
||||
module.exports.ls = ls
|
||||
module.exports.ls.stream = ls.stream
|
||||
module.exports.index = {}
|
||||
module.exports.index.compact = index.compact
|
||||
module.exports.index.insert = index.insert
|
||||
|
||||
module.exports.ls = index.ls
|
||||
module.exports.ls.stream = index.lsStream
|
||||
|
||||
module.exports.get = get
|
||||
module.exports.get.byDigest = get.byDigest
|
||||
module.exports.get.sync = get.sync
|
||||
module.exports.get.sync.byDigest = get.sync.byDigest
|
||||
module.exports.get.stream = get.stream
|
||||
module.exports.get.stream.byDigest = get.stream.byDigest
|
||||
module.exports.get.copy = get.copy
|
||||
module.exports.get.copy.byDigest = get.copy.byDigest
|
||||
module.exports.get.info = get.info
|
||||
module.exports.get.hasContent = get.hasContent
|
||||
module.exports.get.hasContent.sync = get.hasContent.sync
|
||||
|
||||
module.exports.put = put
|
||||
module.exports.put.stream = put.stream
|
|
@ -2,13 +2,11 @@
|
|||
|
||||
const LRU = require('lru-cache')
|
||||
|
||||
const MAX_SIZE = 50 * 1024 * 1024 // 50MB
|
||||
const MAX_AGE = 3 * 60 * 1000
|
||||
|
||||
const MEMOIZED = new LRU({
|
||||
max: MAX_SIZE,
|
||||
maxAge: MAX_AGE,
|
||||
length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length
|
||||
max: 500,
|
||||
maxSize: 50 * 1024 * 1024, // 50MB
|
||||
ttl: 3 * 60 * 1000, // 3 minutes
|
||||
sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
|
||||
})
|
||||
|
||||
module.exports.clearMemoized = clearMemoized
|
||||
|
@ -18,7 +16,7 @@ function clearMemoized () {
|
|||
MEMOIZED.forEach((v, k) => {
|
||||
old[k] = v
|
||||
})
|
||||
MEMOIZED.reset()
|
||||
MEMOIZED.clear()
|
||||
return old
|
||||
}
|
||||
|
||||
|
|
|
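The memoization change above reads as an lru-cache major-version migration: the time limit moves from `maxAge` to `ttl`, the per-entry `length` callback becomes `sizeCalculation`, the byte budget moves from `max` to `maxSize` (with `max` now an entry count), and `reset()` is replaced by `clear()`. A small sketch using the same option values as the diff, with made-up cache contents and hypothetical key shapes:

```js
'use strict'

const LRU = require('lru-cache') // lru-cache 7.x style, matching the require above

const MEMOIZED = new LRU({
  max: 500, // entry-count cap
  maxSize: 50 * 1024 * 1024, // 50MB byte budget
  ttl: 3 * 60 * 1000, // 3 minutes
  sizeCalculation: (entry, key) =>
    key.startsWith('key:') ? entry.data.length : entry.length,
})

// hypothetical shapes: full entries under "key:" keys, raw content under digest keys
MEMOIZED.set('key:my-key', { entry: { key: 'my-key' }, data: Buffer.from('hello') })
MEMOIZED.set('digest:sha512-deadbeef', Buffer.from('hello'))

console.log(MEMOIZED.get('key:my-key'))
MEMOIZED.clear() // replaces the old reset()
```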
@ -1,32 +1,29 @@
|
|||
'use strict'
|
||||
|
||||
const index = require('./lib/entry-index')
|
||||
const memo = require('./lib/memoization')
|
||||
const write = require('./lib/content/write')
|
||||
const index = require('./entry-index')
|
||||
const memo = require('./memoization')
|
||||
const write = require('./content/write')
|
||||
const Flush = require('minipass-flush')
|
||||
const { PassThrough } = require('minipass-collect')
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
const putOpts = (opts) => ({
|
||||
algorithms: ['sha512'],
|
||||
...opts
|
||||
...opts,
|
||||
})
|
||||
|
||||
module.exports = putData
|
||||
|
||||
function putData (cache, key, data, opts = {}) {
|
||||
async function putData (cache, key, data, opts = {}) {
|
||||
const { memoize } = opts
|
||||
opts = putOpts(opts)
|
||||
return write(cache, data, opts).then((res) => {
|
||||
return index
|
||||
.insert(cache, key, res.integrity, { ...opts, size: res.size })
|
||||
.then((entry) => {
|
||||
if (memoize) {
|
||||
memo.put(cache, entry, data, opts)
|
||||
}
|
||||
return res.integrity
|
||||
})
|
||||
})
|
||||
const res = await write(cache, data, opts)
|
||||
const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
|
||||
if (memoize) {
|
||||
memo.put(cache, entry, data, opts)
|
||||
}
|
||||
|
||||
return res.integrity
|
||||
}
|
||||
|
||||
module.exports.stream = putStream
|
||||
|
@ -36,6 +33,7 @@ function putStream (cache, key, opts = {}) {
|
|||
opts = putOpts(opts)
|
||||
let integrity
|
||||
let size
|
||||
let error
|
||||
|
||||
let memoData
|
||||
const pipeline = new Pipeline()
|
||||
|
@ -57,27 +55,25 @@ function putStream (cache, key, opts = {}) {
|
|||
.on('size', (s) => {
|
||||
size = s
|
||||
})
|
||||
.on('error', (err) => {
|
||||
error = err
|
||||
})
|
||||
|
||||
pipeline.push(contentStream)
|
||||
|
||||
// last but not least, we write the index and emit hash and size,
|
||||
// and memoize if we're doing that
|
||||
pipeline.push(new Flush({
|
||||
flush () {
|
||||
return index
|
||||
.insert(cache, key, integrity, { ...opts, size })
|
||||
.then((entry) => {
|
||||
if (memoize && memoData) {
|
||||
memo.put(cache, entry, memoData, opts)
|
||||
}
|
||||
if (integrity) {
|
||||
pipeline.emit('integrity', integrity)
|
||||
}
|
||||
if (size) {
|
||||
pipeline.emit('size', size)
|
||||
}
|
||||
})
|
||||
}
|
||||
async flush () {
|
||||
if (!error) {
|
||||
const entry = await index.insert(cache, key, integrity, { ...opts, size })
|
||||
if (memoize && memoData) {
|
||||
memo.put(cache, entry, memoData, opts)
|
||||
}
|
||||
pipeline.emit('integrity', integrity)
|
||||
pipeline.emit('size', size)
|
||||
}
|
||||
},
|
||||
}))
|
||||
|
||||
return pipeline
|
|
@ -0,0 +1,31 @@
|
|||
'use strict'
|
||||
|
||||
const { rm } = require('fs/promises')
|
||||
const glob = require('./util/glob.js')
|
||||
const index = require('./entry-index')
|
||||
const memo = require('./memoization')
|
||||
const path = require('path')
|
||||
const rmContent = require('./content/rm')
|
||||
|
||||
module.exports = entry
|
||||
module.exports.entry = entry
|
||||
|
||||
function entry (cache, key, opts) {
|
||||
memo.clearMemoized()
|
||||
return index.delete(cache, key, opts)
|
||||
}
|
||||
|
||||
module.exports.content = content
|
||||
|
||||
function content (cache, integrity) {
|
||||
memo.clearMemoized()
|
||||
return rmContent(cache, integrity)
|
||||
}
|
||||
|
||||
module.exports.all = all
|
||||
|
||||
async function all (cache) {
|
||||
memo.clearMemoized()
|
||||
const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
|
||||
return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
module.exports.disposer = disposer
|
||||
|
||||
function disposer (creatorFn, disposerFn, fn) {
|
||||
const runDisposer = (resource, result, shouldThrow = false) => {
|
||||
return disposerFn(resource)
|
||||
.then(
|
||||
// disposer resolved, do something with original fn's promise
|
||||
() => {
|
||||
if (shouldThrow) {
|
||||
throw result
|
||||
}
|
||||
return result
|
||||
},
|
||||
// Disposer fn failed, crash process
|
||||
(err) => {
|
||||
throw err
|
||||
// Or process.exit?
|
||||
})
|
||||
}
|
||||
|
||||
return creatorFn
|
||||
.then((resource) => {
|
||||
// fn(resource) can throw, so wrap in a promise here
|
||||
return Promise.resolve().then(() => fn(resource))
|
||||
.then((result) => runDisposer(resource, result))
|
||||
.catch((err) => runDisposer(resource, err, true))
|
||||
})
|
||||
}
|
|
@ -1,145 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const chownr = util.promisify(require('chownr'))
|
||||
const mkdirp = require('mkdirp')
|
||||
const inflight = require('promise-inflight')
|
||||
const inferOwner = require('infer-owner')
|
||||
|
||||
// Memoize getuid()/getgid() calls.
|
||||
// patch process.setuid/setgid to invalidate cached value on change
|
||||
const self = { uid: null, gid: null }
|
||||
const getSelf = () => {
|
||||
if (typeof self.uid !== 'number') {
|
||||
self.uid = process.getuid()
|
||||
const setuid = process.setuid
|
||||
process.setuid = (uid) => {
|
||||
self.uid = null
|
||||
process.setuid = setuid
|
||||
return process.setuid(uid)
|
||||
}
|
||||
}
|
||||
if (typeof self.gid !== 'number') {
|
||||
self.gid = process.getgid()
|
||||
const setgid = process.setgid
|
||||
process.setgid = (gid) => {
|
||||
self.gid = null
|
||||
process.setgid = setgid
|
||||
return process.setgid(gid)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.chownr = fixOwner
|
||||
|
||||
function fixOwner (cache, filepath) {
|
||||
if (!process.getuid) {
|
||||
// This platform doesn't need ownership fixing
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
getSelf()
|
||||
if (self.uid !== 0) {
|
||||
// almost certainly can't chown anyway
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
return Promise.resolve(inferOwner(cache)).then((owner) => {
|
||||
const { uid, gid } = owner
|
||||
|
||||
// No need to override if it's already what we used.
|
||||
if (self.uid === uid && self.gid === gid) {
|
||||
return
|
||||
}
|
||||
|
||||
return inflight('fixOwner: fixing ownership on ' + filepath, () =>
|
||||
chownr(
|
||||
filepath,
|
||||
typeof uid === 'number' ? uid : self.uid,
|
||||
typeof gid === 'number' ? gid : self.gid
|
||||
).catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return null
|
||||
}
|
||||
throw err
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.chownr.sync = fixOwnerSync
|
||||
|
||||
function fixOwnerSync (cache, filepath) {
|
||||
if (!process.getuid) {
|
||||
// This platform doesn't need ownership fixing
|
||||
return
|
||||
}
|
||||
const { uid, gid } = inferOwner.sync(cache)
|
||||
getSelf()
|
||||
if (self.uid !== 0) {
|
||||
// almost certainly can't chown anyway
|
||||
return
|
||||
}
|
||||
|
||||
if (self.uid === uid && self.gid === gid) {
|
||||
// No need to override if it's already what we used.
|
||||
return
|
||||
}
|
||||
try {
|
||||
chownr.sync(
|
||||
filepath,
|
||||
typeof uid === 'number' ? uid : self.uid,
|
||||
typeof gid === 'number' ? gid : self.gid
|
||||
)
|
||||
} catch (err) {
|
||||
// only catch ENOENT, any other error is a problem.
|
||||
if (err.code === 'ENOENT') {
|
||||
return null
|
||||
}
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.mkdirfix = mkdirfix
|
||||
|
||||
function mkdirfix (cache, p, cb) {
|
||||
// we have to infer the owner _before_ making the directory, even though
|
||||
// we aren't going to use the results, since the cache itself might not
|
||||
// exist yet. If we mkdirp it, then our current uid/gid will be assumed
|
||||
// to be correct if it creates the cache folder in the process.
|
||||
return Promise.resolve(inferOwner(cache)).then(() => {
|
||||
return mkdirp(p)
|
||||
.then((made) => {
|
||||
if (made) {
|
||||
return fixOwner(cache, made).then(() => made)
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
if (err.code === 'EEXIST') {
|
||||
return fixOwner(cache, p).then(() => null)
|
||||
}
|
||||
throw err
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.mkdirfix.sync = mkdirfixSync
|
||||
|
||||
function mkdirfixSync (cache, p) {
|
||||
try {
|
||||
inferOwner.sync(cache)
|
||||
const made = mkdirp.sync(p)
|
||||
if (made) {
|
||||
fixOwnerSync(cache, made)
|
||||
return made
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'EEXIST') {
|
||||
fixOwnerSync(cache, p)
|
||||
return null
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
'use strict'
|
||||
|
||||
const { promisify } = require('util')
|
||||
const glob = promisify(require('glob'))
|
||||
|
||||
const globify = (pattern) => pattern.split('\\').join('/')
|
||||
module.exports = (path, options) => glob(globify(path), options)
|
|
@ -1,18 +1,13 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('fs')
|
||||
const util = require('util')
|
||||
const chmod = util.promisify(fs.chmod)
|
||||
const unlink = util.promisify(fs.unlink)
|
||||
const stat = util.promisify(fs.stat)
|
||||
const move = require('@npmcli/move-file')
|
||||
const fs = require('fs/promises')
|
||||
const { moveFile: move } = require('@npmcli/fs')
|
||||
const pinflight = require('promise-inflight')
|
||||
|
||||
module.exports = moveFile
|
||||
|
||||
function moveFile (src, dest) {
|
||||
const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ ||
|
||||
process.platform === 'win32'
|
||||
async function moveFile (src, dest) {
|
||||
const isWindows = process.platform === 'win32'
|
||||
|
||||
// This isn't quite an fs.rename -- the assumption is that
|
||||
// if `dest` already exists, and we get certain errors while
|
||||
|
@ -23,47 +18,39 @@ function moveFile (src, dest) {
|
|||
// content their own way.
|
||||
//
|
||||
// Note that, as the name suggests, this strictly only supports file moves.
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.link(src, dest, (err) => {
|
||||
if (err) {
|
||||
if (isWindows && err.code === 'EPERM') {
|
||||
// XXX This is a really weird way to handle this situation, as it
|
||||
// results in the src file being deleted even though the dest
|
||||
// might not exist. Since we pretty much always write files to
|
||||
// deterministic locations based on content hash, this is likely
|
||||
// ok (or at worst, just ends in a future cache miss). But it would
|
||||
// be worth investigating at some time in the future if this is
|
||||
// really what we want to do here.
|
||||
return resolve()
|
||||
} else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
|
||||
// file already exists, so whatever
|
||||
return resolve()
|
||||
} else {
|
||||
return reject(err)
|
||||
try {
|
||||
await fs.link(src, dest)
|
||||
} catch (err) {
|
||||
if (isWindows && err.code === 'EPERM') {
|
||||
// XXX This is a really weird way to handle this situation, as it
|
||||
// results in the src file being deleted even though the dest
|
||||
// might not exist. Since we pretty much always write files to
|
||||
// deterministic locations based on content hash, this is likely
|
||||
// ok (or at worst, just ends in a future cache miss). But it would
|
||||
// be worth investigating at some time in the future if this is
|
||||
// really what we want to do here.
|
||||
} else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
|
||||
// file already exists, so whatever
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
try {
|
||||
await Promise.all([
|
||||
fs.unlink(src),
|
||||
!isWindows && fs.chmod(dest, '0444'),
|
||||
])
|
||||
} catch (e) {
|
||||
return pinflight('cacache-move-file:' + dest, async () => {
|
||||
await fs.stat(dest).catch((err) => {
|
||||
if (err.code !== 'ENOENT') {
|
||||
// Something else is wrong here. Bail bail bail
|
||||
throw err
|
||||
}
|
||||
} else {
|
||||
return resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
.then(() => {
|
||||
// content should never change for any reason, so make it read-only
|
||||
return Promise.all([
|
||||
unlink(src),
|
||||
!isWindows && chmod(dest, '0444')
|
||||
])
|
||||
})
|
||||
.catch(() => {
|
||||
return pinflight('cacache-move-file:' + dest, () => {
|
||||
return stat(dest).catch((err) => {
|
||||
if (err.code !== 'ENOENT') {
|
||||
// Something else is wrong here. Bail bail bail
|
||||
throw err
|
||||
}
|
||||
// file doesn't already exist! let's try a rename -> copy fallback
|
||||
// only delete if it successfully copies
|
||||
return move(src, dest)
|
||||
})
|
||||
})
|
||||
// file doesn't already exist! let's try a rename -> copy fallback
|
||||
// only delete if it successfully copies
|
||||
return move(src, dest)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
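The rewritten move-file above keeps the strategy the old promise chain expressed: hard-link the source into place, tolerate the EEXIST/EBUSY (and Windows EPERM) cases, unlink the source, and only fall back to a real move when the destination still does not exist. A stripped-down sketch of that control flow, assuming plain fs/promises and omitting the promise-inflight guard and the read-only chmod:

```js
'use strict'

const fs = require('fs/promises')

// Sketch only; the real moveFile also serializes the fallback through
// promise-inflight and chmods the destination to 0444 on non-Windows platforms.
async function moveFileSketch (src, dest) {
  try {
    await fs.link(src, dest) // cheap path: hard link into place
  } catch (err) {
    const tolerable = err.code === 'EEXIST' || err.code === 'EBUSY' ||
      (process.platform === 'win32' && err.code === 'EPERM')
    if (!tolerable) {
      throw err
    }
  }
  try {
    await fs.unlink(src)
  } catch {
    // the optimistic path failed: only move if dest is genuinely absent
    try {
      await fs.stat(dest)
    } catch (err) {
      if (err.code !== 'ENOENT') {
        throw err // something else is wrong, bail
      }
      await fs.rename(src, dest) // stand-in for the @npmcli/fs moveFile fallback
    }
  }
}

module.exports = moveFileSketch
```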
@ -1,21 +1,18 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const fixOwner = require('./fix-owner')
|
||||
const { withTempDir } = require('@npmcli/fs')
|
||||
const fs = require('fs/promises')
|
||||
const path = require('path')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
const uniqueFilename = require('unique-filename')
|
||||
const { disposer } = require('./disposer')
|
||||
|
||||
module.exports.mkdir = mktmpdir
|
||||
|
||||
function mktmpdir (cache, opts = {}) {
|
||||
async function mktmpdir (cache, opts = {}) {
|
||||
const { tmpPrefix } = opts
|
||||
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), tmpPrefix)
|
||||
return fixOwner.mkdirfix(cache, tmpTarget).then(() => {
|
||||
return tmpTarget
|
||||
})
|
||||
const tmpDir = path.join(cache, 'tmp')
|
||||
await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
|
||||
// do not use path.join(), it drops the trailing / if tmpPrefix is unset
|
||||
const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
|
||||
return fs.mkdtemp(target, { owner: 'inherit' })
|
||||
}
|
||||
|
||||
module.exports.withTmp = withTmp
|
||||
|
@ -25,11 +22,5 @@ function withTmp (cache, opts, cb) {
|
|||
cb = opts
|
||||
opts = {}
|
||||
}
|
||||
return disposer(mktmpdir(cache, opts), rimraf, cb)
|
||||
}
|
||||
|
||||
module.exports.fix = fixtmpdir
|
||||
|
||||
function fixtmpdir (cache) {
|
||||
return fixOwner(cache, path.join(cache, 'tmp'))
|
||||
return withTempDir(path.join(cache, 'tmp'), cb, opts)
|
||||
}
|
||||
|
|
381
lib/verify.js
|
@ -1,35 +1,33 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
|
||||
const {
|
||||
mkdir,
|
||||
readFile,
|
||||
rm,
|
||||
stat,
|
||||
truncate,
|
||||
writeFile,
|
||||
} = require('fs/promises')
|
||||
const pMap = require('p-map')
|
||||
const contentPath = require('./content/path')
|
||||
const fixOwner = require('./util/fix-owner')
|
||||
const fs = require('fs')
|
||||
const fsm = require('fs-minipass')
|
||||
const glob = util.promisify(require('glob'))
|
||||
const glob = require('./util/glob.js')
|
||||
const index = require('./entry-index')
|
||||
const path = require('path')
|
||||
const rimraf = util.promisify(require('rimraf'))
|
||||
const ssri = require('ssri')
|
||||
|
||||
const hasOwnProperty = (obj, key) =>
|
||||
Object.prototype.hasOwnProperty.call(obj, key)
|
||||
|
||||
const stat = util.promisify(fs.stat)
|
||||
const truncate = util.promisify(fs.truncate)
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
const readFile = util.promisify(fs.readFile)
|
||||
|
||||
const verifyOpts = (opts) => ({
|
||||
concurrency: 20,
|
||||
log: { silly () {} },
|
||||
...opts
|
||||
...opts,
|
||||
})
|
||||
|
||||
module.exports = verify
|
||||
|
||||
function verify (cache, opts) {
|
||||
async function verify (cache, opts) {
|
||||
opts = verifyOpts(opts)
|
||||
opts.log.silly('verify', 'verifying cache at', cache)
|
||||
|
||||
|
@ -40,58 +38,48 @@ function verify (cache, opts) {
|
|||
rebuildIndex,
|
||||
cleanTmp,
|
||||
writeVerifile,
|
||||
markEndTime
|
||||
markEndTime,
|
||||
]
|
||||
|
||||
return steps
|
||||
.reduce((promise, step, i) => {
|
||||
const label = step.name
|
||||
const start = new Date()
|
||||
return promise.then((stats) => {
|
||||
return step(cache, opts).then((s) => {
|
||||
s &&
|
||||
Object.keys(s).forEach((k) => {
|
||||
stats[k] = s[k]
|
||||
})
|
||||
const end = new Date()
|
||||
if (!stats.runTime) {
|
||||
stats.runTime = {}
|
||||
}
|
||||
stats.runTime[label] = end - start
|
||||
return Promise.resolve(stats)
|
||||
})
|
||||
const stats = {}
|
||||
for (const step of steps) {
|
||||
const label = step.name
|
||||
const start = new Date()
|
||||
const s = await step(cache, opts)
|
||||
if (s) {
|
||||
Object.keys(s).forEach((k) => {
|
||||
stats[k] = s[k]
|
||||
})
|
||||
}, Promise.resolve({}))
|
||||
.then((stats) => {
|
||||
stats.runTime.total = stats.endTime - stats.startTime
|
||||
opts.log.silly(
|
||||
'verify',
|
||||
'verification finished for',
|
||||
cache,
|
||||
'in',
|
||||
`${stats.runTime.total}ms`
|
||||
)
|
||||
return stats
|
||||
})
|
||||
}
|
||||
const end = new Date()
|
||||
if (!stats.runTime) {
|
||||
stats.runTime = {}
|
||||
}
|
||||
stats.runTime[label] = end - start
|
||||
}
|
||||
stats.runTime.total = stats.endTime - stats.startTime
|
||||
opts.log.silly(
|
||||
'verify',
|
||||
'verification finished for',
|
||||
cache,
|
||||
'in',
|
||||
`${stats.runTime.total}ms`
|
||||
)
|
||||
return stats
|
||||
}
|
||||
|
||||
function markStartTime (cache, opts) {
|
||||
return Promise.resolve({ startTime: new Date() })
|
||||
async function markStartTime (cache, opts) {
|
||||
return { startTime: new Date() }
|
||||
}
|
||||
|
||||
function markEndTime (cache, opts) {
|
||||
return Promise.resolve({ endTime: new Date() })
|
||||
async function markEndTime (cache, opts) {
|
||||
return { endTime: new Date() }
|
||||
}
|
||||
|
||||
function fixPerms (cache, opts) {
|
||||
async function fixPerms (cache, opts) {
|
||||
opts.log.silly('verify', 'fixing cache permissions')
|
||||
return fixOwner
|
||||
.mkdirfix(cache, cache)
|
||||
.then(() => {
|
||||
// TODO - fix file permissions too
|
||||
return fixOwner.chownr(cache, cache)
|
||||
})
|
||||
.then(() => null)
|
||||
await mkdir(cache, { recursive: true })
|
||||
return null
|
||||
}
|
||||
|
||||
// Implements a naive mark-and-sweep tracing garbage collector.
|
||||
|
@ -101,9 +89,9 @@ function fixPerms (cache, opts) {
|
|||
// 2. Mark each integrity value as "live"
|
||||
// 3. Read entire filesystem tree in `content-vX/` dir
|
||||
// 4. If content is live, verify its checksum and delete it if it fails
|
||||
// 5. If content is not marked as live, rimraf it.
|
||||
// 5. If content is not marked as live, rm it.
|
||||
//
|
||||
function garbageCollect (cache, opts) {
|
||||
async function garbageCollect (cache, opts) {
|
||||
opts.log.silly('verify', 'garbage collecting content')
|
||||
const indexStream = index.lsStream(cache)
|
||||
const liveContent = new Set()
|
||||
|
@ -111,177 +99,154 @@ function garbageCollect (cache, opts) {
|
|||
if (opts.filter && !opts.filter(entry)) {
|
||||
return
|
||||
}
|
||||
|
||||
liveContent.add(entry.integrity.toString())
|
||||
})
|
||||
return new Promise((resolve, reject) => {
|
||||
await new Promise((resolve, reject) => {
|
||||
indexStream.on('end', resolve).on('error', reject)
|
||||
}).then(() => {
|
||||
const contentDir = contentPath.contentDir(cache)
|
||||
return glob(path.join(contentDir, '**'), {
|
||||
follow: false,
|
||||
nodir: true,
|
||||
nosort: true
|
||||
}).then((files) => {
|
||||
return Promise.resolve({
|
||||
verifiedContent: 0,
|
||||
reclaimedCount: 0,
|
||||
reclaimedSize: 0,
|
||||
badContentCount: 0,
|
||||
keptSize: 0
|
||||
}).then((stats) =>
|
||||
pMap(
|
||||
files,
|
||||
(f) => {
|
||||
const split = f.split(/[/\\]/)
|
||||
const digest = split.slice(split.length - 3).join('')
|
||||
const algo = split[split.length - 4]
|
||||
const integrity = ssri.fromHex(digest, algo)
|
||||
if (liveContent.has(integrity.toString())) {
|
||||
return verifyContent(f, integrity).then((info) => {
|
||||
if (!info.valid) {
|
||||
stats.reclaimedCount++
|
||||
stats.badContentCount++
|
||||
stats.reclaimedSize += info.size
|
||||
} else {
|
||||
stats.verifiedContent++
|
||||
stats.keptSize += info.size
|
||||
}
|
||||
return stats
|
||||
})
|
||||
} else {
|
||||
// No entries refer to this content. We can delete.
|
||||
stats.reclaimedCount++
|
||||
return stat(f).then((s) => {
|
||||
return rimraf(f).then(() => {
|
||||
stats.reclaimedSize += s.size
|
||||
return stats
|
||||
})
|
||||
})
|
||||
}
|
||||
},
|
||||
{ concurrency: opts.concurrency }
|
||||
).then(() => stats)
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function verifyContent (filepath, sri) {
|
||||
return stat(filepath)
|
||||
.then((s) => {
|
||||
const contentInfo = {
|
||||
size: s.size,
|
||||
valid: true
|
||||
}
|
||||
return ssri
|
||||
.checkStream(new fsm.ReadStream(filepath), sri)
|
||||
.catch((err) => {
|
||||
if (err.code !== 'EINTEGRITY') {
|
||||
throw err
|
||||
}
|
||||
return rimraf(filepath).then(() => {
|
||||
contentInfo.valid = false
|
||||
})
|
||||
})
|
||||
.then(() => contentInfo)
|
||||
})
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
return { size: 0, valid: false }
|
||||
}
|
||||
throw err
|
||||
})
|
||||
}
|
||||
|
||||
function rebuildIndex (cache, opts) {
|
||||
opts.log.silly('verify', 'rebuilding index')
|
||||
return index.ls(cache).then((entries) => {
|
||||
const stats = {
|
||||
missingContent: 0,
|
||||
rejectedEntries: 0,
|
||||
totalEntries: 0
|
||||
}
|
||||
const buckets = {}
|
||||
for (const k in entries) {
|
||||
/* istanbul ignore else */
|
||||
if (hasOwnProperty(entries, k)) {
|
||||
const hashed = index.hashKey(k)
|
||||
const entry = entries[k]
|
||||
const excluded = opts.filter && !opts.filter(entry)
|
||||
excluded && stats.rejectedEntries++
|
||||
if (buckets[hashed] && !excluded) {
|
||||
buckets[hashed].push(entry)
|
||||
} else if (buckets[hashed] && excluded) {
|
||||
// skip
|
||||
} else if (excluded) {
|
||||
buckets[hashed] = []
|
||||
buckets[hashed]._path = index.bucketPath(cache, k)
|
||||
const contentDir = contentPath.contentDir(cache)
|
||||
const files = await glob(path.join(contentDir, '**'), {
|
||||
follow: false,
|
||||
nodir: true,
|
||||
nosort: true,
|
||||
})
|
||||
const stats = {
|
||||
verifiedContent: 0,
|
||||
reclaimedCount: 0,
|
||||
reclaimedSize: 0,
|
||||
badContentCount: 0,
|
||||
keptSize: 0,
|
||||
}
|
||||
await pMap(
|
||||
files,
|
||||
async (f) => {
|
||||
const split = f.split(/[/\\]/)
|
||||
const digest = split.slice(split.length - 3).join('')
|
||||
const algo = split[split.length - 4]
|
||||
const integrity = ssri.fromHex(digest, algo)
|
||||
if (liveContent.has(integrity.toString())) {
|
||||
const info = await verifyContent(f, integrity)
|
||||
if (!info.valid) {
|
||||
stats.reclaimedCount++
|
||||
stats.badContentCount++
|
||||
stats.reclaimedSize += info.size
|
||||
} else {
|
||||
buckets[hashed] = [entry]
|
||||
buckets[hashed]._path = index.bucketPath(cache, k)
|
||||
stats.verifiedContent++
|
||||
stats.keptSize += info.size
|
||||
}
|
||||
} else {
|
||||
// No entries refer to this content. We can delete.
|
||||
stats.reclaimedCount++
|
||||
const s = await stat(f)
|
||||
await rm(f, { recursive: true, force: true })
|
||||
stats.reclaimedSize += s.size
|
||||
}
|
||||
}
|
||||
return pMap(
|
||||
Object.keys(buckets),
|
||||
(key) => {
|
||||
return rebuildBucket(cache, buckets[key], stats, opts)
|
||||
},
|
||||
{ concurrency: opts.concurrency }
|
||||
).then(() => stats)
|
||||
})
|
||||
return stats
|
||||
},
|
||||
{ concurrency: opts.concurrency }
|
||||
)
|
||||
return stats
|
||||
}
|
||||
|
||||
function rebuildBucket (cache, bucket, stats, opts) {
|
||||
return truncate(bucket._path).then(() => {
|
||||
// This needs to be serialized because cacache explicitly
|
||||
// lets very racy bucket conflicts clobber each other.
|
||||
return bucket.reduce((promise, entry) => {
|
||||
return promise.then(() => {
|
||||
const content = contentPath(cache, entry.integrity)
|
||||
return stat(content)
|
||||
.then(() => {
|
||||
return index
|
||||
.insert(cache, entry.key, entry.integrity, {
|
||||
metadata: entry.metadata,
|
||||
size: entry.size
|
||||
})
|
||||
.then(() => {
|
||||
stats.totalEntries++
|
||||
})
|
||||
})
|
||||
.catch((err) => {
|
||||
if (err.code === 'ENOENT') {
|
||||
stats.rejectedEntries++
|
||||
stats.missingContent++
|
||||
return
|
||||
}
|
||||
throw err
|
||||
})
|
||||
async function verifyContent (filepath, sri) {
|
||||
const contentInfo = {}
|
||||
try {
|
||||
const { size } = await stat(filepath)
|
||||
contentInfo.size = size
|
||||
contentInfo.valid = true
|
||||
await ssri.checkStream(new fsm.ReadStream(filepath), sri)
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return { size: 0, valid: false }
|
||||
}
|
||||
if (err.code !== 'EINTEGRITY') {
|
||||
throw err
|
||||
}
|
||||
|
||||
await rm(filepath, { recursive: true, force: true })
|
||||
contentInfo.valid = false
|
||||
}
|
||||
return contentInfo
|
||||
}
|
||||
|
||||
async function rebuildIndex (cache, opts) {
|
||||
opts.log.silly('verify', 'rebuilding index')
|
||||
const entries = await index.ls(cache)
|
||||
const stats = {
|
||||
missingContent: 0,
|
||||
rejectedEntries: 0,
|
||||
totalEntries: 0,
|
||||
}
|
||||
const buckets = {}
|
||||
for (const k in entries) {
|
||||
/* istanbul ignore else */
|
||||
if (hasOwnProperty(entries, k)) {
|
||||
const hashed = index.hashKey(k)
|
||||
const entry = entries[k]
|
||||
const excluded = opts.filter && !opts.filter(entry)
|
||||
excluded && stats.rejectedEntries++
|
||||
if (buckets[hashed] && !excluded) {
|
||||
buckets[hashed].push(entry)
|
||||
} else if (buckets[hashed] && excluded) {
|
||||
// skip
|
||||
} else if (excluded) {
|
||||
buckets[hashed] = []
|
||||
buckets[hashed]._path = index.bucketPath(cache, k)
|
||||
} else {
|
||||
buckets[hashed] = [entry]
|
||||
buckets[hashed]._path = index.bucketPath(cache, k)
|
||||
}
|
||||
}
|
||||
}
|
||||
await pMap(
|
||||
Object.keys(buckets),
|
||||
(key) => {
|
||||
return rebuildBucket(cache, buckets[key], stats, opts)
|
||||
},
|
||||
{ concurrency: opts.concurrency }
|
||||
)
|
||||
return stats
|
||||
}
|
||||
|
||||
async function rebuildBucket (cache, bucket, stats, opts) {
|
||||
await truncate(bucket._path)
|
||||
// This needs to be serialized because cacache explicitly
|
||||
// lets very racy bucket conflicts clobber each other.
|
||||
for (const entry of bucket) {
|
||||
const content = contentPath(cache, entry.integrity)
|
||||
try {
|
||||
await stat(content)
|
||||
await index.insert(cache, entry.key, entry.integrity, {
|
||||
metadata: entry.metadata,
|
||||
size: entry.size,
|
||||
})
|
||||
}, Promise.resolve())
|
||||
})
|
||||
stats.totalEntries++
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
stats.rejectedEntries++
|
||||
stats.missingContent++
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function cleanTmp (cache, opts) {
|
||||
opts.log.silly('verify', 'cleaning tmp directory')
|
||||
return rimraf(path.join(cache, 'tmp'))
|
||||
return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
|
||||
}
|
||||
|
||||
function writeVerifile (cache, opts) {
|
||||
async function writeVerifile (cache, opts) {
|
||||
const verifile = path.join(cache, '_lastverified')
|
||||
opts.log.silly('verify', 'writing verifile to ' + verifile)
|
||||
try {
|
||||
return writeFile(verifile, '' + +new Date())
|
||||
} finally {
|
||||
fixOwner.chownr.sync(cache, verifile)
|
||||
}
|
||||
return writeFile(verifile, `${Date.now()}`)
|
||||
}
|
||||
|
||||
module.exports.lastRun = lastRun
|
||||
|
||||
function lastRun (cache) {
|
||||
return readFile(path.join(cache, '_lastverified'), 'utf8').then(
|
||||
(data) => new Date(+data)
|
||||
)
|
||||
async function lastRun (cache) {
|
||||
const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
|
||||
return new Date(+data)
|
||||
}
|
||||
|
|
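The numbered comment near the top of the verify.js diff lays out the garbage-collection pass: walk the index to mark every live integrity value, then walk the content tree, re-verifying live files and reclaiming everything else. A toy, in-memory illustration of that mark-and-sweep bookkeeping (hypothetical data shapes, no filesystem access):

```js
'use strict'

// Mark-and-sweep over in-memory stand-ins for the index and content store.
function garbageCollectSketch (indexEntries, contentFiles, verifyIntegrity) {
  const live = new Set(indexEntries.map((entry) => entry.integrity)) // mark phase
  const stats = { verifiedContent: 0, badContentCount: 0, reclaimedCount: 0 }

  for (const file of contentFiles) { // sweep phase
    if (!live.has(file.integrity)) {
      stats.reclaimedCount++ // nothing in the index points at it
      continue
    }
    if (verifyIntegrity(file)) {
      stats.verifiedContent++
    } else {
      stats.badContentCount++ // live but corrupt: reclaimed as well
      stats.reclaimedCount++
    }
  }
  return stats
}

// usage with made-up data
console.log(garbageCollectSketch(
  [{ key: 'a', integrity: 'sha512-aaa' }],
  [
    { path: 'content/sha512/aa/aaa', integrity: 'sha512-aaa' },
    { path: 'content/sha512/zz/zzz', integrity: 'sha512-zzz' },
  ],
  () => true
))
```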
6
ls.js
|
@ -1,6 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const index = require('./lib/entry-index')
|
||||
|
||||
module.exports = index.ls
|
||||
module.exports.stream = index.lsStream
|
|
@ -1,15 +0,0 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,48 +0,0 @@
|
|||
# minipass-collect
|
||||
|
||||
A Minipass stream that collects all the data into a single chunk
|
||||
|
||||
Note that this buffers ALL data written to it, so it's only good for
|
||||
situations where you are sure the entire stream fits in memory.
|
||||
|
||||
Note: this is primarily useful for the `Collect.PassThrough` class, since
|
||||
Minipass streams already have a `.collect()` method which returns a promise
|
||||
that resolves to the array of chunks, and a `.concat()` method that returns
|
||||
the data concatenated into a single Buffer or String.
|
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
const Collect = require('minipass-collect')
|
||||
|
||||
const collector = new Collect()
|
||||
collector.on('data', allTheData => {
|
||||
console.log('all the data!', allTheData)
|
||||
})
|
||||
|
||||
someSourceOfData.pipe(collector)
|
||||
|
||||
// note that you can also simply do:
|
||||
someSourceOfData.pipe(new Minipass()).concat().then(data => ...)
|
||||
// or even, if someSourceOfData is a Minipass:
|
||||
someSourceOfData.concat().then(data => ...)
|
||||
// but you might prefer to have it stream-shaped rather than
|
||||
// Promise-shaped in some scenarios.
|
||||
```
|
||||
|
||||
If you want to collect the data, but _also_ act as a passthrough stream,
|
||||
then use `Collect.PassThrough` instead (for example to memoize streaming
|
||||
responses), and listen on the `collect` event.
|
||||
|
||||
```js
|
||||
const Collect = require('minipass-collect')
|
||||
|
||||
const collector = new Collect.PassThrough()
|
||||
collector.on('collect', allTheData => {
|
||||
console.log('all the data!', allTheData)
|
||||
})
|
||||
|
||||
someSourceOfData.pipe(collector).pipe(someOtherStream)
|
||||
```
|
||||
|
||||
All [minipass options](http://npm.im/minipass) are supported.
|
|
@ -1,71 +0,0 @@
|
|||
const Minipass = require('minipass')
|
||||
const _data = Symbol('_data')
|
||||
const _length = Symbol('_length')
|
||||
class Collect extends Minipass {
|
||||
constructor (options) {
|
||||
super(options)
|
||||
this[_data] = []
|
||||
this[_length] = 0
|
||||
}
|
||||
write (chunk, encoding, cb) {
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
const c = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding)
|
||||
this[_data].push(c)
|
||||
this[_length] += c.length
|
||||
if (cb)
|
||||
cb()
|
||||
return true
|
||||
}
|
||||
end (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function')
|
||||
cb = chunk, chunk = null
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
if (chunk)
|
||||
this.write(chunk, encoding)
|
||||
const result = Buffer.concat(this[_data], this[_length])
|
||||
super.write(result)
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
module.exports = Collect
|
||||
|
||||
// it would be possible to DRY this a bit by doing something like
|
||||
// this.collector = new Collect() and listening on its data event,
|
||||
// but it's not much code, and we may as well save the extra obj
|
||||
class CollectPassThrough extends Minipass {
|
||||
constructor (options) {
|
||||
super(options)
|
||||
this[_data] = []
|
||||
this[_length] = 0
|
||||
}
|
||||
write (chunk, encoding, cb) {
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
const c = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk, encoding)
|
||||
this[_data].push(c)
|
||||
this[_length] += c.length
|
||||
return super.write(chunk, encoding, cb)
|
||||
}
|
||||
end (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'function')
|
||||
cb = chunk, chunk = null
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
if (chunk)
|
||||
this.write(chunk, encoding)
|
||||
const result = Buffer.concat(this[_data], this[_length])
|
||||
this.emit('collect', result)
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
module.exports.PassThrough = CollectPassThrough
|
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"name": "minipass-collect",
|
||||
"version": "1.0.2",
|
||||
"description": "A Minipass stream that collects all the data into a single chunk",
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
|
||||
"license": "ISC",
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"devDependencies": {
|
||||
"tap": "^14.6.9"
|
||||
},
|
||||
"dependencies": {
|
||||
"minipass": "^3.0.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
# ignore most things, include some others
|
||||
/*
|
||||
/.*
|
||||
|
||||
!bin/
|
||||
!lib/
|
||||
!docs/
|
||||
!package.json
|
||||
!package-lock.json
|
||||
!README.md
|
||||
!CONTRIBUTING.md
|
||||
!LICENSE
|
||||
!CHANGELOG.md
|
||||
!example/
|
||||
!scripts/
|
||||
!tap-snapshots/
|
||||
!test/
|
||||
!.travis.yml
|
||||
!.gitignore
|
||||
!.gitattributes
|
||||
!coverage-map.js
|
||||
!index.js
|
|
@ -1,15 +0,0 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,47 +0,0 @@
|
|||
# minipass-flush
|
||||
|
||||
A Minipass stream that calls a flush function before emitting 'end'
|
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
const Flush = require('minipass-flush')
|
||||
const f = new Flush({
|
||||
flush (cb) {
|
||||
// call the cb when done, or return a promise
|
||||
// the 'end' event will wait for it, along with
|
||||
// close, finish, and prefinish.
|
||||
// call the cb with an error, or return a rejecting
|
||||
// promise to emit 'error' instead of doing the 'end'
|
||||
return rerouteAllEncryptions().then(() => clearAllChannels())
|
||||
},
|
||||
// all other minipass options accepted as well
|
||||
})
|
||||
|
||||
someDataSource.pipe(f).on('end', () => {
|
||||
// proper flushing has been accomplished
|
||||
})
|
||||
|
||||
// Or as a subclass implementing a 'flush' method:
|
||||
class MyFlush extends Flush {
|
||||
flush (cb) {
|
||||
// old fashioned callback style!
|
||||
rerouteAllEncryptions(er => {
|
||||
if (er)
|
||||
return cb(er)
|
||||
clearAllChannels(er => {
|
||||
if (er)
|
||||
cb(er)
|
||||
cb()
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
That's about it.
|
||||
|
||||
If your `flush` method doesn't have to do anything asynchronous, then it's
|
||||
better to call the callback right away in this tick, rather than returning
|
||||
`Promise.resolve()`, so that the `end` event can happen as soon as
|
||||
possible.
|
|
@ -1,39 +0,0 @@
|
|||
const Minipass = require('minipass')
|
||||
const _flush = Symbol('_flush')
|
||||
const _flushed = Symbol('_flushed')
|
||||
const _flushing = Symbol('_flushing')
|
||||
class Flush extends Minipass {
|
||||
constructor (opt = {}) {
|
||||
if (typeof opt === 'function')
|
||||
opt = { flush: opt }
|
||||
|
||||
super(opt)
|
||||
|
||||
// or extend this class and provide a 'flush' method in your subclass
|
||||
if (typeof opt.flush !== 'function' && typeof this.flush !== 'function')
|
||||
throw new TypeError('must provide flush function in options')
|
||||
|
||||
this[_flush] = opt.flush || this.flush
|
||||
}
|
||||
|
||||
emit (ev, ...data) {
|
||||
if ((ev !== 'end' && ev !== 'finish') || this[_flushed])
|
||||
return super.emit(ev, ...data)
|
||||
|
||||
if (this[_flushing])
|
||||
return
|
||||
|
||||
this[_flushing] = true
|
||||
|
||||
const afterFlush = er => {
|
||||
this[_flushed] = true
|
||||
er ? super.emit('error', er) : super.emit('end')
|
||||
}
|
||||
|
||||
const ret = this[_flush](afterFlush)
|
||||
if (ret && ret.then)
|
||||
ret.then(() => afterFlush(), er => afterFlush(er))
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Flush
|
File diff suppressed because it is too large
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"name": "minipass-flush",
|
||||
"version": "1.0.5",
|
||||
"description": "A Minipass stream that calls a flush function before emitting 'end'",
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
|
||||
"license": "ISC",
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"devDependencies": {
|
||||
"tap": "^14.6.9"
|
||||
},
|
||||
"dependencies": {
|
||||
"minipass": "^3.0.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"main": "index.js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/isaacs/minipass-flush.git"
|
||||
},
|
||||
"keywords": [
|
||||
"minipass",
|
||||
"flush",
|
||||
"stream"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
}
|
|
@ -1,140 +0,0 @@
|
|||
const Flush = require('../')
|
||||
const t = require('tap')
|
||||
|
||||
t.test('flush option, ok, cb', t => {
|
||||
let flushCalled = false
|
||||
const f = new Flush((cb) => {
|
||||
t.equal(flushCalled, false, 'call flush one time')
|
||||
flushCalled = true
|
||||
return cb()
|
||||
})
|
||||
f.setEncoding('utf8')
|
||||
f.on('end', () => {
|
||||
t.equal(flushCalled, true, 'called flush before end event')
|
||||
t.equal(sawData, true, 'saw data')
|
||||
t.end()
|
||||
})
|
||||
let sawData = false
|
||||
f.on('data', d => {
|
||||
sawData = true
|
||||
t.equal(d, 'foo')
|
||||
})
|
||||
f.end('foo')
|
||||
})
|
||||
|
||||
t.test('flush option, ok, promise', t => {
|
||||
let flushCalled = false
|
||||
const f = new Flush({
|
||||
encoding: 'utf8',
|
||||
flush () {
|
||||
t.equal(flushCalled, false, 'call flush one time')
|
||||
flushCalled = true
|
||||
return Promise.resolve(true)
|
||||
}
|
||||
})
|
||||
f.on('end', () => {
|
||||
t.equal(flushCalled, true, 'called flush before end event')
|
||||
t.equal(sawData, true, 'saw data')
|
||||
t.end()
|
||||
})
|
||||
let sawData = false
|
||||
f.on('data', d => {
|
||||
sawData = true
|
||||
t.equal(d, 'foo')
|
||||
})
|
||||
f.end('foo')
|
||||
})
|
||||
|
||||
t.test('flush option, not ok, cb', t => {
|
||||
let flushCalled = false
|
||||
const poop = new Error('poop')
|
||||
// can override subclass's flush with an option
|
||||
const f = new (class extends Flush {
|
||||
flush (cb) {
|
||||
t.fail('should not call this flush function')
|
||||
}
|
||||
})({
|
||||
encoding: 'utf8',
|
||||
flush (cb) {
|
||||
t.equal(flushCalled, false, 'call flush one time')
|
||||
flushCalled = true
|
||||
return cb(poop)
|
||||
},
|
||||
})
|
||||
|
||||
f.on('error', er => {
|
||||
t.equal(sawData, true, 'saw data')
|
||||
t.equal(flushCalled, true, 'called flush before error event')
|
||||
t.equal(er, poop, 'flush error was raised')
|
||||
t.end()
|
||||
})
|
||||
let sawData = false
|
||||
f.on('data', d => {
|
||||
sawData = true
|
||||
t.equal(d, 'foo')
|
||||
})
|
||||
f.end('foo')
|
||||
})
|
||||
|
||||
t.test('flush option, not ok, promise', t => {
|
||||
let flushCalled = false
|
||||
const poop = new Error('poop')
|
||||
|
||||
// extending a subclass with a flush() method works the same way
|
||||
const f = new (class extends Flush {
|
||||
flush () {
|
||||
t.equal(flushCalled, false, 'call flush one time')
|
||||
flushCalled = true
|
||||
return Promise.reject(poop)
|
||||
}
|
||||
})()
|
||||
f.setEncoding('utf8')
|
||||
|
||||
f.on('error', er => {
|
||||
t.equal(flushCalled, true, 'called flush before error event')
|
||||
t.equal(er, poop, 'flush error was raised')
|
||||
t.equal(sawData, true, 'saw data')
|
||||
t.end()
|
||||
})
|
||||
let sawData = false
|
||||
f.on('data', d => {
|
||||
sawData = true
|
||||
t.equal(d, 'foo')
|
||||
})
|
||||
f.end('foo')
|
||||
})
|
||||
|
||||
t.test('missing flush option throws', t => {
|
||||
t.throws(() => new Flush({}), {
|
||||
message: 'must provide flush function in options'
|
||||
})
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('only flush once', t => {
|
||||
const f = new (class extends Flush {
|
||||
flush (cb) {
|
||||
if (this.flushCalled)
|
||||
cb(new Error('called flush more than once'))
|
||||
this.flushCalled = true
|
||||
// why would you do this even, it's a very bad idea!
|
||||
this.emit('end')
|
||||
cb()
|
||||
}
|
||||
})
|
||||
|
||||
f.end()
|
||||
|
||||
let sawEnd = false
|
||||
f.on('end', () => {
|
||||
t.pass('re-emitted end')
|
||||
t.notOk(sawEnd, 'this should be the first time seeing end')
|
||||
sawEnd = true
|
||||
})
|
||||
t.ok(sawEnd, 'should have emitted the first time')
|
||||
f.on('end', () => {
|
||||
t.ok(sawEnd, 'this happens after')
|
||||
t.pass('re-emitted end again')
|
||||
t.end()
|
||||
})
|
||||
})
|
|
@ -1,15 +0,0 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,69 +0,0 @@
|
|||
# minipass-pipeline
|
||||
|
||||
Create a pipeline of streams using Minipass.
|
||||
|
||||
Calls `.pipe()` on all the streams in the list. Returns a stream where
|
||||
writes go to the first pipe in the chain, and reads are from the last.
|
||||
|
||||
Errors are proxied along the chain and emitted on the Pipeline stream.
|
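As a minimal sketch of the error behavior (assuming `input`, `transform`, and `output` are streams you already have), one `'error'` handler on the pipeline observes failures from any stage:

```js
const Pipeline = require('minipass-pipeline')
const p = new Pipeline(input, transform, output)
// an 'error' emitted by input, transform, or output surfaces here
p.on('error', er => console.error('pipeline failed:', er))
```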
||||
|
||||
## USAGE
|
||||
|
||||
```js
|
||||
const Pipeline = require('minipass-pipeline')
|
||||
|
||||
// the list of streams to pipeline together,
|
||||
// a bit like `input | transform | output` in bash
|
||||
const p = new Pipeline(input, transform, output)
|
||||
|
||||
p.write('foo') // writes to input
|
||||
p.on('data', chunk => doSomething()) // reads from output stream
|
||||
|
||||
// less contrived example (but still pretty contrived)...
|
||||
const decode = new bunzipDecoder()
|
||||
const unpack = tar.extract({ cwd: 'target-dir' })
|
||||
const tbz = new Pipeline(decode, unpack)
|
||||
|
||||
fs.createReadStream('archive.tbz').pipe(tbz)
|
||||
|
||||
// specify any minipass options if you like, as the first argument
|
||||
// it'll only try to pipeline event emitters with a .pipe() method
|
||||
const p = new Pipeline({ objectMode: true }, input, transform, output)
|
||||
|
||||
// If you don't know the things to pipe in right away, that's fine.
|
||||
// use p.push(stream) to add to the end, or p.unshift(stream) to the front
|
||||
const databaseDecoderStreamDoohickey = (connectionInfo) => {
|
||||
const p = new Pipeline()
|
||||
logIntoDatabase(connectionInfo).then(connection => {
|
||||
initializeDecoderRing(connectionInfo).then(decoderRing => {
|
||||
p.push(connection, decoderRing)
|
||||
getUpstreamSource(upstream => {
|
||||
p.unshift(upstream)
|
||||
})
|
||||
})
|
||||
})
|
||||
// return to caller right away
|
||||
// emitted data will be upstream -> connection -> decoderRing pipeline
|
||||
return p
|
||||
}
|
||||
```
|
||||
|
||||
Pipeline is a [minipass](http://npm.im/minipass) stream, so it's as
|
||||
synchronous as the streams it wraps. It will buffer data until there is a
|
||||
reader, but no longer, so make sure to attach your listeners before you
|
||||
pipe it somewhere else.
|
||||
|
||||
## `new Pipeline(opts = {}, ...streams)`
|
||||
|
||||
Create a new Pipeline with the specified Minipass options and any streams
|
||||
provided.
|
||||
|
||||
## `pipeline.push(stream, ...)`
|
||||
|
||||
Attach one or more streams to the pipeline at the end (read) side of the
|
||||
pipe chain.
|
||||
|
||||
## `pipeline.unshift(stream, ...)`
|
||||
|
||||
Attach one or more streams to the pipeline at the start (write) side of the
|
||||
pipe chain.
|
|
@ -1,128 +0,0 @@
|
|||
const Minipass = require('minipass')
|
||||
const EE = require('events')
|
||||
const isStream = s => s && s instanceof EE && (
|
||||
typeof s.pipe === 'function' || // readable
|
||||
(typeof s.write === 'function' && typeof s.end === 'function') // writable
|
||||
)
|
||||
|
||||
const _head = Symbol('_head')
|
||||
const _tail = Symbol('_tail')
|
||||
const _linkStreams = Symbol('_linkStreams')
|
||||
const _setHead = Symbol('_setHead')
|
||||
const _setTail = Symbol('_setTail')
|
||||
const _onError = Symbol('_onError')
|
||||
const _onData = Symbol('_onData')
|
||||
const _onEnd = Symbol('_onEnd')
|
||||
const _onDrain = Symbol('_onDrain')
|
||||
const _streams = Symbol('_streams')
|
||||
class Pipeline extends Minipass {
|
||||
constructor (opts, ...streams) {
|
||||
if (isStream(opts)) {
|
||||
streams.unshift(opts)
|
||||
opts = {}
|
||||
}
|
||||
|
||||
super(opts)
|
||||
this[_streams] = []
|
||||
if (streams.length)
|
||||
this.push(...streams)
|
||||
}
|
||||
|
||||
[_linkStreams] (streams) {
|
||||
// reduce takes (left,right), and we return right to make it the
|
||||
// new left value.
|
||||
return streams.reduce((src, dest) => {
|
||||
src.on('error', er => dest.emit('error', er))
|
||||
src.pipe(dest)
|
||||
return dest
|
||||
})
|
||||
}
|
||||
|
||||
push (...streams) {
|
||||
this[_streams].push(...streams)
|
||||
if (this[_tail])
|
||||
streams.unshift(this[_tail])
|
||||
|
||||
const linkRet = this[_linkStreams](streams)
|
||||
|
||||
this[_setTail](linkRet)
|
||||
if (!this[_head])
|
||||
this[_setHead](streams[0])
|
||||
}
|
||||
|
||||
unshift (...streams) {
|
||||
this[_streams].unshift(...streams)
|
||||
if (this[_head])
|
||||
streams.push(this[_head])
|
||||
|
||||
const linkRet = this[_linkStreams](streams)
|
||||
this[_setHead](streams[0])
|
||||
if (!this[_tail])
|
||||
this[_setTail](linkRet)
|
||||
}
|
||||
|
||||
destroy (er) {
|
||||
// set fire to the whole thing.
|
||||
this[_streams].forEach(s =>
|
||||
typeof s.destroy === 'function' && s.destroy())
|
||||
return super.destroy(er)
|
||||
}
|
||||
|
||||
// readable interface -> tail
|
||||
[_setTail] (stream) {
|
||||
this[_tail] = stream
|
||||
stream.on('error', er => this[_onError](stream, er))
|
||||
stream.on('data', chunk => this[_onData](stream, chunk))
|
||||
stream.on('end', () => this[_onEnd](stream))
|
||||
stream.on('finish', () => this[_onEnd](stream))
|
||||
}
|
||||
|
||||
// errors proxied down the pipeline
|
||||
// they're considered part of the "read" interface
|
||||
[_onError] (stream, er) {
|
||||
if (stream === this[_tail])
|
||||
this.emit('error', er)
|
||||
}
|
||||
[_onData] (stream, chunk) {
|
||||
if (stream === this[_tail])
|
||||
super.write(chunk)
|
||||
}
|
||||
[_onEnd] (stream) {
|
||||
if (stream === this[_tail])
|
||||
super.end()
|
||||
}
|
||||
pause () {
|
||||
super.pause()
|
||||
return this[_tail] && this[_tail].pause && this[_tail].pause()
|
||||
}
|
||||
|
||||
// NB: Minipass calls its internal private [RESUME] method during
|
||||
// pipe drains, to avoid hazards where stream.resume() is overridden.
|
||||
// Thus, we need to listen to the resume *event*, not override the
|
||||
// resume() method, and proxy *that* to the tail.
|
||||
emit (ev, ...args) {
|
||||
if (ev === 'resume' && this[_tail] && this[_tail].resume)
|
||||
this[_tail].resume()
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
|
||||
// writable interface -> head
|
||||
[_setHead] (stream) {
|
||||
this[_head] = stream
|
||||
stream.on('drain', () => this[_onDrain](stream))
|
||||
}
|
||||
[_onDrain] (stream) {
|
||||
if (stream === this[_head])
|
||||
this.emit('drain')
|
||||
}
|
||||
write (chunk, enc, cb) {
|
||||
return this[_head].write(chunk, enc, cb) &&
|
||||
(this.flowing || this.buffer.length === 0)
|
||||
}
|
||||
end (chunk, enc, cb) {
|
||||
this[_head].end(chunk, enc, cb)
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Pipeline
|
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"name": "minipass-pipeline",
|
||||
"version": "1.2.4",
|
||||
"description": "create a pipeline of streams using Minipass",
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
|
||||
"license": "ISC",
|
||||
"scripts": {
|
||||
"test": "tap",
|
||||
"snap": "tap",
|
||||
"preversion": "npm test",
|
||||
"postversion": "npm publish",
|
||||
"postpublish": "git push origin --follow-tags"
|
||||
},
|
||||
"tap": {
|
||||
"check-coverage": true
|
||||
},
|
||||
"devDependencies": {
|
||||
"tap": "^14.6.9"
|
||||
},
|
||||
"dependencies": {
|
||||
"minipass": "^3.0.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
}
|
|
@ -1,4 +0,0 @@
|
|||
.*.swp
|
||||
node_modules
|
||||
.nyc_output/
|
||||
coverage/
|
|
@ -1,5 +0,0 @@
|
|||
language: node_js
|
||||
sudo: false
|
||||
node_js:
|
||||
- 12
|
||||
- 10
|
|
@ -1,15 +0,0 @@
|
|||
The ISC License
|
||||
|
||||
Copyright (c) npm, Inc. and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
@ -1,613 +0,0 @@
|
|||
# minipass
|
||||
|
||||
A _very_ minimal implementation of a [PassThrough
|
||||
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
|
||||
|
||||
[It's very
|
||||
fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
|
||||
for objects, strings, and buffers.
|
||||
|
||||
Supports pipe()ing (including multi-pipe() and backpressure transmission),
|
||||
buffering data until either a `data` event handler or `pipe()` is added (so
|
||||
you don't lose the first chunk), and most other cases where PassThrough is
|
||||
a good idea.
|
||||
|
||||
There is a `read()` method, but it's much more efficient to consume data
|
||||
from this stream via `'data'` events or by calling `pipe()` into some other
|
||||
stream. Calling `read()` requires the buffer to be flattened in some
|
||||
cases, which requires copying memory.
|
||||
|
||||
There is also no `unpipe()` method. Once you start piping, there is no
|
||||
stopping it!
|
||||
|
||||
If you set `objectMode: true` in the options, then whatever is written will
|
||||
be emitted. Otherwise, it'll do a minimal amount of Buffer copying to
|
||||
ensure proper Streams semantics when `read(n)` is called.
|
||||
|
||||
`objectMode` can also be set by doing `stream.objectMode = true`, or by
|
||||
writing any non-string/non-buffer data. `objectMode` cannot be set to
|
||||
false once it is set.
|
||||
|
||||
This is not a `through` or `through2` stream. It doesn't transform the
|
||||
data, it just passes it right through. If you want to transform the data,
|
||||
extend the class, and override the `write()` method. Once you're done
|
||||
transforming the data however you want, call `super.write()` with the
|
||||
transform output.
|
||||
|
||||
For some examples of streams that extend Minipass in various ways, check
|
||||
out:
|
||||
|
||||
- [minizlib](http://npm.im/minizlib)
|
||||
- [fs-minipass](http://npm.im/fs-minipass)
|
||||
- [tar](http://npm.im/tar)
|
||||
- [minipass-collect](http://npm.im/minipass-collect)
|
||||
- [minipass-flush](http://npm.im/minipass-flush)
|
||||
- [minipass-pipeline](http://npm.im/minipass-pipeline)
|
||||
- [tap](http://npm.im/tap)
|
||||
- [tap-parser](http://npm.im/tap)
|
||||
- [treport](http://npm.im/tap)
|
||||
- [minipass-fetch](http://npm.im/minipass-fetch)
|
||||
- [pacote](http://npm.im/pacote)
|
||||
- [make-fetch-happen](http://npm.im/make-fetch-happen)
|
||||
- [cacache](http://npm.im/cacache)
|
||||
- [ssri](http://npm.im/ssri)
|
||||
- [npm-registry-fetch](http://npm.im/npm-registry-fetch)
|
||||
- [minipass-json-stream](http://npm.im/minipass-json-stream)
|
||||
- [minipass-sized](http://npm.im/minipass-sized)
|
||||
|
||||
## Differences from Node.js Streams
|
||||
|
||||
There are several things that make Minipass streams different from (and in
|
||||
some ways superior to) Node.js core streams.
|
||||
|
||||
Please read these caveats if you are familiar with node-core streams and
|
||||
intend to use Minipass streams in your programs.
|
||||
|
||||
### Timing
|
||||
|
||||
Minipass streams are designed to support synchronous use-cases. Thus, data
|
||||
is emitted as soon as it is available, always. It is buffered until read,
|
||||
but no longer. Another way to look at it is that Minipass streams are
|
||||
exactly as synchronous as the logic that writes into them.
|
||||
|
||||
This can be surprising if your code relies on `PassThrough.write()` always
|
||||
providing data on the next tick rather than the current one, or being able
|
||||
to call `resume()` and not have the entire buffer disappear immediately.
|
||||
|
||||
However, without this synchronicity guarantee, there would be no way for
|
||||
Minipass to achieve the speeds it does, or support the synchronous use
|
||||
cases that it does. Simply put, waiting takes time.
|
||||
|
||||
This non-deferring approach makes Minipass streams much easier to reason
|
||||
about, especially in the context of Promises and other flow-control
|
||||
mechanisms.
|
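A small sketch of what that synchronicity looks like in practice:

```js
const Minipass = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
let got = null
mp.on('data', d => { got = d })
mp.write('hello')
console.log(got) // 'hello', emitted during the write() call, not on a later tick
```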
||||
|
||||
### No High/Low Water Marks
|
||||
|
||||
Node.js core streams will optimistically fill up a buffer, returning `true`
|
||||
on all writes until the limit is hit, even if the data has nowhere to go.
|
||||
Then, they will not attempt to draw more data in until the buffer size dips
|
||||
below a minimum value.
|
||||
|
||||
Minipass streams are much simpler. The `write()` method will return `true`
|
||||
if the data has somewhere to go (which is to say, given the timing
|
||||
guarantees, that the data is already there by the time `write()` returns).
|
||||
|
||||
If the data has nowhere to go, then `write()` returns false, and the data
|
||||
sits in a buffer, to be drained out immediately as soon as anyone consumes
|
||||
it.
|
||||
|
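For example (a sketch): with no consumer attached, `write()` returns `false` and the chunk just sits in the buffer; attaching a consumer drains it right away:

```js
const Minipass = require('minipass')
const mp = new Minipass({ encoding: 'utf8' })
const ok = mp.write('chunk') // false: the data has nowhere to go yet
mp.once('drain', () => console.log('buffer emptied'))
mp.on('data', d => console.log('got', d)) // adding a reader flushes the buffer
console.log(ok) // false
```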
||||
### Hazards of Buffering (or: Why Minipass Is So Fast)
|
||||
|
||||
Since data written to a Minipass stream is immediately written all the way
|
||||
through the pipeline, and `write()` always returns true/false based on
|
||||
whether the data was fully flushed, backpressure is communicated
|
||||
immediately to the upstream caller. This minimizes buffering.
|
||||
|
||||
Consider this case:
|
||||
|
||||
```js
|
||||
const {PassThrough} = require('stream')
|
||||
const p1 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p2 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p3 = new PassThrough({ highWaterMark: 1024 })
|
||||
const p4 = new PassThrough({ highWaterMark: 1024 })
|
||||
|
||||
p1.pipe(p2).pipe(p3).pipe(p4)
|
||||
p4.on('data', () => console.log('made it through'))
|
||||
|
||||
// this returns false and buffers, then writes to p2 on next tick (1)
|
||||
// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
|
||||
// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
|
||||
// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
|
||||
// on next tick (4)
|
||||
// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
|
||||
// 'drain' on next tick (5)
|
||||
// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
|
||||
// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
|
||||
// tick (7)
|
||||
|
||||
p1.write(Buffer.alloc(2048)) // returns false
|
||||
```
|
||||
|
||||
Along the way, the data was buffered and deferred at each stage, and
|
||||
multiple event deferrals happened, for an unblocked pipeline where it was
|
||||
perfectly safe to write all the way through!
|
||||
|
||||
Furthermore, setting a `highWaterMark` of `1024` might lead someone reading
|
||||
the code to think an advisory maximum of 1KiB is being set for the
|
||||
pipeline. However, the actual advisory buffering level is the _sum_ of
|
||||
`highWaterMark` values, since each one has its own bucket.
|
||||
|
||||
Consider the Minipass case:
|
||||
|
||||
```js
|
||||
const m1 = new Minipass()
|
||||
const m2 = new Minipass()
|
||||
const m3 = new Minipass()
|
||||
const m4 = new Minipass()
|
||||
|
||||
m1.pipe(m2).pipe(m3).pipe(m4)
|
||||
m4.on('data', () => console.log('made it through'))
|
||||
|
||||
// m1 is flowing, so it writes the data to m2 immediately
|
||||
// m2 is flowing, so it writes the data to m3 immediately
|
||||
// m3 is flowing, so it writes the data to m4 immediately
|
||||
// m4 is flowing, so it fires the 'data' event immediately, returns true
|
||||
// m4's write returned true, so m3 is still flowing, returns true
|
||||
// m3's write returned true, so m2 is still flowing, returns true
|
||||
// m2's write returned true, so m1 is still flowing, returns true
|
||||
// No event deferrals or buffering along the way!
|
||||
|
||||
m1.write(Buffer.alloc(2048)) // returns true
|
||||
```
|
||||
|
||||
It is extremely unlikely that you _don't_ want to buffer any data written,
|
||||
or _ever_ buffer data that can be flushed all the way through. Neither
|
||||
node-core streams nor Minipass ever fail to buffer written data, but
|
||||
node-core streams do a lot of unnecessary buffering and pausing.
|
||||
|
||||
As always, the faster implementation is the one that does less stuff and
|
||||
waits less time to do it.
|
||||
|
||||
### Immediately emit `end` for empty streams (when not paused)
|
||||
|
||||
If a stream is not paused, and `end()` is called before writing any data
|
||||
into it, then it will emit `end` immediately.
|
||||
|
||||
If you have logic that occurs on the `end` event which you don't want to
|
||||
potentially happen immediately (for example, closing file descriptors,
|
||||
moving on to the next entry in an archive parse stream, etc.) then be sure
|
||||
to call `stream.pause()` on creation, and then `stream.resume()` once you
|
||||
are ready to respond to the `end` event.
|
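A small sketch of that pattern:

```js
const Minipass = require('minipass')
const mp = new Minipass()
mp.pause() // prevent an immediate 'end' on this empty stream
mp.on('end', () => console.log('safe to clean up now'))
mp.end()    // 'end' is deferred...
mp.resume() // ...until we explicitly resume
```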
||||
|
||||
### Emit `end` When Asked
|
||||
|
||||
One hazard of immediately emitting `'end'` is that you may not yet have had
|
||||
a chance to add a listener. In order to avoid this hazard, Minipass
|
||||
streams safely re-emit the `'end'` event if a new listener is added after
|
||||
`'end'` has been emitted.
|
||||
|
||||
Ie, if you do `stream.on('end', someFunction)`, and the stream has already
|
||||
emitted `end`, then it will call the handler right away. (You can think of
|
||||
this somewhat like attaching a new `.then(fn)` to a previously-resolved
|
||||
Promise.)
|
||||
|
||||
To prevent calling handlers multiple times when they would not expect multiple
|
||||
ends to occur, all listeners are removed from the `'end'` event whenever it
|
||||
is emitted.
|
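For instance (a sketch):

```js
const Minipass = require('minipass')
const mp = new Minipass()
mp.end() // empty stream, not paused: 'end' fires immediately
mp.on('end', () => console.log('late listener still runs')) // called right away anyway
```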
||||
|
||||
### Impact of "immediate flow" on Tee-streams
|
||||
|
||||
A "tee stream" is a stream piping to multiple destinations:
|
||||
|
||||
```js
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
tee.write('foo') // goes to both destinations
|
||||
```
|
||||
|
||||
Since Minipass streams _immediately_ process any pending data through the
|
||||
pipeline when a new pipe destination is added, this can have surprising
|
||||
effects, especially when a stream comes in from some other function and may
|
||||
or may not have data in its buffer.
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
|
||||
src.pipe(dest2) // gets nothing!
|
||||
```
|
||||
|
||||
The solution is to create a dedicated tee-stream junction that pipes to
|
||||
both locations, and then pipe to _that_ instead.
|
||||
|
||||
```js
|
||||
// Safe example: tee to both places
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.pipe(dest1)
|
||||
tee.pipe(dest2)
|
||||
src.pipe(tee) // tee gets 'foo', pipes to both locations
|
||||
```
|
||||
|
||||
The same caveat applies to `on('data')` event listeners. The first one
|
||||
added will _immediately_ receive all of the data, leaving nothing for the
|
||||
second:
|
||||
|
||||
```js
|
||||
// WARNING! WILL LOSE DATA!
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
src.on('data', handler1) // receives 'foo' right away
|
||||
src.on('data', handler2) // nothing to see here!
|
||||
```
|
||||
|
||||
A dedicated tee-stream can be used in this case as well:
|
||||
|
||||
```js
|
||||
// Safe example: tee to both data handlers
|
||||
const src = new Minipass()
|
||||
src.write('foo')
|
||||
const tee = new Minipass()
|
||||
tee.on('data', handler1)
|
||||
tee.on('data', handler2)
|
||||
src.pipe(tee)
|
||||
```
|
||||
|
||||
## USAGE
|
||||
|
||||
It's a stream! Use it like a stream and it'll most likely do what you
|
||||
want.
|
||||
|
||||
```js
|
||||
const Minipass = require('minipass')
|
||||
const mp = new Minipass(options) // optional: { encoding, objectMode }
|
||||
mp.write('foo')
|
||||
mp.pipe(someOtherStream)
|
||||
mp.end('bar')
|
||||
```
|
||||
|
||||
### OPTIONS
|
||||
|
||||
* `encoding` How would you like the data coming _out_ of the stream to be
|
||||
encoded? Accepts any values that can be passed to `Buffer.toString()`.
|
||||
* `objectMode` Emit data exactly as it comes in. This will be flipped on
|
||||
by default if you write() something other than a string or Buffer at any
|
||||
point. Setting `objectMode: true` will prevent setting any encoding
|
||||
value.
|
||||
|
||||
### API
|
||||
|
||||
Implements the user-facing portions of Node.js's `Readable` and `Writable`
|
||||
streams.
|
||||
|
||||
### Methods
|
||||
|
||||
* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the
|
||||
base Minipass class, the same data will come out.) Returns `false` if
|
||||
the stream will buffer the next write, or true if it's still in "flowing"
|
||||
mode.
|
||||
* `end([chunk, [encoding]], [callback])` - Signal that you have no more
|
||||
data to write. This will queue an `end` event to be fired when all the
|
||||
data has been consumed.
|
||||
* `setEncoding(encoding)` - Set the encoding for data coming out of the stream.
|
||||
This can only be done once.
|
||||
* `pause()` - No more data for a while, please. This also prevents `end`
|
||||
from being emitted for empty streams until the stream is resumed.
|
||||
* `resume()` - Resume the stream. If there's data in the buffer, it is all
|
||||
discarded. Any buffered events are immediately emitted.
|
||||
* `pipe(dest)` - Send all output to the stream provided. There is no way
|
||||
to unpipe. When data is emitted, it is immediately written to any and
|
||||
all pipe destinations.
|
||||
* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some
|
||||
events are given special treatment, however. (See below under "events".)
|
||||
* `promise()` - Returns a Promise that resolves when the stream emits
|
||||
`end`, or rejects if the stream emits `error`.
|
||||
* `collect()` - Return a Promise that resolves on `end` with an array
|
||||
containing each chunk of data that was emitted, or rejects if the stream
|
||||
emits `error`. Note that this consumes the stream data.
|
||||
* `concat()` - Same as `collect()`, but concatenates the data into a single
|
||||
Buffer object. Will reject the returned promise if the stream is in
|
||||
objectMode, or if it goes into objectMode by the end of the data.
|
||||
* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
|
||||
provided, then consume all of it. If `n` bytes are not available, then
|
||||
it returns null. **Note** consuming streams in this way is less
|
||||
efficient, and can lead to unnecessary Buffer copying.
|
||||
* `destroy([er])` - Destroy the stream. If an error is provided, then an
|
||||
`'error'` event is emitted. If the stream has a `close()` method, and
|
||||
has not emitted a `'close'` event yet, then `stream.close()` will be
|
||||
called. Any Promises returned by `.promise()`, `.collect()` or
|
||||
`.concat()` will be rejected. After being destroyed, writing to the
|
||||
stream will emit an error. No more data will be emitted if the stream is
|
||||
destroyed, even if it was previously buffered.
|
||||
|
||||
### Properties
|
||||
|
||||
* `bufferLength` Read-only. Total number of bytes buffered, or in the case
|
||||
of objectMode, the total number of objects.
|
||||
* `encoding` The encoding that has been set. (Setting this is equivalent
|
||||
to calling `setEncoding(enc)` and has the same prohibition against
|
||||
setting multiple times.)
|
||||
* `flowing` Read-only. Boolean indicating whether a chunk written to the
|
||||
stream will be immediately emitted.
|
||||
* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
|
||||
(ie, `end`, `prefinish`, `finish`) have been emitted. Note that
|
||||
listening on any end-ish event will immediately re-emit it if it has
|
||||
already been emitted.
|
||||
* `writable` Whether the stream is writable. Default `true`. Set to
|
||||
`false` when `end()` is called.
|
||||
* `readable` Whether the stream is readable. Default `true`.
|
||||
* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written
|
||||
to the stream that have not yet been emitted. (It's probably a bad idea
|
||||
to mess with this.)
|
||||
* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that
|
||||
this stream is piping into. (It's probably a bad idea to mess with
|
||||
this.)
|
||||
* `destroyed` A getter that indicates whether the stream was destroyed.
|
||||
* `paused` True if the stream has been explicitly paused, otherwise false.
|
||||
* `objectMode` Indicates whether the stream is in `objectMode`. Once set
|
||||
to `true`, it cannot be set to `false`.
|
||||
|
||||
### Events
|
||||
|
||||
* `data` Emitted when there's data to read. Argument is the data to read.
|
||||
This is never emitted while not flowing. If a listener is attached, that
|
||||
will resume the stream.
|
||||
* `end` Emitted when there's no more data to read. This will be emitted
|
||||
immediately for empty streams when `end()` is called. If a listener is
|
||||
attached, and `end` was already emitted, then it will be emitted again.
|
||||
All listeners are removed when `end` is emitted.
|
||||
* `prefinish` An end-ish event that follows the same logic as `end` and is
|
||||
emitted in the same conditions where `end` is emitted. Emitted after
|
||||
`'end'`.
|
||||
* `finish` An end-ish event that follows the same logic as `end` and is
|
||||
emitted in the same conditions where `end` is emitted. Emitted after
|
||||
`'prefinish'`.
|
||||
* `close` An indication that an underlying resource has been released.
|
||||
Minipass does not emit this event, but will defer it until after `end`
|
||||
has been emitted, since it throws off some stream libraries otherwise.
|
||||
* `drain` Emitted when the internal buffer empties, and it is again
|
||||
suitable to `write()` into the stream.
|
||||
* `readable` Emitted when data is buffered and ready to be read by a
|
||||
consumer.
|
||||
* `resume` Emitted when stream changes state from buffering to flowing
|
||||
mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event
|
||||
listener is added.)
|
||||
|
||||
### Static Methods
|
||||
|
||||
* `Minipass.isStream(stream)` Returns `true` if the argument is a stream,
|
||||
and false otherwise. To be considered a stream, the object must be
|
||||
either an instance of Minipass, or an EventEmitter that has either a
|
||||
`pipe()` method, or both `write()` and `end()` methods. (Pretty much any
|
||||
stream in node-land will return `true` for this.)
|
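For example (a sketch):

```js
const Minipass = require('minipass')
const EE = require('events')
const fs = require('fs')

Minipass.isStream(new Minipass())                  // true
Minipass.isStream(fs.createReadStream(__filename)) // true, has a pipe() method
Minipass.isStream(new EE())                        // false, no pipe() or write()/end()
```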
||||
|
||||
## EXAMPLES
|
||||
|
||||
Here are some examples of things you can do with Minipass streams.
|
||||
|
||||
### simple "are you done yet" promise
|
||||
|
||||
```js
|
||||
mp.promise().then(() => {
|
||||
// stream is finished
|
||||
}, er => {
|
||||
// stream emitted an error
|
||||
})
|
||||
```
|
||||
|
||||
### collecting
|
||||
|
||||
```js
|
||||
mp.collect().then(all => {
|
||||
// all is an array of all the data emitted
|
||||
// encoding is supported in this case, so
|
||||
// the result will be a collection of strings if
|
||||
// an encoding is specified, or buffers/objects if not.
|
||||
//
|
||||
// In an async function, you may do
|
||||
// const data = await stream.collect()
|
||||
})
|
||||
```
|
||||
|
||||
### collecting into a single blob
|
||||
|
||||
This is a bit slower because it concatenates the data into one chunk for
|
||||
you, but if you're going to do it yourself anyway, it's convenient this
|
||||
way:
|
||||
|
||||
```js
|
||||
mp.concat().then(onebigchunk => {
|
||||
// onebigchunk is a string if the stream
|
||||
// had an encoding set, or a buffer otherwise.
|
||||
})
|
||||
```
|
||||
|
||||
### iteration
|
||||
|
||||
You can iterate over streams synchronously or asynchronously in platforms
|
||||
that support it.
|
||||
|
||||
Synchronous iteration will end when the currently available data is
|
||||
consumed, even if the `end` event has not been reached. In string and
|
||||
buffer mode, the data is concatenated, so unless multiple writes are
|
||||
occurring in the same tick as the `read()`, sync iteration loops will
|
||||
generally only have a single iteration.
|
||||
|
||||
To consume chunks in this way exactly as they have been written, with no
|
||||
flattening, create the stream with the `{ objectMode: true }` option.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ objectMode: true })
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // a, b
|
||||
}
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // c, d
|
||||
}
|
||||
mp.write('e')
|
||||
mp.end()
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // e
|
||||
}
|
||||
for (let letter of mp) {
|
||||
console.log(letter) // nothing
|
||||
}
|
||||
```
|
||||
|
||||
Asynchronous iteration will continue until the end event is reached,
|
||||
consuming all of the data.
|
||||
|
||||
```js
|
||||
const mp = new Minipass({ encoding: 'utf8' })
|
||||
|
||||
// some source of some data
|
||||
let i = 5
|
||||
const inter = setInterval(() => {
|
||||
if (i --> 0)
|
||||
mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.end()
|
||||
clearInterval(inter)
|
||||
}
|
||||
}, 100)
|
||||
|
||||
// consume the data with asynchronous iteration
|
||||
async function consume () {
|
||||
for await (let chunk of mp) {
|
||||
console.log(chunk)
|
||||
}
|
||||
return 'ok'
|
||||
}
|
||||
|
||||
consume().then(res => console.log(res))
|
||||
// logs `foo\n` 5 times, and then `ok`
|
||||
```
|
||||
|
||||
### subclass that `console.log()`s everything written into it
|
||||
|
||||
```js
|
||||
class Logger extends Minipass {
|
||||
write (chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end (chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}
|
||||
|
||||
someSource.pipe(new Logger()).pipe(someDest)
|
||||
```
|
||||
|
||||
### same thing, but using an inline anonymous class
|
||||
|
||||
```js
|
||||
// js classes are fun
|
||||
someSource
|
||||
.pipe(new (class extends Minipass {
|
||||
emit (ev, ...data) {
|
||||
// let's also log events, because debugging some weird thing
|
||||
console.log('EMIT', ev)
|
||||
return super.emit(ev, ...data)
|
||||
}
|
||||
write (chunk, encoding, callback) {
|
||||
console.log('WRITE', chunk, encoding)
|
||||
return super.write(chunk, encoding, callback)
|
||||
}
|
||||
end (chunk, encoding, callback) {
|
||||
console.log('END', chunk, encoding)
|
||||
return super.end(chunk, encoding, callback)
|
||||
}
|
||||
}))
|
||||
.pipe(someDest)
|
||||
```
|
||||
|
||||
### subclass that defers 'end' for some reason
|
||||
|
||||
```js
|
||||
class SlowEnd extends Minipass {
|
||||
emit (ev, ...args) {
|
||||
if (ev === 'end') {
|
||||
console.log('going to end, hold on a sec')
|
||||
setTimeout(() => {
|
||||
console.log('ok, ready to end now')
|
||||
super.emit('end', ...args)
|
||||
}, 100)
|
||||
} else {
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that creates newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONEncode extends Minipass {
|
||||
write (obj, cb) {
|
||||
try {
|
||||
// JSON.stringify can throw, emit an error on that
|
||||
return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
}
|
||||
}
|
||||
end (obj, cb) {
|
||||
if (typeof obj === 'function') {
|
||||
cb = obj
|
||||
obj = undefined
|
||||
}
|
||||
if (obj !== undefined) {
|
||||
this.write(obj)
|
||||
}
|
||||
return super.end(cb)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### transform that parses newline-delimited JSON
|
||||
|
||||
```js
|
||||
class NDJSONDecode extends Minipass {
|
||||
constructor (options) {
|
||||
// always be in object mode, as far as Minipass is concerned
|
||||
super({ objectMode: true })
|
||||
this._jsonBuffer = ''
|
||||
}
|
||||
write (chunk, encoding, cb) {
|
||||
if (typeof chunk === 'string' &&
|
||||
typeof encoding === 'string' &&
|
||||
encoding !== 'utf8') {
|
||||
chunk = Buffer.from(chunk, encoding).toString()
|
||||
} else if (Buffer.isBuffer(chunk)) {
|
||||
chunk = chunk.toString()
|
||||
}
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding
|
||||
}
|
||||
const jsonData = (this._jsonBuffer + chunk).split('\n')
|
||||
this._jsonBuffer = jsonData.pop()
|
||||
for (let i = 0; i < jsonData.length; i++) {
|
||||
let parsed
|
||||
try {
|
||||
// JSON.parse can throw; the catch below emits 'error' instead
parsed = JSON.parse(jsonData[i])
super.write(parsed)
|
||||
} catch (er) {
|
||||
this.emit('error', er)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if (cb)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
```
|
|
@ -1,11 +0,0 @@
|
|||
'use strict'
|
||||
const MiniPass = require('../..')
|
||||
|
||||
module.exports = class ExtendMiniPass extends MiniPass {
|
||||
constructor (opts) {
|
||||
super(opts)
|
||||
}
|
||||
write (data, encoding) {
|
||||
return super.write(data, encoding)
|
||||
}
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
'use strict'
|
||||
const through2 = require('through2')
|
||||
module.exports = function (opt) {
|
||||
return opt.objectMode
|
||||
? through2.obj(func)
|
||||
: through2(func)
|
||||
|
||||
function func (data, enc, done) {
|
||||
this.push(data, enc)
|
||||
done()
|
||||
}
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
'use strict'
|
||||
const stream = require('stream')
|
||||
module.exports = class ExtendTransform extends stream.Transform {
|
||||
constructor (opts) {
|
||||
super(opts)
|
||||
}
|
||||
_transform (data, enc, done) {
|
||||
this.push(data, enc)
|
||||
done()
|
||||
}
|
||||
}
|
|
@ -1,12 +0,0 @@
|
|||
'use strict'
|
||||
const EE = require('events').EventEmitter
|
||||
|
||||
module.exports = class NullSink extends EE {
|
||||
write (data, encoding, next) {
|
||||
if (next) next()
|
||||
return true
|
||||
}
|
||||
end () {
|
||||
this.emit('finish')
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
'use strict'
|
||||
const stream = require('stream')
|
||||
|
||||
const numbers = new Array(1000).join(',').split(',').map((v, k) => k)
|
||||
let acc = ''
|
||||
const strings = numbers.map(n => acc += n)
|
||||
const bufs = strings.map(s => Buffer.from(s))
|
||||
const objs = strings.map(s => ({ str: s }))
|
||||
|
||||
module.exports = class Numbers {
|
||||
constructor (opt) {
|
||||
this.objectMode = opt.objectMode
|
||||
this.encoding = opt.encoding
|
||||
this.ii = 0
|
||||
this.done = false
|
||||
}
|
||||
pipe (dest) {
|
||||
this.dest = dest
|
||||
this.go()
|
||||
return dest
|
||||
}
|
||||
|
||||
go () {
|
||||
let flowing = true
|
||||
while (flowing) {
|
||||
if (this.ii >= 1000) {
|
||||
this.dest.end()
|
||||
this.done = true
|
||||
flowing = false
|
||||
} else {
|
||||
flowing = this.dest.write(
|
||||
(this.objectMode ? objs
|
||||
: this.encoding ? strings
|
||||
: bufs)[this.ii++])
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.done)
|
||||
this.dest.once('drain', _ => this.go())
|
||||
}
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
'use strict'
|
||||
module.exports = _ => {
|
||||
const start = process.hrtime()
|
||||
return _ => {
|
||||
const end = process.hrtime(start)
|
||||
const ms = Math.round(end[0]*1e6 + end[1]/1e3)/1e3
|
||||
if (!process.env.isTTY)
|
||||
console.log(ms)
|
||||
else {
|
||||
const s = Math.round(end[0]*10 + end[1]/1e8)/10
|
||||
const ss = s <= 1 ? '' : ' (' + s + 's)'
|
||||
console.log('%d%s', ms, ss)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,160 +0,0 @@
|
|||
'use strict'
|
||||
|
||||
const iterations = +process.env.BENCH_TEST_ITERATION || 100
|
||||
const testCount = +process.env.BENCH_TEST_COUNT || 20
|
||||
|
||||
const tests = [
|
||||
'baseline',
|
||||
'minipass',
|
||||
'extend-minipass',
|
||||
'through2',
|
||||
'extend-through2',
|
||||
'passthrough',
|
||||
'extend-transform'
|
||||
]
|
||||
|
||||
const manyOpts = [ 'many', 'single' ]
|
||||
const typeOpts = [ 'buffer', 'string', 'object' ]
|
||||
|
||||
const main = () => {
|
||||
const spawn = require('child_process').spawn
|
||||
const node = process.execPath
|
||||
|
||||
const results = {}
|
||||
|
||||
const testSet = []
|
||||
tests.forEach(t =>
|
||||
manyOpts.forEach(many =>
|
||||
typeOpts.forEach(type =>
|
||||
new Array(testCount).join(',').split(',').forEach(() =>
|
||||
t !== 'baseline' || (many === 'single' && type === 'object')
|
||||
? testSet.push([t, many, type]) : null))))
|
||||
|
||||
let didFirst = false
|
||||
const mainRunTest = t => {
|
||||
if (!t)
|
||||
return afterMain(results)
|
||||
|
||||
const k = t.join('\t')
|
||||
if (!results[k]) {
|
||||
results[k] = []
|
||||
if (!didFirst)
|
||||
didFirst = true
|
||||
else
|
||||
process.stderr.write('\n')
|
||||
|
||||
process.stderr.write(k + ' #')
|
||||
} else {
|
||||
process.stderr.write('#')
|
||||
}
|
||||
|
||||
const c = spawn(node, [__filename].concat(t), {
|
||||
stdio: [ 'ignore', 'pipe', 2 ]
|
||||
})
|
||||
let out = ''
|
||||
c.stdout.on('data', c => out += c)
|
||||
c.on('close', (code, signal) => {
|
||||
if (code || signal)
|
||||
throw new Error('failed: ' + code + ' ' + signal)
|
||||
results[k].push(+out)
|
||||
mainRunTest(testSet.shift())
|
||||
})
|
||||
}
|
||||
|
||||
mainRunTest(testSet.shift())
|
||||
}
|
||||
|
||||
const afterMain = results => {
|
||||
console.log('test\tmany\ttype\tops/s\tmean\tmedian\tmax\tmin' +
|
||||
'\tstdev\trange\traw')
|
||||
// get the mean, median, stddev, and range of each test
|
||||
Object.keys(results).forEach(test => {
|
||||
const k = results[test].sort((a, b) => a - b)
|
||||
const min = k[0]
|
||||
const max = k[ k.length - 1 ]
|
||||
const range = max - min
|
||||
const sum = k.reduce((a,b) => a + b, 0)
|
||||
const mean = sum / k.length
|
||||
const ops = iterations / mean * 1000
|
||||
const devs = k.map(n => n - mean).map(n => n * n)
|
||||
const avgdev = devs.reduce((a,b) => a + b, 0) / k.length
|
||||
const stdev = Math.pow(avgdev, 0.5)
|
||||
const median = k.length % 2 ? k[Math.floor(k.length / 2)] :
|
||||
(k[k.length/2] + k[k.length/2+1])/2
|
||||
console.log(
|
||||
'%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%s', test, round(ops),
|
||||
round(mean), round(median),
|
||||
max, min, round(stdev), round(range),
|
||||
k.join('\t'))
|
||||
})
|
||||
}
|
||||
|
||||
const round = num => Math.round(num * 1000)/1000
|
||||
|
||||
const test = (testname, many, type) => {
|
||||
const timer = require('./lib/timer.js')
|
||||
const Class = getClass(testname)
|
||||
|
||||
const done = timer()
|
||||
runTest(Class, many, type, iterations, done)
|
||||
}
|
||||
|
||||
// don't blow up the stack! loop unless deferred
|
||||
const runTest = (Class, many, type, iterations, done) => {
|
||||
const Nullsink = require('./lib/nullsink.js')
|
||||
const Numbers = require('./lib/numbers.js')
|
||||
const opt = {}
|
||||
if (type === 'string')
|
||||
opt.encoding = 'utf8'
|
||||
else if (type === 'object')
|
||||
opt.objectMode = true
|
||||
|
||||
while (iterations--) {
|
||||
let finished = false
|
||||
let inloop = true
|
||||
const after = iterations === 0 ? done
|
||||
: () => {
|
||||
if (iterations === 0)
|
||||
done()
|
||||
else if (inloop)
|
||||
finished = true
|
||||
else
|
||||
runTest(Class, many, type, iterations, done)
|
||||
}
|
||||
|
||||
const out = new Nullsink().on('finish', after)
|
||||
let sink = Class ? new Class(opt) : out
|
||||
|
||||
if (many && Class)
|
||||
sink = sink
|
||||
.pipe(new Class(opt))
|
||||
.pipe(new Class(opt))
|
||||
.pipe(new Class(opt))
|
||||
.pipe(new Class(opt))
|
||||
|
||||
if (sink !== out)
|
||||
sink.pipe(out)
|
||||
|
||||
new Numbers(opt).pipe(sink)
|
||||
|
||||
// keep tight-looping if the stream is done already
|
||||
if (!finished) {
|
||||
inloop = false
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const getClass = testname =>
|
||||
testname === 'through2' ? require('through2').obj
|
||||
: testname === 'extend-through2' ? require('./lib/extend-through2.js')
|
||||
: testname === 'minipass' ? require('../')
|
||||
: testname === 'extend-minipass' ? require('./lib/extend-minipass.js')
|
||||
: testname === 'passthrough' ? require('stream').PassThrough
|
||||
: testname === 'extend-transform' ? require('./lib/extend-transform.js')
|
||||
: null
|
||||
|
||||
if (!process.argv[2])
|
||||
main()
|
||||
else
|
||||
test(process.argv[2], process.argv[3] === 'many', process.argv[4])
|
|
@ -1,545 +0,0 @@
|
|||
'use strict'
|
||||
const EE = require('events')
|
||||
const Stream = require('stream')
|
||||
const Yallist = require('yallist')
|
||||
const SD = require('string_decoder').StringDecoder
|
||||
|
||||
const EOF = Symbol('EOF')
|
||||
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
|
||||
const EMITTED_END = Symbol('emittedEnd')
|
||||
const EMITTING_END = Symbol('emittingEnd')
|
||||
const CLOSED = Symbol('closed')
|
||||
const READ = Symbol('read')
|
||||
const FLUSH = Symbol('flush')
|
||||
const FLUSHCHUNK = Symbol('flushChunk')
|
||||
const ENCODING = Symbol('encoding')
|
||||
const DECODER = Symbol('decoder')
|
||||
const FLOWING = Symbol('flowing')
|
||||
const PAUSED = Symbol('paused')
|
||||
const RESUME = Symbol('resume')
|
||||
const BUFFERLENGTH = Symbol('bufferLength')
|
||||
const BUFFERPUSH = Symbol('bufferPush')
|
||||
const BUFFERSHIFT = Symbol('bufferShift')
|
||||
const OBJECTMODE = Symbol('objectMode')
|
||||
const DESTROYED = Symbol('destroyed')
|
||||
|
||||
// TODO remove when Node v8 support drops
|
||||
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
|
||||
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|
||||
|| Symbol('asyncIterator not implemented')
|
||||
const ITERATOR = doIter && Symbol.iterator
|
||||
|| Symbol('iterator not implemented')
|
||||
|
||||
// events that mean 'the stream is over'
|
||||
// these are treated specially, and re-emitted
|
||||
// if they are listened for after emitting.
|
||||
const isEndish = ev =>
|
||||
ev === 'end' ||
|
||||
ev === 'finish' ||
|
||||
ev === 'prefinish'
|
||||
|
||||
const isArrayBuffer = b => b instanceof ArrayBuffer ||
|
||||
typeof b === 'object' &&
|
||||
b.constructor &&
|
||||
b.constructor.name === 'ArrayBuffer' &&
|
||||
b.byteLength >= 0
|
||||
|
||||
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
|
||||
|
||||
module.exports = class Minipass extends Stream {
|
||||
constructor (options) {
|
||||
super()
|
||||
this[FLOWING] = false
|
||||
// whether we're explicitly paused
|
||||
this[PAUSED] = false
|
||||
this.pipes = new Yallist()
|
||||
this.buffer = new Yallist()
|
||||
this[OBJECTMODE] = options && options.objectMode || false
|
||||
if (this[OBJECTMODE])
|
||||
this[ENCODING] = null
|
||||
else
|
||||
this[ENCODING] = options && options.encoding || null
|
||||
if (this[ENCODING] === 'buffer')
|
||||
this[ENCODING] = null
|
||||
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
|
||||
this[EOF] = false
|
||||
this[EMITTED_END] = false
|
||||
this[EMITTING_END] = false
|
||||
this[CLOSED] = false
|
||||
this.writable = true
|
||||
this.readable = true
|
||||
this[BUFFERLENGTH] = 0
|
||||
this[DESTROYED] = false
|
||||
}
|
||||
|
||||
get bufferLength () { return this[BUFFERLENGTH] }
|
||||
|
||||
get encoding () { return this[ENCODING] }
|
||||
set encoding (enc) {
|
||||
if (this[OBJECTMODE])
|
||||
throw new Error('cannot set encoding in objectMode')
|
||||
|
||||
if (this[ENCODING] && enc !== this[ENCODING] &&
|
||||
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
|
||||
throw new Error('cannot change encoding')
|
||||
|
||||
if (this[ENCODING] !== enc) {
|
||||
this[DECODER] = enc ? new SD(enc) : null
|
||||
if (this.buffer.length)
|
||||
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
|
||||
}
|
||||
|
||||
this[ENCODING] = enc
|
||||
}
|
||||
|
||||
setEncoding (enc) {
|
||||
this.encoding = enc
|
||||
}
|
||||
|
||||
get objectMode () { return this[OBJECTMODE] }
|
||||
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
|
||||
|
||||
write (chunk, encoding, cb) {
|
||||
if (this[EOF])
|
||||
throw new Error('write after end')
|
||||
|
||||
if (this[DESTROYED]) {
|
||||
this.emit('error', Object.assign(
|
||||
new Error('Cannot call write after a stream was destroyed'),
|
||||
{ code: 'ERR_STREAM_DESTROYED' }
|
||||
))
|
||||
return true
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function')
|
||||
cb = encoding, encoding = 'utf8'
|
||||
|
||||
if (!encoding)
|
||||
encoding = 'utf8'
|
||||
|
||||
// convert array buffers and typed array views into buffers
|
||||
// at some point in the future, we may want to do the opposite!
|
||||
// leave strings and buffers as-is
|
||||
// anything else switches us into object mode
|
||||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||||
if (isArrayBufferView(chunk))
|
||||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
|
||||
else if (isArrayBuffer(chunk))
|
||||
chunk = Buffer.from(chunk)
|
||||
else if (typeof chunk !== 'string')
|
||||
// use the setter so we throw if we have encoding set
|
||||
this.objectMode = true
|
||||
}
|
||||
|
||||
// this ensures at this point that the chunk is a buffer or string
|
||||
// don't buffer it up or send it to the decoder
|
||||
if (!this.objectMode && !chunk.length) {
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
if (cb)
|
||||
cb()
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
// fast-path writing strings of same encoding to a stream with
|
||||
// an empty buffer, skipping the buffer/decoder dance
|
||||
if (typeof chunk === 'string' && !this[OBJECTMODE] &&
|
||||
// unless it is a string already ready for us to use
|
||||
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
|
||||
chunk = Buffer.from(chunk, encoding)
|
||||
}
|
||||
|
||||
if (Buffer.isBuffer(chunk) && this[ENCODING])
|
||||
chunk = this[DECODER].write(chunk)
|
||||
|
||||
if (this.flowing) {
|
||||
// if we somehow have something in the buffer, but we think we're
|
||||
// flowing, then we need to flush all that out first, or we get
|
||||
// chunks coming in out of order. Can't emit 'drain' here though,
|
||||
// because we're mid-write, so that'd be bad.
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this[FLUSH](true)
|
||||
this.emit('data', chunk)
|
||||
} else
|
||||
this[BUFFERPUSH](chunk)
|
||||
|
||||
if (this[BUFFERLENGTH] !== 0)
|
||||
this.emit('readable')
|
||||
|
||||
if (cb)
|
||||
cb()
|
||||
|
||||
return this.flowing
|
||||
}
|
||||
|
||||
read (n) {
|
||||
if (this[DESTROYED])
|
||||
return null
|
||||
|
||||
try {
|
||||
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
|
||||
return null
|
||||
|
||||
if (this[OBJECTMODE])
|
||||
n = null
|
||||
|
||||
if (this.buffer.length > 1 && !this[OBJECTMODE]) {
|
||||
if (this.encoding)
|
||||
this.buffer = new Yallist([
|
||||
Array.from(this.buffer).join('')
|
||||
])
|
||||
else
|
||||
this.buffer = new Yallist([
|
||||
Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
|
||||
])
|
||||
}
|
||||
|
||||
return this[READ](n || null, this.buffer.head.value)
|
||||
} finally {
|
||||
this[MAYBE_EMIT_END]()
|
||||
}
|
||||
}
|
||||
|
||||
[READ] (n, chunk) {
|
||||
if (n === chunk.length || n === null)
|
||||
this[BUFFERSHIFT]()
|
||||
else {
|
||||
this.buffer.head.value = chunk.slice(n)
|
||||
chunk = chunk.slice(0, n)
|
||||
      this[BUFFERLENGTH] -= n
    }

    this.emit('data', chunk)

    if (!this.buffer.length && !this[EOF])
      this.emit('drain')

    return chunk
  }

  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    if (cb)
      this.once('end', cb)
    this[EOF] = true
    this.writable = false

    // if we haven't written anything, then go ahead and emit,
    // even if we're not reading.
    // we'll re-emit if a new 'end' listener is added anyway.
    // This makes MP more suitable to write-only use cases.
    if (this.flowing || !this[PAUSED])
      this[MAYBE_EMIT_END]()
    return this
  }

  // don't let the internal resume be overwritten
  [RESUME] () {
    if (this[DESTROYED])
      return

    this[PAUSED] = false
    this[FLOWING] = true
    this.emit('resume')
    if (this.buffer.length)
      this[FLUSH]()
    else if (this[EOF])
      this[MAYBE_EMIT_END]()
    else
      this.emit('drain')
  }

  resume () {
    return this[RESUME]()
  }

  pause () {
    this[FLOWING] = false
    this[PAUSED] = true
  }

  get destroyed () {
    return this[DESTROYED]
  }

  get flowing () {
    return this[FLOWING]
  }

  get paused () {
    return this[PAUSED]
  }

  [BUFFERPUSH] (chunk) {
    if (this[OBJECTMODE])
      this[BUFFERLENGTH] += 1
    else
      this[BUFFERLENGTH] += chunk.length
    return this.buffer.push(chunk)
  }

  [BUFFERSHIFT] () {
    if (this.buffer.length) {
      if (this[OBJECTMODE])
        this[BUFFERLENGTH] -= 1
      else
        this[BUFFERLENGTH] -= this.buffer.head.value.length
    }
    return this.buffer.shift()
  }

  [FLUSH] (noDrain) {
    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))

    if (!noDrain && !this.buffer.length && !this[EOF])
      this.emit('drain')
  }

  [FLUSHCHUNK] (chunk) {
    return chunk ? (this.emit('data', chunk), this.flowing) : false
  }

  pipe (dest, opts) {
    if (this[DESTROYED])
      return

    const ended = this[EMITTED_END]
    opts = opts || {}
    if (dest === process.stdout || dest === process.stderr)
      opts.end = false
    else
      opts.end = opts.end !== false

    const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
    this.pipes.push(p)

    dest.on('drain', p.ondrain)
    this[RESUME]()
    // piping an ended stream ends immediately
    if (ended && p.opts.end)
      p.dest.end()
    return dest
  }

  addListener (ev, fn) {
    return this.on(ev, fn)
  }

  on (ev, fn) {
    try {
      return super.on(ev, fn)
    } finally {
      if (ev === 'data' && !this.pipes.length && !this.flowing)
        this[RESUME]()
      else if (isEndish(ev) && this[EMITTED_END]) {
        super.emit(ev)
        this.removeAllListeners(ev)
      }
    }
  }

  get emittedEnd () {
    return this[EMITTED_END]
  }

  [MAYBE_EMIT_END] () {
    if (!this[EMITTING_END] &&
        !this[EMITTED_END] &&
        !this[DESTROYED] &&
        this.buffer.length === 0 &&
        this[EOF]) {
      this[EMITTING_END] = true
      this.emit('end')
      this.emit('prefinish')
      this.emit('finish')
      if (this[CLOSED])
        this.emit('close')
      this[EMITTING_END] = false
    }
  }

  emit (ev, data) {
    // error and close are only events allowed after calling destroy()
    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
      return
    else if (ev === 'data') {
      if (!data)
        return

      if (this.pipes.length)
        this.pipes.forEach(p =>
          p.dest.write(data) === false && this.pause())
    } else if (ev === 'end') {
      // only actual end gets this treatment
      if (this[EMITTED_END] === true)
        return

      this[EMITTED_END] = true
      this.readable = false

      if (this[DECODER]) {
        data = this[DECODER].end()
        if (data) {
          this.pipes.forEach(p => p.dest.write(data))
          super.emit('data', data)
        }
      }

      this.pipes.forEach(p => {
        p.dest.removeListener('drain', p.ondrain)
        if (p.opts.end)
          p.dest.end()
      })
    } else if (ev === 'close') {
      this[CLOSED] = true
      // don't emit close before 'end' and 'finish'
      if (!this[EMITTED_END] && !this[DESTROYED])
        return
    }

    // TODO: replace with a spread operator when Node v4 support drops
    const args = new Array(arguments.length)
    args[0] = ev
    args[1] = data
    if (arguments.length > 2) {
      for (let i = 2; i < arguments.length; i++) {
        args[i] = arguments[i]
      }
    }

    try {
      return super.emit.apply(this, args)
    } finally {
      if (!isEndish(ev))
        this[MAYBE_EMIT_END]()
      else
        this.removeAllListeners(ev)
    }
  }

  // const all = await stream.collect()
  collect () {
    const buf = []
    if (!this[OBJECTMODE])
      buf.dataLength = 0
    // set the promise first, in case an error is raised
    // by triggering the flow here.
    const p = this.promise()
    this.on('data', c => {
      buf.push(c)
      if (!this[OBJECTMODE])
        buf.dataLength += c.length
    })
    return p.then(() => buf)
  }

  // const data = await stream.concat()
  concat () {
    return this[OBJECTMODE]
      ? Promise.reject(new Error('cannot concat in objectMode'))
      : this.collect().then(buf =>
        this[OBJECTMODE]
          ? Promise.reject(new Error('cannot concat in objectMode'))
          : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
  }

  // stream.promise().then(() => done, er => emitted error)
  promise () {
    return new Promise((resolve, reject) => {
      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
      this.on('end', () => resolve())
      this.on('error', er => reject(er))
    })
  }

  // for await (let chunk of stream)
  [ASYNCITERATOR] () {
    const next = () => {
      const res = this.read()
      if (res !== null)
        return Promise.resolve({ done: false, value: res })

      if (this[EOF])
        return Promise.resolve({ done: true })

      let resolve = null
      let reject = null
      const onerr = er => {
        this.removeListener('data', ondata)
        this.removeListener('end', onend)
        reject(er)
      }
      const ondata = value => {
        this.removeListener('error', onerr)
        this.removeListener('end', onend)
        this.pause()
        resolve({ value: value, done: !!this[EOF] })
      }
      const onend = () => {
        this.removeListener('error', onerr)
        this.removeListener('data', ondata)
        resolve({ done: true })
      }
      const ondestroy = () => onerr(new Error('stream destroyed'))
      return new Promise((res, rej) => {
        reject = rej
        resolve = res
        this.once(DESTROYED, ondestroy)
        this.once('error', onerr)
        this.once('end', onend)
        this.once('data', ondata)
      })
    }

    return { next }
  }

  // for (let chunk of stream)
  [ITERATOR] () {
    const next = () => {
      const value = this.read()
      const done = value === null
      return { value, done }
    }
    return { next }
  }

  destroy (er) {
    if (this[DESTROYED]) {
      if (er)
        this.emit('error', er)
      else
        this.emit(DESTROYED)
      return this
    }

    this[DESTROYED] = true

    // throw away all buffered data, it's never coming out
    this.buffer = new Yallist()
    this[BUFFERLENGTH] = 0

    if (typeof this.close === 'function' && !this[CLOSED])
      this.close()

    if (er)
      this.emit('error', er)
    else // if no error to emit, still reject pending promises
      this.emit(DESTROYED)

    return this
  }

  static isStream (s) {
    return !!s && (s instanceof Minipass || s instanceof Stream ||
      s instanceof EE && (
        typeof s.pipe === 'function' || // readable
        (typeof s.write === 'function' && typeof s.end === 'function') // writable
      ))
  }
}
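The class body above completes the Minipass stream implementation bundled with this release. As a rough usage sketch (illustrative only, not part of the upstream diff; the bare 'minipass' require specifier is an assumption about how the package is installed), the promise-returning helpers defined above are typically driven like this:

    const Minipass = require('minipass')  // assumed install name; in this tree the module is vendored
    const mp = new Minipass({ encoding: 'utf8' })
    mp.write('hello, ')
    mp.end('world')
    // concat() gathers every 'data' chunk and joins them once 'end' fires
    mp.concat().then(s => console.log(s))  // prints "hello, world"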
File diff suppressed because it is too large
@@ -1,39 +0,0 @@
{
  "name": "minipass",
  "version": "3.1.3",
  "description": "minimal implementation of a PassThrough stream",
  "main": "index.js",
  "dependencies": {
    "yallist": "^4.0.0"
  },
  "devDependencies": {
    "end-of-stream": "^1.4.0",
    "tap": "^14.6.5",
    "through2": "^2.0.3"
  },
  "scripts": {
    "test": "tap",
    "preversion": "npm test",
    "postversion": "npm publish --tag=next",
    "postpublish": "git push origin --follow-tags"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/isaacs/minipass.git"
  },
  "keywords": [
    "passthrough",
    "stream"
  ],
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "license": "ISC",
  "files": [
    "index.js"
  ],
  "tap": {
    "check-coverage": true
  },
  "engines": {
    "node": ">=8"
  }
}
@@ -1,59 +0,0 @@
const t = require('tap')

const stringToArrayBuffer = s => {
  const buf = Buffer.from(s)
  const ab = new ArrayBuffer(buf.length)
  const ui = new Uint8Array(ab)
  for (let i = 0; i < buf.length; i++) {
    ui[i] = buf[i]
  }
  return ab
}

const MP = require('../')

const e = { encoding: 'utf8' }
t.test('write array buffer', t => {
  const ab = stringToArrayBuffer('hello world')
  const mp = new MP(e).end(ab)
  t.equal(mp.objectMode, false, 'array buffer does not trigger objectMode')
  return mp.concat().then(s => t.equal(s, 'hello world'))
})

t.test('write uint8 typed array', t => {
  const ab = stringToArrayBuffer('hello world')
  const ui = new Uint8Array(ab, 0, 5)
  const mp = new MP(e).end(ui)
  t.equal(mp.objectMode, false, 'typed array does not trigger objectMode')
  return mp.concat().then(s => t.equal(s, 'hello'))
})

const {
  ArrayBuffer: VMArrayBuffer,
  Uint8Array: VMUint8Array,
} = require('vm').runInNewContext('({ArrayBuffer,Uint8Array})')

const stringToVMArrayBuffer = s => {
  const buf = Buffer.from(s)
  const ab = new VMArrayBuffer(buf.length)
  const ui = new VMUint8Array(ab)
  for (let i = 0; i < buf.length; i++) {
    ui[i] = buf[i]
  }
  return ab
}

t.test('write vm array buffer', t => {
  const ab = stringToVMArrayBuffer('hello world')
  const mp = new MP(e).end(ab)
  t.equal(mp.objectMode, false, 'array buffer does not trigger objectMode')
  return mp.concat().then(s => t.equal(s, 'hello world'))
})

t.test('write uint8 typed array', t => {
  const ab = stringToVMArrayBuffer('hello world')
  const ui = new VMUint8Array(ab, 0, 5)
  const mp = new MP(e).end(ui)
  t.equal(mp.objectMode, false, 'typed array does not trigger objectMode')
  return mp.concat().then(s => t.equal(s, 'hello'))
})
@@ -1,12 +0,0 @@
const t = require('tap')
const MP = require('../')
t.test('do not auto-end empty stream if explicitly paused', t => {
  const mp = new MP()
  let waitedForEnd = false
  mp.pause()
  setTimeout(() => {
    waitedForEnd = true
    mp.resume()
  })
  return mp.end().promise().then(() => t.ok(waitedForEnd, 'waited for end'))
})
@ -1,455 +0,0 @@
|
|||
const MiniPass = require('../')
|
||||
const t = require('tap')
|
||||
const EE = require('events').EventEmitter
|
||||
|
||||
t.test('some basic piping and writing', async t => {
|
||||
let mp = new MiniPass({ encoding: 'base64' })
|
||||
t.notOk(mp.flowing)
|
||||
mp.flowing = true
|
||||
t.notOk(mp.flowing)
|
||||
t.equal(mp.encoding, 'base64')
|
||||
mp.encoding = null
|
||||
t.equal(mp.encoding, null)
|
||||
t.equal(mp.readable, true)
|
||||
t.equal(mp.writable, true)
|
||||
t.equal(mp.write('hello'), false)
|
||||
let dest = new MiniPass()
|
||||
let sawDestData = false
|
||||
dest.once('data', chunk => {
|
||||
sawDestData = true
|
||||
t.isa(chunk, Buffer)
|
||||
})
|
||||
t.equal(mp.pipe(dest), dest, 'pipe returns dest')
|
||||
t.ok(sawDestData, 'got data becasue pipe() flushes')
|
||||
t.equal(mp.write('bye'), true, 'write() returns true when flowing')
|
||||
dest.pause()
|
||||
t.equal(mp.write('after pause'), false, 'false when dest is paused')
|
||||
t.equal(mp.write('after false'), false, 'false when not flowing')
|
||||
t.equal(dest.buffer.length, 1, '1 item is buffered in dest')
|
||||
t.equal(mp.buffer.length, 1, '1 item buffered in src')
|
||||
dest.resume()
|
||||
t.equal(dest.buffer.length, 0, 'nothing is buffered in dest')
|
||||
t.equal(mp.buffer.length, 0, 'nothing buffered in src')
|
||||
})
|
||||
|
||||
t.test('unicode splitting', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
t.plan(2)
|
||||
t.equal(mp.encoding, 'utf8')
|
||||
mp.on('data', chunk => {
|
||||
t.equal(chunk, butterfly)
|
||||
})
|
||||
const butterbuf = Buffer.from([0xf0, 0x9f, 0xa6, 0x8b])
|
||||
mp.write(butterbuf.slice(0, 1))
|
||||
mp.write(butterbuf.slice(1, 2))
|
||||
mp.write(butterbuf.slice(2, 3))
|
||||
mp.write(butterbuf.slice(3, 4))
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('unicode splitting with setEncoding', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'hex' })
|
||||
t.plan(4)
|
||||
t.equal(mp.encoding, 'hex')
|
||||
mp.setEncoding('hex')
|
||||
t.equal(mp.encoding, 'hex')
|
||||
mp.setEncoding('utf8')
|
||||
t.equal(mp.encoding, 'utf8')
|
||||
mp.on('data', chunk => {
|
||||
t.equal(chunk, butterfly)
|
||||
})
|
||||
const butterbuf = Buffer.from([0xf0, 0x9f, 0xa6, 0x8b])
|
||||
mp.write(butterbuf.slice(0, 1))
|
||||
mp.write(butterbuf.slice(1, 2))
|
||||
mp.write(butterbuf.slice(2, 3))
|
||||
mp.write(butterbuf.slice(3, 4))
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('base64 -> utf8 piping', t => {
|
||||
t.plan(1)
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'base64' })
|
||||
const dest = new MiniPass({ encoding: 'utf8' })
|
||||
mp.pipe(dest)
|
||||
let out = ''
|
||||
dest.on('data', c => out += c)
|
||||
dest.on('end', _ =>
|
||||
t.equal(Buffer.from(out, 'base64').toString('utf8'), butterfly))
|
||||
mp.write(butterfly)
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('utf8 -> base64 piping', t => {
|
||||
t.plan(1)
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
const dest = new MiniPass({ encoding: 'base64' })
|
||||
mp.pipe(dest)
|
||||
let out = ''
|
||||
dest.on('data', c => out += c)
|
||||
dest.on('end', _ =>
|
||||
t.equal(Buffer.from(out, 'base64').toString('utf8'), butterfly))
|
||||
mp.write(butterfly)
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('read method', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
mp.on('data', c => t.equal(c, butterfly))
|
||||
mp.pause()
|
||||
t.equal(mp.paused, true, 'paused=true')
|
||||
mp.write(Buffer.from(butterfly))
|
||||
t.equal(mp.read(5), null)
|
||||
t.equal(mp.read(0), null)
|
||||
t.same(mp.read(2), butterfly)
|
||||
})
|
||||
|
||||
t.test('read with no args', async t => {
|
||||
t.test('buffer -> string', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
mp.on('data', c => t.equal(c, butterfly))
|
||||
mp.pause()
|
||||
const butterbuf = Buffer.from(butterfly)
|
||||
mp.write(butterbuf.slice(0, 2))
|
||||
mp.write(butterbuf.slice(2))
|
||||
t.same(mp.read(), butterfly)
|
||||
t.equal(mp.read(), null)
|
||||
})
|
||||
|
||||
t.test('buffer -> buffer', async t => {
|
||||
const butterfly = Buffer.from('🦋')
|
||||
const mp = new MiniPass()
|
||||
mp.on('data', c => t.same(c, butterfly))
|
||||
mp.pause()
|
||||
mp.write(butterfly.slice(0, 2))
|
||||
mp.write(butterfly.slice(2))
|
||||
t.same(mp.read(), butterfly)
|
||||
t.equal(mp.read(), null)
|
||||
})
|
||||
|
||||
t.test('string -> buffer', async t => {
|
||||
const butterfly = '🦋'
|
||||
const butterbuf = Buffer.from(butterfly)
|
||||
const mp = new MiniPass()
|
||||
mp.on('data', c => t.same(c, butterbuf))
|
||||
mp.pause()
|
||||
mp.write(butterfly)
|
||||
t.same(mp.read(), butterbuf)
|
||||
t.equal(mp.read(), null)
|
||||
})
|
||||
|
||||
t.test('string -> string', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
mp.on('data', c => t.equal(c, butterfly))
|
||||
mp.pause()
|
||||
mp.write(butterfly[0])
|
||||
mp.write(butterfly[1])
|
||||
t.same(mp.read(), butterfly)
|
||||
t.equal(mp.read(), null)
|
||||
})
|
||||
})
|
||||
|
||||
t.test('partial read', async t => {
|
||||
const butterfly = '🦋'
|
||||
const mp = new MiniPass()
|
||||
const butterbuf = Buffer.from(butterfly)
|
||||
mp.write(butterbuf.slice(0, 1))
|
||||
mp.write(butterbuf.slice(1, 2))
|
||||
mp.write(butterbuf.slice(2, 3))
|
||||
mp.write(butterbuf.slice(3, 4))
|
||||
t.equal(mp.read(5), null)
|
||||
t.equal(mp.read(0), null)
|
||||
t.same(mp.read(2), butterbuf.slice(0, 2))
|
||||
t.same(mp.read(2), butterbuf.slice(2, 4))
|
||||
})
|
||||
|
||||
t.test('write after end', async t => {
|
||||
const mp = new MiniPass()
|
||||
let sawEnd = false
|
||||
mp.on('end', _ => sawEnd = true)
|
||||
mp.end('not empty')
|
||||
t.throws(_ => mp.write('nope'))
|
||||
t.notOk(sawEnd, 'should not get end event yet (not flowing)')
|
||||
mp.resume()
|
||||
t.equal(mp.paused, false, 'paused=false after resume')
|
||||
t.ok(sawEnd, 'should get end event after resume()')
|
||||
})
|
||||
|
||||
t.test('write after end', async t => {
|
||||
const mp = new MiniPass()
|
||||
let sawEnd = 0
|
||||
mp.on('end', _ => sawEnd++)
|
||||
mp.end() // empty
|
||||
t.ok(mp.emittedEnd, 'emitted end event')
|
||||
t.throws(_ => mp.write('nope'))
|
||||
t.equal(sawEnd, 1, 'should get end event (empty stream)')
|
||||
mp.resume()
|
||||
t.ok(sawEnd, 2, 'should get end event again, I guess?')
|
||||
})
|
||||
|
||||
t.test('write cb', async t => {
|
||||
const mp = new MiniPass()
|
||||
let calledCb = false
|
||||
mp.write('ok', () => calledCb = true)
|
||||
t.ok(calledCb)
|
||||
})
|
||||
|
||||
t.test('end with chunk', async t => {
|
||||
let out = ''
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
let sawEnd = false
|
||||
mp.prependListener('end', _ => sawEnd = true)
|
||||
mp.addListener('data', c => out += c)
|
||||
let endCb = false
|
||||
mp.end('ok', _ => endCb = true)
|
||||
t.equal(out, 'ok')
|
||||
t.ok(sawEnd, 'should see end event')
|
||||
t.ok(endCb, 'end cb should get called')
|
||||
})
|
||||
|
||||
t.test('no drain if could not entirely drain on resume', async t => {
|
||||
const mp = new MiniPass()
|
||||
const dest = new MiniPass({ encoding: 'buffer' })
|
||||
t.equal(mp.write('foo'), false)
|
||||
t.equal(mp.write('bar'), false)
|
||||
t.equal(mp.write('baz'), false)
|
||||
t.equal(mp.write('qux'), false)
|
||||
mp.on('drain', _ => t.fail('should not drain'))
|
||||
mp.pipe(dest)
|
||||
})
|
||||
|
||||
t.test('end with chunk pending', async t => {
|
||||
const mp = new MiniPass()
|
||||
t.equal(mp.write('foo'), false)
|
||||
t.equal(mp.write('626172', 'hex'), false)
|
||||
t.equal(mp.write('baz'), false)
|
||||
t.equal(mp.write('qux'), false)
|
||||
let sawEnd = false
|
||||
mp.on('end', _ => sawEnd = true)
|
||||
let endCb = false
|
||||
mp.end(_ => endCb = true)
|
||||
t.notOk(endCb, 'endcb should not happen yet')
|
||||
t.notOk(sawEnd, 'should not see end yet')
|
||||
let out = ''
|
||||
mp.on('data', c => out += c)
|
||||
t.ok(sawEnd, 'see end after flush')
|
||||
t.ok(endCb, 'end cb after flush')
|
||||
t.equal(out, 'foobarbazqux')
|
||||
})
|
||||
|
||||
t.test('pipe to stderr does not throw', t => {
|
||||
const spawn = require('child_process').spawn
|
||||
const module = JSON.stringify(require.resolve('../'))
|
||||
const fs = require('fs')
|
||||
const file = __dirname + '/prog.js'
|
||||
fs.writeFileSync(file, `
|
||||
const MP = require(${module})
|
||||
const mp = new MP()
|
||||
mp.pipe(process.stderr)
|
||||
mp.end("hello")
|
||||
`)
|
||||
let err = ''
|
||||
return new Promise(res => {
|
||||
const child = spawn(process.execPath, [file])
|
||||
child.stderr.on('data', c => err += c)
|
||||
child.on('close', (code, signal) => {
|
||||
t.equal(code, 0)
|
||||
t.equal(signal, null)
|
||||
t.equal(err, 'hello')
|
||||
fs.unlinkSync(file)
|
||||
res()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('emit works with many args', t => {
|
||||
const mp = new MiniPass()
|
||||
t.plan(2)
|
||||
mp.on('foo', function (a, b, c, d, e, f, g) {
|
||||
t.same([a,b,c,d,e,f,g], [1,2,3,4,5,6,7])
|
||||
t.equal(arguments.length, 7)
|
||||
})
|
||||
mp.emit('foo', 1, 2, 3, 4, 5, 6, 7)
|
||||
})
|
||||
|
||||
t.test('emit drain on resume, even if no flush', t => {
|
||||
const mp = new MiniPass()
|
||||
mp.encoding = 'utf8'
|
||||
|
||||
const chunks = []
|
||||
class SlowStream extends EE {
|
||||
write (chunk) {
|
||||
chunks.push(chunk)
|
||||
setTimeout(_ => this.emit('drain'))
|
||||
return false
|
||||
}
|
||||
end () { return this.write() }
|
||||
}
|
||||
|
||||
const ss = new SlowStream()
|
||||
|
||||
mp.pipe(ss)
|
||||
t.ok(mp.flowing, 'flowing, because piped')
|
||||
t.equal(mp.write('foo'), false, 'write() returns false, backpressure')
|
||||
t.equal(mp.buffer.length, 0, 'buffer len is 0')
|
||||
t.equal(mp.flowing, false, 'flowing false, awaiting drain')
|
||||
t.same(chunks, ['foo'], 'chunk made it through')
|
||||
mp.once('drain', _ => {
|
||||
t.pass('received mp drain event')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
t.test('save close for end', t => {
|
||||
const mp = new MiniPass()
|
||||
let ended = false
|
||||
mp.on('close', _ => {
|
||||
t.equal(ended, true, 'end before close')
|
||||
t.end()
|
||||
})
|
||||
mp.on('end', _ => {
|
||||
t.equal(ended, false, 'only end once')
|
||||
ended = true
|
||||
})
|
||||
|
||||
mp.emit('close')
|
||||
mp.end('foo')
|
||||
t.equal(ended, false, 'no end until flushed')
|
||||
mp.resume()
|
||||
})
|
||||
|
||||
t.test('eos works', t => {
|
||||
const eos = require('end-of-stream')
|
||||
const mp = new MiniPass()
|
||||
|
||||
eos(mp, er => {
|
||||
if (er)
|
||||
throw er
|
||||
t.end()
|
||||
})
|
||||
|
||||
mp.emit('close')
|
||||
mp.end('foo')
|
||||
mp.resume()
|
||||
})
|
||||
|
||||
t.test('bufferLength property', t => {
|
||||
const eos = require('end-of-stream')
|
||||
const mp = new MiniPass()
|
||||
mp.write('a')
|
||||
mp.write('a')
|
||||
mp.write('a')
|
||||
mp.write('a')
|
||||
mp.write('a')
|
||||
mp.write('a')
|
||||
|
||||
t.equal(mp.bufferLength, 6)
|
||||
t.equal(mp.read(7), null)
|
||||
t.equal(mp.read(3).toString(), 'aaa')
|
||||
t.equal(mp.bufferLength, 3)
|
||||
t.equal(mp.read().toString(), 'aaa')
|
||||
t.equal(mp.bufferLength, 0)
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('emit resume event on resume', t => {
|
||||
const mp = new MiniPass()
|
||||
t.plan(3)
|
||||
mp.on('resume', _ => t.pass('got resume event'))
|
||||
mp.end('asdf')
|
||||
t.equal(mp.flowing, false, 'not flowing yet')
|
||||
mp.resume()
|
||||
t.equal(mp.flowing, true, 'flowing after resume')
|
||||
})
|
||||
|
||||
t.test('objectMode', t => {
|
||||
const mp = new MiniPass({ objectMode: true })
|
||||
t.equal(mp.objectMode, true, 'objectMode getter returns value')
|
||||
mp.objectMode = false
|
||||
t.equal(mp.objectMode, true, 'objectMode getter is read-only')
|
||||
const a = { a: 1 }
|
||||
const b = { b: 1 }
|
||||
const out = []
|
||||
mp.on('data', c => out.push(c))
|
||||
mp.on('end', _ => {
|
||||
t.equal(out.length, 2)
|
||||
t.equal(out[0], a)
|
||||
t.equal(out[1], b)
|
||||
t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
|
||||
t.end()
|
||||
})
|
||||
t.ok(mp.write(a))
|
||||
t.ok(mp.write(b))
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('objectMode no encoding', t => {
|
||||
const mp = new MiniPass({
|
||||
objectMode: true,
|
||||
encoding: 'utf8'
|
||||
})
|
||||
t.equal(mp.encoding, null)
|
||||
const a = { a: 1 }
|
||||
const b = { b: 1 }
|
||||
const out = []
|
||||
mp.on('data', c => out.push(c))
|
||||
mp.on('end', _ => {
|
||||
t.equal(out.length, 2)
|
||||
t.equal(out[0], a)
|
||||
t.equal(out[1], b)
|
||||
t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
|
||||
t.end()
|
||||
})
|
||||
t.ok(mp.write(a))
|
||||
t.ok(mp.write(b))
|
||||
mp.end()
|
||||
})
|
||||
|
||||
t.test('objectMode read() and buffering', t => {
|
||||
const mp = new MiniPass({ objectMode: true })
|
||||
const a = { a: 1 }
|
||||
const b = { b: 1 }
|
||||
t.notOk(mp.write(a))
|
||||
t.notOk(mp.write(b))
|
||||
t.equal(mp.read(2), a)
|
||||
t.equal(mp.read(), b)
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('set encoding in object mode throws', async t =>
|
||||
t.throws(_ => new MiniPass({ objectMode: true }).encoding = 'utf8',
|
||||
new Error('cannot set encoding in objectMode')))
|
||||
|
||||
t.test('set encoding again throws', async t =>
|
||||
t.throws(_ => {
|
||||
const mp = new MiniPass({ encoding: 'hex' })
|
||||
mp.write('ok')
|
||||
mp.encoding = 'utf8'
|
||||
}, new Error('cannot change encoding')))
|
||||
|
||||
t.test('set encoding with existing buffer', async t => {
|
||||
const mp = new MiniPass()
|
||||
const butterfly = '🦋'
|
||||
const butterbuf = Buffer.from(butterfly)
|
||||
mp.write(butterbuf.slice(0, 1))
|
||||
mp.write(butterbuf.slice(1, 2))
|
||||
mp.setEncoding('utf8')
|
||||
mp.write(butterbuf.slice(2))
|
||||
t.equal(mp.read(), butterfly)
|
||||
})
|
||||
|
||||
t.test('end:false', async t => {
|
||||
t.plan(1)
|
||||
const mp = new MiniPass({ encoding: 'utf8' })
|
||||
const d = new MiniPass({ encoding: 'utf8' })
|
||||
d.end = () => t.threw(new Error('no end no exit no way out'))
|
||||
d.on('data', c => t.equal(c, 'this is fine'))
|
||||
mp.pipe(d, { end: false })
|
||||
mp.end('this is fine')
|
||||
})
|
@@ -1,8 +0,0 @@
const MP = require('../')
const mp = new MP()
const poop = new Error('poop')
mp.on('end', () => mp.emit('error', poop))
mp.end('foo')
const t = require('tap')
t.test('promise catches error emitted on end', t =>
  t.rejects(mp.collect(), poop))
@ -1,58 +0,0 @@
|
|||
'use strict'
|
||||
const t = require('tap')
|
||||
const MP = require('../index.js')
|
||||
|
||||
t.test('basic', async t => {
|
||||
const mp = new MP()
|
||||
let i = 5
|
||||
const interval = setInterval(() => {
|
||||
if (i --> 0)
|
||||
mp.write('foo\n')
|
||||
else {
|
||||
clearInterval(interval)
|
||||
mp.end()
|
||||
}
|
||||
})
|
||||
mp.setEncoding('utf8')
|
||||
const all = await mp.collect()
|
||||
t.same(all, ['foo\n','foo\n','foo\n','foo\n','foo\n'])
|
||||
})
|
||||
|
||||
t.test('error', async t => {
|
||||
const mp = new MP()
|
||||
const poop = new Error('poop')
|
||||
setTimeout(() => mp.emit('error', poop))
|
||||
await t.rejects(mp.collect(), poop)
|
||||
})
|
||||
|
||||
t.test('concat strings', async t => {
|
||||
const mp = new MP({ encoding: 'utf8' })
|
||||
mp.write('foo')
|
||||
mp.write('bar')
|
||||
mp.write('baz')
|
||||
mp.end()
|
||||
await t.resolveMatch(mp.concat(), 'foobarbaz')
|
||||
})
|
||||
t.test('concat buffers', async t => {
|
||||
const mp = new MP()
|
||||
mp.write('foo')
|
||||
mp.write('bar')
|
||||
mp.write('baz')
|
||||
mp.end()
|
||||
await t.resolveMatch(mp.concat(), Buffer.from('foobarbaz'))
|
||||
})
|
||||
|
||||
t.test('concat objectMode fails', async t => {
|
||||
const a = new MP({objectMode: true})
|
||||
await t.rejects(a.concat(), new Error('cannot concat in objectMode'))
|
||||
const b = new MP()
|
||||
b.write('asdf')
|
||||
setTimeout(() => b.end({foo:1}))
|
||||
await t.rejects(b.concat(), new Error('cannot concat in objectMode'))
|
||||
})
|
||||
|
||||
t.test('collect does not set bodyLength in objectMode', t =>
|
||||
new MP({objectMode: true}).end({a:1}).collect().then(data => {
|
||||
t.equal(typeof data.dataLength, 'undefined')
|
||||
t.deepEqual(data, [{a:1}])
|
||||
}))
|
|
@ -1,28 +0,0 @@
|
|||
const t = require('tap')
|
||||
const MP = require('../')
|
||||
|
||||
t.test('writing to a non-bool returning write() does not pause', t => {
|
||||
const booleyStream = new (class extends MP {
|
||||
write (chunk, encoding, cb) {
|
||||
// no return!
|
||||
super.write(chunk, encoding, cb)
|
||||
}
|
||||
})
|
||||
|
||||
const booleyStream2 = new (class extends MP {
|
||||
write (chunk, encoding, cb) {
|
||||
// no return!
|
||||
super.write(chunk, encoding, cb)
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
const src = new MP
|
||||
|
||||
try {
|
||||
return src.pipe(booleyStream).pipe(booleyStream2).concat().then(d =>
|
||||
t.equal(d.toString(), 'hello', 'got data all the way through'))
|
||||
} finally {
|
||||
src.end('hello')
|
||||
}
|
||||
})
|
|
@ -1,107 +0,0 @@
|
|||
const MP = require('../')
|
||||
const t = require('tap')
|
||||
|
||||
t.match(new MP(), { destroy: Function }, 'destroy is implemented')
|
||||
|
||||
{
|
||||
const mp = new MP()
|
||||
t.equal(mp.destroy(), mp, 'destroy() returns this')
|
||||
}
|
||||
|
||||
t.equal(new MP().destroy().destroyed, true, 'destroy() sets .destroyed getter')
|
||||
|
||||
t.test('destroy(er) emits error', t => {
|
||||
const mp = new MP()
|
||||
const er = new Error('skoarchhh')
|
||||
const ret = t.rejects(() => mp.promise(), er)
|
||||
mp.destroy(er)
|
||||
return ret
|
||||
})
|
||||
|
||||
t.test('calls close if present', t => {
|
||||
const mp = new MP()
|
||||
let closeCalled = false
|
||||
mp.close = () => {
|
||||
closeCalled = true
|
||||
setTimeout(() => mp.emit('close'))
|
||||
}
|
||||
mp.on('close', () => {
|
||||
t.equal(closeCalled, true, 'called close')
|
||||
t.end()
|
||||
})
|
||||
mp.destroy()
|
||||
})
|
||||
|
||||
t.test('destroy a second time just emits the error', t => {
|
||||
const mp = new MP()
|
||||
mp.destroy()
|
||||
const er = new Error('skoarchhh')
|
||||
const ret = t.rejects(() => mp.promise(), er)
|
||||
mp.destroy(er)
|
||||
return ret
|
||||
})
|
||||
|
||||
t.test('destroy with no error rejects a promise', t => {
|
||||
const mp = new MP()
|
||||
const ret = t.rejects(() => mp.promise(), { message: 'stream destroyed' })
|
||||
mp.destroy()
|
||||
return ret
|
||||
})
|
||||
|
||||
t.test('destroy with no error second time rejects a promise', t => {
|
||||
const mp = new MP()
|
||||
mp.destroy()
|
||||
const ret = t.rejects(() => mp.promise(), { message: 'stream destroyed' })
|
||||
mp.destroy()
|
||||
return ret
|
||||
})
|
||||
|
||||
t.test('emits after destruction are ignored', t => {
|
||||
const mp = new MP().destroy()
|
||||
mp.on('foo', () => t.fail('should not emit foo after destroy'))
|
||||
mp.emit('foo')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('pipe after destroy is a no-op', t => {
|
||||
const p = new MP()
|
||||
p.write('foo')
|
||||
p.destroy()
|
||||
const q = new MP()
|
||||
q.on('data', c => t.fail('should not get data, upstream is destroyed'))
|
||||
p.pipe(q)
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('resume after destroy is a no-op', t => {
|
||||
const p = new MP()
|
||||
p.pause()
|
||||
p.on('resume', () => t.fail('should not see resume event after destroy'))
|
||||
p.destroy()
|
||||
p.resume()
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('read after destroy always returns null', t => {
|
||||
const p = new MP({ encoding: 'utf8' })
|
||||
p.write('hello, ')
|
||||
p.write('world')
|
||||
t.equal(p.read(), 'hello, world')
|
||||
p.write('destroyed!')
|
||||
p.destroy()
|
||||
t.equal(p.read(), null)
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('write after destroy emits error', t => {
|
||||
const p = new MP()
|
||||
p.destroy()
|
||||
p.on('error', er => {
|
||||
t.match(er, {
|
||||
message: 'Cannot call write after a stream was destroyed',
|
||||
code: 'ERR_STREAM_DESTROYED',
|
||||
})
|
||||
t.end()
|
||||
})
|
||||
p.write('nope')
|
||||
})
|
@@ -1,17 +0,0 @@
const Minipass = require('../')
const t = require('tap')

class FancyEnder extends Minipass {
  emit (ev, ...data) {
    if (ev === 'end')
      this.emit('foo')
    return super.emit(ev, ...data)
  }
}

const mp = new FancyEnder()
let fooEmits = 0
mp.on('foo', () => fooEmits++)
mp.end('asdf')
mp.resume()
t.equal(fooEmits, 1, 'should only see one event emitted')
@ -1,30 +0,0 @@
|
|||
const t = require('tap')
|
||||
const MP = require('../')
|
||||
|
||||
const enc = { encoding: 'utf8' }
|
||||
|
||||
t.test('encoding and immediate end', t =>
|
||||
new MP(enc).end().concat().then(s => t.equal(s, '')))
|
||||
|
||||
t.test('encoding and end with empty string', t =>
|
||||
new MP(enc).end('').concat().then(s => t.equal(s, '')))
|
||||
|
||||
t.test('encoding and end with empty buffer', t =>
|
||||
new MP(enc).end(Buffer.alloc(0)).concat().then(s => t.equal(s, '')))
|
||||
|
||||
t.test('encoding and end with stringly empty buffer', t =>
|
||||
new MP(enc).end(Buffer.from('')).concat().then(s => t.equal(s, '')))
|
||||
|
||||
t.test('encoding and write then end with empty buffer', t => {
|
||||
const mp = new MP(enc)
|
||||
mp.write('ok')
|
||||
return mp.end(Buffer.alloc(0)).concat().then(s => t.equal(s, 'ok'))
|
||||
})
|
||||
|
||||
t.test('encoding and write then end with empty string', t => {
|
||||
const mp = new MP(enc)
|
||||
mp.write('ok')
|
||||
return mp.end('').concat().then(s => t.equal(s, 'ok'))
|
||||
})
|
||||
|
||||
t.test('empty write with cb', t => new MP(enc).write(Buffer.from(''), t.end))
|
@@ -1,4 +0,0 @@
const MP = require('../')
const t = require('tap')
t.test('empty end emits end without reading', t =>
  new MP().end().promise())
@ -1,32 +0,0 @@
|
|||
'use strict'
|
||||
const t = require('tap')
|
||||
const MP = require('../')
|
||||
|
||||
t.test('end is not missed if listened to after end', t => {
|
||||
t.plan(1)
|
||||
const mp = new MP()
|
||||
mp.end('foo')
|
||||
let str = ''
|
||||
mp.on('data', d => str += d)
|
||||
mp.on('end', () => t.equal(str, 'foo'))
|
||||
})
|
||||
|
||||
t.test('listening for any endish event after end re-emits', t => {
|
||||
t.plan(1)
|
||||
const mp = new MP()
|
||||
mp.end('foo')
|
||||
let str = ''
|
||||
mp.on('data', d => str += d)
|
||||
mp.on('finish', () => t.equal(str, 'foo'))
|
||||
})
|
||||
|
||||
t.test('all endish listeners get called', t => {
|
||||
t.plan(3)
|
||||
const mp = new MP()
|
||||
let str = ''
|
||||
mp.on('finish', () => t.equal(str, 'foo'))
|
||||
mp.on('prefinish', () => t.equal(str, 'foo'))
|
||||
mp.end('foo')
|
||||
mp.on('data', d => str += d)
|
||||
mp.on('end', () => t.equal(str, 'foo'))
|
||||
})
|
@@ -1,4 +0,0 @@
const t = require('tap')
const MP = require('../')
const mp = new MP()
t.equal(mp.end(), mp, 'end returns this')
@ -1,40 +0,0 @@
|
|||
'use strict'
|
||||
const t = require('tap')
|
||||
const MP = require('../')
|
||||
|
||||
t.test('organic', t => {
|
||||
const butterfly = Buffer.from([0x61, 0xf0, 0x9f, 0xa6, 0x8b, 0xf0])
|
||||
const mp = new MP({ encoding: 'utf8' })
|
||||
|
||||
let sawEnd = 0
|
||||
mp.on('end', () =>
|
||||
t.equal(sawEnd++, 0, 'should not have seen the end yet'))
|
||||
|
||||
mp.once('data', () => {
|
||||
mp.once('data', () => {
|
||||
mp.once('data', () => mp.end())
|
||||
mp.end()
|
||||
})
|
||||
mp.end(butterfly.slice(3))
|
||||
})
|
||||
mp.end(butterfly.slice(0, 3))
|
||||
|
||||
t.equal(sawEnd, 1, 'should see end exactly once')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('manufactured', t => {
|
||||
// *should* already be impossible, but just to be even more
|
||||
// deliberate, in case that wasn't the only way it could happen
|
||||
const mp = new MP()
|
||||
let sawEnd = 0
|
||||
mp.on('end', () => {
|
||||
t.equal(sawEnd++, 0, 'should not have seen the end yet')
|
||||
mp.emit('end')
|
||||
})
|
||||
mp.emit('end')
|
||||
mp.emit('end')
|
||||
|
||||
t.equal(sawEnd, 1, 'should see end exactly once')
|
||||
t.end()
|
||||
})
|
|
@ -1,44 +0,0 @@
|
|||
// this is a minimal reproduction of a pretty complex interaction between
|
||||
// minipass-pipeline and a slow-draining proxy stream, which occurred in
|
||||
// make-fetch-happen. https://github.com/npm/npm-registry-fetch/issues/23
|
||||
// The pipeline in question was a wrapper that tee'd data into the cache,
|
||||
// which is a slow-draining sink stream. When multiple chunks come through,
|
||||
// the Pipeline's buffer is holding a chunk, but the Pipeline itself is in
|
||||
// flowing mode. The solution is to always drain the buffer before emitting
|
||||
// 'data', if there is other data waiting to be emitted.
|
||||
const Minipass = require('../')
|
||||
const t = require('tap')
|
||||
|
||||
const src = new Minipass({ encoding: 'utf8' })
|
||||
const mid = new Minipass({ encoding: 'utf8' })
|
||||
const proxy = new Minipass({ encoding: 'utf8' })
|
||||
mid.write = function (chunk, encoding, cb) {
|
||||
Minipass.prototype.write.call(this, chunk, encoding, cb)
|
||||
return proxy.write(chunk, encoding, cb)
|
||||
}
|
||||
proxy.on('drain', chunk => mid.emit('drain'))
|
||||
proxy.on('readable', () => setTimeout(() => proxy.read()))
|
||||
|
||||
const dest = new Minipass({ encoding: 'utf8' })
|
||||
src.write('a')
|
||||
src.write('b')
|
||||
|
||||
const pipeline = new (class Pipeline extends Minipass {
|
||||
constructor (opt) {
|
||||
super(opt)
|
||||
dest.on('data', c => super.write(c))
|
||||
dest.on('end', () => super.end())
|
||||
}
|
||||
emit (ev, ...args) {
|
||||
if (ev === 'resume')
|
||||
dest.resume()
|
||||
return super.emit(ev, ...args)
|
||||
}
|
||||
})({ encoding: 'utf8'})
|
||||
|
||||
mid.pipe(dest)
|
||||
src.pipe(mid)
|
||||
t.test('get all data', t => pipeline.concat().then(d => t.equal(d, 'abcd')))
|
||||
src.write('c')
|
||||
src.write('d')
|
||||
src.end()
|
@@ -1,31 +0,0 @@
const MP = require('../')
const EE = require('events')
const t = require('tap')
const Stream = require('stream')

t.equal(MP.isStream(new MP), true, 'a MiniPass is a stream')
t.equal(MP.isStream(new Stream), true, 'a Stream is a stream')
t.equal((new MP) instanceof Stream, true, 'a MiniPass is a Stream')
const w = new EE()
w.write = () => {}
w.end = () => {}
t.equal(MP.isStream(w), true, 'EE with write() and end() is a stream')
const r = new EE()
r.pipe = () => {}
t.equal(MP.isStream(r), true, 'EE with pipe() is a stream')
t.equal(MP.isStream(new Stream.Readable()), true, 'Stream.Readable() is a stream')
t.equal(MP.isStream(new Stream.Writable()), true, 'Stream.Writable() is a stream')
t.equal(MP.isStream(new Stream.Duplex()), true, 'Stream.Duplex() is a stream')
t.equal(MP.isStream(new Stream.Transform()), true, 'Stream.Transform() is a stream')
t.equal(MP.isStream(new Stream.PassThrough()), true, 'Stream.PassThrough() is a stream')
t.equal(MP.isStream(new (class extends MP {})), true, 'extends MP is a stream')
t.equal(MP.isStream(new EE), false, 'EE without streaminess is not a stream')
t.equal(MP.isStream({
  write(){},
  end(){},
  pipe(){},
}), false, 'non-EE is not a stream')
t.equal(MP.isStream('hello'), false, 'string is not a stream')
t.equal(MP.isStream(99), false, 'number is not a stream')
t.equal(MP.isStream(() => {}), false, 'function is not a stream')
t.equal(MP.isStream(null), false, 'null is not a stream')
@@ -1,17 +0,0 @@
'use strict'
const t = require('tap')
global._MP_NO_ITERATOR_SYMBOLS_ = '1'
const MP = require('../index.js')
const mp = new MP
mp.write('foo')
setTimeout(() => mp.end())
t.throws(() => {
  for (let x of mp) {
    t.fail('should not be iterable')
  }
})
t.rejects(async () => {
  for await (let x of mp) {
    t.fail('should not be async iterable')
  }
})
@ -1,320 +0,0 @@
|
|||
'use strict'
|
||||
const t = require('tap')
|
||||
const MP = require('../index.js')
|
||||
|
||||
t.test('sync iteration', t => {
|
||||
const cases = {
|
||||
'buffer': [ null, [
|
||||
Buffer.from('ab'),
|
||||
Buffer.from('cd'),
|
||||
Buffer.from('e')
|
||||
]],
|
||||
'string': [ { encoding: 'utf8' }, ['ab', 'cd', 'e']],
|
||||
'object': [ { objectMode: true }, ['a', 'b', 'c', 'd', 'e']]
|
||||
}
|
||||
const runTest = (c, opt, expect) => {
|
||||
t.test(c, t => {
|
||||
const result = []
|
||||
const mp = new MP(opt)
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
for (let letter of mp) {
|
||||
result.push(letter)
|
||||
}
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
result.push.call(result, ...mp)
|
||||
mp.write('e')
|
||||
mp.end()
|
||||
for (let letter of mp) {
|
||||
result.push(letter) // e
|
||||
}
|
||||
for (let letter of mp) {
|
||||
result.push(letter) // nothing
|
||||
}
|
||||
t.match(result, expect)
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
for (let c in cases) {
|
||||
runTest(c, cases[c][0], cases[c][1])
|
||||
}
|
||||
|
||||
t.test('destroy while iterating', t => {
|
||||
const mp = new MP({ objectMode: true })
|
||||
mp.write('a')
|
||||
mp.write('b')
|
||||
mp.write('c')
|
||||
mp.write('d')
|
||||
const result = []
|
||||
for (let letter of mp) {
|
||||
result.push(letter)
|
||||
mp.destroy()
|
||||
}
|
||||
t.same(result, ['a'])
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('async iteration', t => {
|
||||
const expect = [
|
||||
'start\n',
|
||||
'foo\n',
|
||||
'foo\n',
|
||||
'foo\n',
|
||||
'foo\n',
|
||||
'foo\n',
|
||||
'bar\n'
|
||||
]
|
||||
|
||||
t.test('end immediate', async t => {
|
||||
const mp = new MP({ encoding: 'utf8' })
|
||||
let i = 5
|
||||
|
||||
mp.write('start\n')
|
||||
const inter = setInterval(() => {
|
||||
if (i --> 0)
|
||||
mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.end('bar\n')
|
||||
clearInterval(inter)
|
||||
}
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result, expect)
|
||||
})
|
||||
|
||||
t.test('end later', async t => {
|
||||
const mp = new MP({ encoding: 'utf8' })
|
||||
let i = 5
|
||||
|
||||
mp.write('start\n')
|
||||
const inter = setInterval(() => {
|
||||
if (i --> 0)
|
||||
mp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
mp.write('bar\n')
|
||||
setTimeout(() => mp.end())
|
||||
clearInterval(inter)
|
||||
}
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result, expect)
|
||||
})
|
||||
|
||||
t.test('multiple chunks at once, asyncly', async t => {
|
||||
const mp = new MP()
|
||||
let i = 6
|
||||
const write = () => {
|
||||
if (i === 6)
|
||||
mp.write(Buffer.from('start\n', 'utf8'))
|
||||
else if (i > 0)
|
||||
mp.write('foo\n')
|
||||
else if (i === 0) {
|
||||
mp.end('bar\n')
|
||||
clearInterval(inter)
|
||||
}
|
||||
i--
|
||||
}
|
||||
|
||||
const inter = setInterval(() => {
|
||||
write()
|
||||
write()
|
||||
write()
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.toString()).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('multiple object chunks at once, asyncly', async t => {
|
||||
const mp = new MP({ objectMode: true })
|
||||
let i = 6
|
||||
const write = () => {
|
||||
if (i === 6)
|
||||
mp.write(['start\n'])
|
||||
else if (i > 0)
|
||||
mp.write(['foo\n'])
|
||||
else if (i === 0) {
|
||||
mp.end(['bar\n'])
|
||||
clearInterval(inter)
|
||||
}
|
||||
i--
|
||||
}
|
||||
|
||||
const inter = setInterval(() => {
|
||||
write()
|
||||
write()
|
||||
write()
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.join('')).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('all chunks at once, asyncly', async t => {
|
||||
const mp = new MP()
|
||||
setTimeout(() => {
|
||||
mp.write(Buffer.from('start\n', 'utf8'))
|
||||
for (let i = 0; i < 5; i++) {
|
||||
mp.write('foo\n')
|
||||
}
|
||||
mp.end('bar\n')
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.toString()).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('all object chunks at once, asyncly', async t => {
|
||||
const mp = new MP({ objectMode: true })
|
||||
setTimeout(() => {
|
||||
mp.write(['start\n'])
|
||||
for (let i = 0; i < 5; i++) {
|
||||
mp.write(['foo\n'])
|
||||
}
|
||||
mp.end(['bar\n'])
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.join('')).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('all object chunks at once, syncly', async t => {
|
||||
const mp = new MP({ objectMode: true })
|
||||
mp.write(['start\n'])
|
||||
for (let i = 0; i < 5; i++) {
|
||||
mp.write(['foo\n'])
|
||||
}
|
||||
mp.end(['bar\n'])
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.join('')).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('pipe in all at once', async t => {
|
||||
const inp = new MP({ encoding: 'utf8' })
|
||||
const mp = new MP({ encoding: 'utf8' })
|
||||
inp.pipe(mp)
|
||||
|
||||
let i = 5
|
||||
inp.write('start\n')
|
||||
const inter = setInterval(() => {
|
||||
if (i --> 0)
|
||||
inp.write(Buffer.from('foo\n', 'utf8'))
|
||||
else {
|
||||
inp.end('bar\n')
|
||||
clearInterval(inter)
|
||||
}
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result, expect)
|
||||
})
|
||||
|
||||
t.test('pipe in multiple object chunks at once, asyncly', async t => {
|
||||
const mp = new MP({ objectMode: true })
|
||||
const inp = new MP({ objectMode: true })
|
||||
inp.pipe(mp)
|
||||
|
||||
let i = 5
|
||||
inp.write(['start\n'])
|
||||
const write = () => {
|
||||
if (i > 0)
|
||||
inp.write(['foo\n'])
|
||||
else if (i === 0) {
|
||||
inp.end(['bar\n'])
|
||||
clearInterval(inter)
|
||||
}
|
||||
i--
|
||||
}
|
||||
|
||||
const inter = setInterval(() => {
|
||||
write()
|
||||
write()
|
||||
write()
|
||||
})
|
||||
|
||||
const result = []
|
||||
for await (let x of mp)
|
||||
result.push(x)
|
||||
|
||||
t.same(result.map(x => x.join('')).join(''), expect.join(''))
|
||||
})
|
||||
|
||||
t.test('throw error', async t => {
|
||||
const mp = new MP()
|
||||
const poop = new Error('poop')
|
||||
setTimeout(() => {
|
||||
mp.read = () => { throw poop }
|
||||
mp.end('this is fine')
|
||||
})
|
||||
const result = []
|
||||
const run = async () => {
|
||||
for await (let x of mp) {
|
||||
result.push(x)
|
||||
}
|
||||
}
|
||||
|
||||
await t.rejects(run, poop)
|
||||
})
|
||||
|
||||
t.test('emit error', async t => {
|
||||
const mp = new MP()
|
||||
const poop = new Error('poop')
|
||||
setTimeout(() => mp.emit('error', poop))
|
||||
const result = []
|
||||
const run = async () => {
|
||||
for await (let x of mp) {
|
||||
result.push(x)
|
||||
}
|
||||
}
|
||||
|
||||
await t.rejects(run, poop)
|
||||
})
|
||||
|
||||
t.test('destroy', async t => {
|
||||
const mp = new MP()
|
||||
const poop = new Error('poop')
|
||||
setTimeout(() => mp.destroy())
|
||||
const result = []
|
||||
const run = async () => {
|
||||
for await (let x of mp) {
|
||||
result.push(x)
|
||||
}
|
||||
}
|
||||
|
||||
await t.rejects(run, { message: 'stream destroyed' })
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
@@ -1,15 +0,0 @@
const t = require('tap')
const MP = require('../')
t.test('pipe from ended stream', t => {
  const from = new MP()
  from.end().on('end', () => {
    t.equal(from.emittedEnd, true, 'from already emitted end')
    from.pipe(new MP()).on('end', () => t.end())
  })
})

t.test('pipe from ended stream with a promise', t => {
  const from = new MP()
  return from.end().promise().then(() =>
    from.pipe(new MP()).promise())
})
Some files were not shown because too many files have changed in this diff