Import Upstream version 6.1.11+ds1+~cs6.0.6

zhouganqing 2023-02-17 18:26:51 +08:00
commit ed23cb87a9
252 changed files with 57196 additions and 0 deletions

207
.eslintrc.json Normal file

@@ -0,0 +1,207 @@
{
"parserOptions": {
"ecmaVersion": 2018,
"ecmaFeatures": {},
"sourceType": "script"
},
"env": {
"es6": true,
"node": true
},
"plugins": [
"import",
"node",
"promise",
"standard"
],
"globals": {
"document": "readonly",
"navigator": "readonly",
"window": "readonly"
},
"rules": {
"accessor-pairs": "error",
"array-bracket-spacing": ["error", "never"],
"arrow-spacing": ["error", { "before": true, "after": true }],
"block-spacing": ["error", "always"],
"brace-style": ["error", "1tbs", { "allowSingleLine": false }],
"camelcase": ["error", { "properties": "never" }],
"comma-dangle": ["error", {
"arrays": "always-multiline",
"objects": "always-multiline",
"imports": "always-multiline",
"exports": "always-multiline",
"functions": "never"
}],
"comma-spacing": ["error", { "before": false, "after": true }],
"comma-style": ["error", "last"],
"computed-property-spacing": ["error", "never"],
"constructor-super": "error",
"curly": ["error", "multi-or-nest"],
"dot-location": ["error", "property"],
"dot-notation": ["error", { "allowKeywords": true }],
"eol-last": "error",
"eqeqeq": ["error", "always", { "null": "ignore" }],
"func-call-spacing": ["error", "never"],
"generator-star-spacing": ["error", { "before": true, "after": true }],
"handle-callback-err": ["error", "^(err|error)$" ],
"indent": ["error", 2, {
"SwitchCase": 1,
"VariableDeclarator": 1,
"outerIIFEBody": 1,
"MemberExpression": 1,
"FunctionDeclaration": { "parameters": 1, "body": 1 },
"FunctionExpression": { "parameters": 1, "body": 1 },
"CallExpression": { "arguments": 1 },
"ArrayExpression": 1,
"ObjectExpression": 1,
"ImportDeclaration": 1,
"flatTernaryExpressions": true,
"ignoreComments": false,
"ignoredNodes": ["TemplateLiteral *"]
}],
"key-spacing": ["error", { "beforeColon": false, "afterColon": true }],
"keyword-spacing": ["error", { "before": true, "after": true }],
"lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }],
"new-cap": ["error", { "newIsCap": true, "capIsNew": false, "properties": true }],
"new-parens": "error",
"no-array-constructor": "error",
"no-async-promise-executor": "error",
"no-caller": "error",
"no-case-declarations": "error",
"no-class-assign": "error",
"no-compare-neg-zero": "error",
"no-cond-assign": "off",
"no-const-assign": "error",
"no-constant-condition": ["error", { "checkLoops": false }],
"no-control-regex": "error",
"no-debugger": "error",
"no-delete-var": "error",
"no-dupe-args": "error",
"no-dupe-class-members": "error",
"no-dupe-keys": "error",
"no-duplicate-case": "error",
"no-empty-character-class": "error",
"no-empty-pattern": "error",
"no-eval": "error",
"no-ex-assign": "error",
"no-extend-native": "error",
"no-extra-bind": "error",
"no-extra-boolean-cast": "error",
"no-extra-parens": ["error", "functions"],
"no-fallthrough": "off",
"no-floating-decimal": "error",
"no-func-assign": "error",
"no-global-assign": "error",
"no-implied-eval": "error",
"no-inner-declarations": ["error", "functions"],
"no-invalid-regexp": "error",
"no-irregular-whitespace": "error",
"no-iterator": "error",
"no-labels": ["error", { "allowLoop": true, "allowSwitch": false }],
"no-lone-blocks": "error",
"no-misleading-character-class": "error",
"no-prototype-builtins": "error",
"no-useless-catch": "error",
"no-mixed-operators": "off",
"no-mixed-spaces-and-tabs": "error",
"no-multi-spaces": "error",
"no-multi-str": "error",
"no-multiple-empty-lines": ["error", { "max": 1, "maxEOF": 0 }],
"no-negated-in-lhs": "error",
"no-new": "off",
"no-new-func": "error",
"no-new-object": "error",
"no-new-require": "error",
"no-new-symbol": "error",
"no-new-wrappers": "error",
"no-obj-calls": "error",
"no-octal": "error",
"no-octal-escape": "error",
"no-path-concat": "error",
"no-proto": "error",
"no-redeclare": ["error", { "builtinGlobals": false }],
"no-regex-spaces": "error",
"no-return-assign": "off",
"no-self-assign": "off",
"no-self-compare": "error",
"no-sequences": "off",
"no-shadow-restricted-names": "error",
"no-sparse-arrays": "error",
"no-tabs": "error",
"no-template-curly-in-string": "error",
"no-this-before-super": "error",
"no-throw-literal": "off",
"no-trailing-spaces": "error",
"no-undef": "error",
"no-undef-init": "error",
"no-unexpected-multiline": "error",
"no-unmodified-loop-condition": "error",
"no-unneeded-ternary": ["error", { "defaultAssignment": false }],
"no-unreachable": "error",
"no-unsafe-finally": 0,
"no-unsafe-negation": "error",
"no-unused-expressions": ["off"],
"no-unused-vars": ["error", { "vars": "all", "args": "none", "ignoreRestSiblings": true }],
"no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }],
"no-useless-call": "error",
"no-useless-computed-key": "error",
"no-useless-constructor": "error",
"no-useless-escape": "error",
"no-useless-rename": "error",
"no-useless-return": "error",
"no-void": "error",
"no-whitespace-before-property": "error",
"no-with": "error",
"nonblock-statement-body-position": [2, "below"],
"object-curly-newline": "off",
"object-curly-spacing": "off",
"object-property-newline": ["error", { "allowMultiplePropertiesPerLine": true }],
"one-var": ["error", { "initialized": "never" }],
"operator-linebreak": "off",
"padded-blocks": ["error", { "blocks": "never", "switches": "never", "classes": "never" }],
"prefer-const": ["error", {"destructuring": "all"}],
"prefer-promise-reject-errors": "error",
"quote-props": ["error", "as-needed"],
"quotes": ["error", "single", { "avoidEscape": true, "allowTemplateLiterals": true }],
"rest-spread-spacing": ["error", "never"],
"semi": ["error", "never"],
"semi-spacing": ["error", { "before": false, "after": true }],
"space-before-blocks": ["error", "always"],
"space-before-function-paren": ["error", "always"],
"space-in-parens": ["error", "never"],
"space-infix-ops": "error",
"space-unary-ops": ["error", { "words": true, "nonwords": false }],
"spaced-comment": ["error", "always", {
"line": { "markers": ["*package", "!", "/", ",", "="] },
"block": { "balanced": true, "markers": ["*package", "!", ",", ":", "::", "flow-include"], "exceptions": ["*"] }
}],
"symbol-description": "error",
"template-curly-spacing": ["error", "never"],
"template-tag-spacing": ["error", "never"],
"unicode-bom": ["error", "never"],
"use-isnan": "error",
"valid-typeof": ["error", { "requireStringLiterals": true }],
"wrap-iife": ["error", "any", { "functionPrototypeMethods": true }],
"yield-star-spacing": ["error", "both"],
"yoda": ["error", "never"],
"import/export": "error",
"import/first": "error",
"import/no-absolute-path": ["error", { "esmodule": true, "commonjs": true, "amd": false }],
"import/no-duplicates": "error",
"import/no-named-default": "error",
"import/no-webpack-loader-syntax": "error",
"node/no-deprecated-api": "error",
"node/process-exit-as-throw": "error",
"promise/param-names": "off",
"standard/no-callback-literal": "error"
}
}

1
.github/CODEOWNERS vendored Normal file

@@ -0,0 +1 @@
* @npm/cli-team

2
.github/settings.yml vendored Normal file

@@ -0,0 +1,2 @@
---
_extends: 'open-source-project-boilerplate'

45
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,45 @@
name: CI
on: [push, pull_request]
jobs:
build:
strategy:
matrix:
node-version: [10.x, 12.x, 14.x, 16.x]
platform:
- os: ubuntu-latest
shell: bash
- os: macos-latest
shell: bash
- os: windows-latest
shell: powershell
fail-fast: false
runs-on: ${{ matrix.platform.os }}
defaults:
run:
shell: ${{ matrix.platform.shell }}
steps:
# there are files here that make windows unhappy by default
- name: Support longpaths
run: git config --global core.longpaths true
- name: Checkout Repository
uses: actions/checkout@v1.1.0
- name: Use Nodejs ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: use latest npm
run: npm i -g npm@latest
- name: Install dependencies
run: npm install
- name: Run Tap Tests
run: npm test -- -c -t0

71
.github/workflows/codeql-analysis.yml vendored Normal file

@@ -0,0 +1,71 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ main ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ main ]
schedule:
- cron: '27 20 * * 4'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
# Learn more:
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

68
CHANGELOG.md Normal file

@@ -0,0 +1,68 @@
# Changelog
## 6.0
- Drop support for node 6 and 8
- fix symlinks and hardlinks on windows being packed with `\`-style path
targets
## 5.0
- Address unpack race conditions using path reservations
- Change large-numbers errors from TypeError to Error
- Add `TAR_*` error codes
- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries
found in an archive
- do not treat ignored entries as an invalid archive
- drop support for node v4
- unpack: conditionally use a file mapping to write files on Windows
- Set more portable 'mode' value in portable mode
- Set `portable` gzip option in portable mode
## 4.4
- Add 'mtime' option to tar creation to force mtime
- unpack: only reuse file fs entries if nlink = 1
- unpack: rename before unlinking files on Windows
- Fix encoding/decoding of base-256 numbers
- Use `stat` instead of `lstat` when checking CWD
- Always provide a callback to fs.close()
## 4.3
- Add 'transform' unpack option
## 4.2
- Fail when zlib fails
## 4.1
- Add noMtime flag for tar creation
## 4.0
- unpack: raise error if cwd is missing or not a dir
- pack: don't drop dots from dotfiles when prefixing
## 3.1
- Support `@file.tar` as an entry argument to copy entries from one tar
file to another.
- Add `noPax` option
- `noResume` option for tar.t
- win32: convert `>|<?:` chars to windows-friendly form
- Exclude mtime for dirs in portable mode
## 3.0
- Minipass-based implementation
- Entirely new API surface, `tar.c()`, `tar.x()` etc., much closer to
system tar semantics
- Massive performance improvement
- Require node 4.x and higher
## 0.x, 1.x, 2.x - 2011-2014
- fstream-based implementation
- slow and kinda bad, but better than npm shelling out to the system `tar`

15
LICENSE Normal file

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

1042
README.md Normal file

File diff suppressed because it is too large

18
fs-minipass/.travis.yml Normal file

@@ -0,0 +1,18 @@
language: node_js
sudo: false
node_js:
- node
- 12
- 10
- 8
os:
- linux
cache:
directories:
- $HOME/.npm
notifications:
email: false

15
fs-minipass/LICENSE Normal file

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

70
fs-minipass/README.md Normal file

@@ -0,0 +1,70 @@
# fs-minipass
Filesystem streams based on [minipass](http://npm.im/minipass).
4 classes are exported:
- ReadStream
- ReadStreamSync
- WriteStream
- WriteStreamSync
When using `ReadStreamSync`, all of the data is made available
immediately upon consuming the stream. Nothing is buffered in memory
when the stream is constructed. If the stream is piped to a writer,
then it will synchronously `read()` and emit data into the writer as
fast as the writer can consume it. (That is, it will respect
backpressure.) If you call `stream.read()` then it will read the
entire file and return the contents.
When using `WriteStreamSync`, every write is flushed to the file
synchronously. If your writes all come in a single tick, then it'll
write it all out in a single tick. It's as synchronous as you are.
The async versions work much like their node builtin counterparts,
with the exception of introducing significantly less Stream machinery
overhead.
## USAGE
It's just streams, you pipe them or read() them or write() to them.
```js
const fsm = require('fs-minipass')
const readStream = new fsm.ReadStream('file.txt')
const writeStream = new fsm.WriteStream('output.txt')
writeStream.write('some file header or whatever\n')
readStream.pipe(writeStream)
```
## ReadStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `readSize` The size of reads to do, defaults to 16MB
- `size` The size of the file, if known. Prevents zero-byte read()
call at the end.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the file is done being read.
## WriteStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `mode` The mode to create the file with. Defaults to `0o666`.
- `start` The position in the file to start writing. If not
specified, then the file will start writing at position zero, and be
truncated by default.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the stream is ended.
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
passed in, since the file won't be opened in that case. Defaults to
`'r+'` if a `start` position is specified, or `'w'` otherwise.

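As a companion to the imported README above, here is a minimal sketch (not part of the upstream files) of a positioned write followed by a sized read; the file name is a placeholder:

```js
const fsm = require('fs-minipass')
const fs = require('fs')

// Write into an existing file starting at byte 10; with `start` set, the
// default open flag avoids truncating the file.
const ws = new fsm.WriteStream('data.bin', { start: 10 })
ws.end('patched')
ws.on('close', () => {
  // Pass `size` so the reader can skip the final zero-byte read() call.
  const size = fs.statSync('data.bin').size
  const rs = new fsm.ReadStream('data.bin', { size, encoding: 'utf8' })
  let out = ''
  rs.on('data', chunk => (out += chunk))
  rs.on('end', () => console.log('read', out.length, 'characters'))
})
```
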
390
fs-minipass/index.js Normal file

@@ -0,0 +1,390 @@
'use strict'
const MiniPass = require('minipass')
const EE = require('events').EventEmitter
const fs = require('fs')
let writev = fs.writev
/* istanbul ignore next */
if (!writev) {
// This entire block can be removed if support for earlier than Node.js
// 12.9.0 is not needed.
const binding = process.binding('fs')
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
writev = (fd, iovec, pos, cb) => {
const done = (er, bw) => cb(er, bw, iovec)
const req = new FSReqWrap()
req.oncomplete = done
binding.writeBuffers(fd, iovec, pos, req)
}
}
const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
class ReadStream extends MiniPass {
constructor (path, opt) {
opt = opt || {}
super(opt)
this.writable = false
if (typeof path !== 'string')
throw new TypeError('path must be a string')
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_path] = path
this[_readSize] = opt.readSize || 16*1024*1024
this[_reading] = false
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
this[_remain] = this[_size]
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
if (typeof this[_fd] === 'number')
this[_read]()
else
this[_open]()
}
get fd () { return this[_fd] }
get path () { return this[_path] }
write () {
throw new TypeError('this is a readable stream')
}
end () {
throw new TypeError('this is a readable stream')
}
[_open] () {
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
if (er)
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_read]()
}
}
[_makeBuf] () {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
}
[_read] () {
if (!this[_reading]) {
this[_reading] = true
const buf = this[_makeBuf]()
/* istanbul ignore if */
if (buf.length === 0) return process.nextTick(() => this[_onread](null, 0, buf))
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
this[_onread](er, br, buf))
}
}
[_onread] (er, br, buf) {
this[_reading] = false
if (er)
this[_onerror](er)
else if (this[_handleChunk](br, buf))
this[_read]()
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
fs.close(this[_fd], _ => this.emit('close'))
this[_fd] = null
}
}
[_onerror] (er) {
this[_reading] = true
this[_close]()
this.emit('error', er)
}
[_handleChunk] (br, buf) {
let ret = false
// no effect if infinite
this[_remain] -= br
if (br > 0)
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
if (br === 0 || this[_remain] <= 0) {
ret = false
this[_close]()
super.end()
}
return ret
}
emit (ev, data) {
switch (ev) {
case 'prefinish':
case 'finish':
break
case 'drain':
if (typeof this[_fd] === 'number')
this[_read]()
break
default:
return super.emit(ev, data)
}
}
}
class ReadStreamSync extends ReadStream {
[_open] () {
let threw = true
try {
this[_onopen](null, fs.openSync(this[_path], 'r'))
threw = false
} finally {
if (threw)
this[_close]()
}
}
[_read] () {
let threw = true
try {
if (!this[_reading]) {
this[_reading] = true
do {
const buf = this[_makeBuf]()
/* istanbul ignore next */
const br = buf.length === 0 ? 0 : fs.readSync(this[_fd], buf, 0, buf.length, null)
if (!this[_handleChunk](br, buf))
break
} while (true)
this[_reading] = false
}
threw = false
} finally {
if (threw)
this[_close]()
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
try {
fs.closeSync(this[_fd])
} catch (er) {}
this[_fd] = null
this.emit('close')
}
}
}
class WriteStream extends EE {
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = false
this[_writing] = false
this[_ended] = false
this[_needDrain] = false
this[_queue] = []
this[_path] = path
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
this[_pos] = typeof opt.start === 'number' ? opt.start : null
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
// truncating makes no sense when writing into the middle
const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
this[_defaultFlag] = opt.flags === undefined
this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
if (this[_fd] === null)
this[_open]()
}
get fd () { return this[_fd] }
get path () { return this[_path] }
[_onerror] (er) {
this[_close]()
this[_writing] = true
this.emit('error', er)
}
[_open] () {
fs.open(this[_path], this[_flags], this[_mode],
(er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er && er.code === 'ENOENT') {
this[_flags] = 'w'
this[_open]()
} else if (er)
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_flush]()
}
}
end (buf, enc) {
if (buf)
this.write(buf, enc)
this[_ended] = true
// synthetic after-write logic, where drain/finish live
if (!this[_writing] && !this[_queue].length &&
typeof this[_fd] === 'number')
this[_onwrite](null, 0)
}
write (buf, enc) {
if (typeof buf === 'string')
buf = Buffer.from(buf, enc)
if (this[_ended]) {
this.emit('error', new Error('write() after end()'))
return false
}
if (this[_fd] === null || this[_writing] || this[_queue].length) {
this[_queue].push(buf)
this[_needDrain] = true
return false
}
this[_writing] = true
this[_write](buf)
return true
}
[_write] (buf) {
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
this[_onwrite](er, bw))
}
[_onwrite] (er, bw) {
if (er)
this[_onerror](er)
else {
if (this[_pos] !== null)
this[_pos] += bw
if (this[_queue].length)
this[_flush]()
else {
this[_writing] = false
if (this[_ended] && !this[_finished]) {
this[_finished] = true
this[_close]()
this.emit('finish')
} else if (this[_needDrain]) {
this[_needDrain] = false
this.emit('drain')
}
}
}
}
[_flush] () {
if (this[_queue].length === 0) {
if (this[_ended])
this[_onwrite](null, 0)
} else if (this[_queue].length === 1)
this[_write](this[_queue].pop())
else {
const iovec = this[_queue]
this[_queue] = []
writev(this[_fd], iovec, this[_pos],
(er, bw) => this[_onwrite](er, bw))
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
fs.close(this[_fd], _ => this.emit('close'))
this[_fd] = null
}
}
}
class WriteStreamSync extends WriteStream {
[_open] () {
let fd
try {
fd = fs.openSync(this[_path], this[_flags], this[_mode])
} catch (er) {
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er && er.code === 'ENOENT') {
this[_flags] = 'w'
return this[_open]()
} else
throw er
}
this[_onopen](null, fd)
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
try {
fs.closeSync(this[_fd])
} catch (er) {}
this[_fd] = null
this.emit('close')
}
}
[_write] (buf) {
try {
this[_onwrite](null,
fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
} catch (er) {
this[_onwrite](er, 0)
}
}
}
exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync

3444
fs-minipass/package-lock.json generated Normal file

File diff suppressed because it is too large

39
fs-minipass/package.json Normal file

@@ -0,0 +1,39 @@
{
"name": "fs-minipass",
"version": "2.0.1",
"main": "index.js",
"scripts": {
"test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags"
},
"keywords": [],
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/fs-minipass.git"
},
"bugs": {
"url": "https://github.com/npm/fs-minipass/issues"
},
"homepage": "https://github.com/npm/fs-minipass#readme",
"description": "fs read and write streams based on minipass",
"dependencies": {
"minipass": "^3.0.0"
},
"devDependencies": {
"mutate-fs": "^2.0.1",
"tap": "^14.6.4"
},
"files": [
"index.js"
],
"tap": {
"check-coverage": true
},
"engines": {
"node": ">= 8"
}
}

345
fs-minipass/test/read.js Normal file

@@ -0,0 +1,345 @@
'use strict'
const t = require('tap')
const fsm = require('../')
const fs = require('fs')
const EE = require('events').EventEmitter
const mutateFS = require('mutate-fs')
t.test('read the readme', t => {
const p = __dirname + '/../README.md'
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8' })
t.isa(str.fd, 'number')
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8' })
t.isa(str.fd, 'number')
const out = []
let chunk
while (chunk = str.read())
out.push(chunk)
check(t, out.join(''))
})
return t.test('async', t => {
const str = new fsm.ReadStream(p, { encoding: 'utf8' })
t.equal(str.fd, null)
let sawFD
str.on('open', fd => sawFD = fd)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('close', _ => {
t.isa(sawFD, 'number')
check(t, out.join(''))
})
})
})
t.test('read the readme sized', t => {
const p = __dirname + '/../README.md'
const size = fs.statSync(p).size
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
const out = []
let chunk
while (chunk = str.read())
out.push(chunk)
check(t, out.join(''))
})
return t.test('async', t => {
const str = new fsm.ReadStream(p, { encoding: 'utf8', size: size })
t.equal(str.fd, null)
let sawFD
str.on('open', fd => sawFD = fd)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => {
t.isa(sawFD, 'number')
check(t, out.join(''))
})
})
})
t.test('slow sink', t => {
const chunks = []
const EE = require('events').EventEmitter
class SlowStream extends EE {
write (chunk) {
chunks.push(chunk)
setTimeout(_ => this.emit('drain'))
return false
}
end () { return this.write() }
}
const p = __dirname + '/../README.md'
const rm = fs.readFileSync(p, 'utf8')
const check = t => {
t.equal(chunks.join(''), rm)
chunks.length = 0
t.end()
}
t.test('sync', t => {
const ss = new SlowStream()
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', readSize: 5 })
str.pipe(ss)
// trigger a read-while-reading
str.on('readable', _ => str.emit('drain'))
str.on('end', _ => check(t))
})
return t.test('async', t => {
const ss = new SlowStream()
const str = new fsm.ReadStream(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
str.on('end', _ => check(t))
})
})
t.test('zeno reading style', t => {
t.teardown(mutateFS.zenoRead())
const chunks = []
const EE = require('events').EventEmitter
class Collector extends EE {
write (chunk) {
chunks.push(chunk)
return true
}
end () {}
}
const p = __dirname + '/../README.md'
const rm = fs.readFileSync(p, 'utf8')
const check = t => {
t.equal(chunks.join(''), rm)
chunks.length = 0
t.end()
}
t.test('sync', t => {
const ss = new Collector()
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
check(t)
})
return t.test('async', t => {
const ss = new Collector()
const str = new fsm.ReadStream(p, { encoding: 'utf8', readSize: 256 })
str.pipe(ss)
str.on('end', _ => check(t))
})
})
t.test('fail open', t => {
const poop = new Error('poop')
t.teardown(mutateFS.fail('open', poop))
t.throws(_ => new fsm.ReadStreamSync(__filename), poop)
const str = new fsm.ReadStream(__filename)
str.on('error', er => {
t.equal(er, poop)
t.end()
})
})
t.test('type errors', t => {
const er = new TypeError('this is a readable stream')
t.throws(_ => new fsm.ReadStream(__filename).write('hello'), er)
t.throws(_ => new fsm.ReadStream(__filename).end(), er)
const pathstr = new TypeError('path must be a string')
t.throws(_ => new fsm.ReadStream(1234), pathstr)
t.end()
})
t.test('fail read', t => {
const poop = new Error('poop')
const badFDs = new Set()
const read = fs.read
const readSync = fs.readSync
const open = fs.open
const openSync = fs.openSync
t.teardown(_ => {
fs.open = open
fs.openSync = openSync
fs.read = read
fs.readSync = readSync
})
fs.open = (path, flags, cb) => {
if (path === __filename)
open(path, flags, (er, fd) => {
if (!er)
badFDs.add(fd)
return cb(er, fd)
})
else
open(path, flags, cb)
}
fs.openSync = (path, flags) => {
const fd = openSync(path, flags)
if (path === __filename)
badFDs.add(fd)
return fd
}
fs.read = function (fd, buf, offset, length, pos, cb) {
if (badFDs.has(fd))
process.nextTick(_ => cb(new Error('poop')))
else
read(fd, buf, offset, length, pos, cb)
}
fs.readSync = function (fd, buf, offset, length, pos) {
if (badFDs.has(fd))
throw new Error('poop sync')
}
t.throws(_ => new fsm.ReadStreamSync(__filename))
t.test('async', t => {
const str = new fsm.ReadStream(__filename)
str.once('error', er => {
str.on('error', er => {
console.error('got an other error', er)
})
t.match(er, poop)
t.end()
})
})
t.end()
})
t.test('fd test', t => {
const p = __dirname + '/../README.md'
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res) => {
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', fd: fd })
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''))
})
t.test('sync using read()', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, { encoding: 'utf8', fd: fd })
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
let chunk
while (chunk = str.read())
out.push(chunk)
check(t, out.join(''))
})
t.test('async', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStream(p, { encoding: 'utf8', fd: fd })
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => check(t, out.join('')))
})
t.end()
})
t.test('fd test, no autoClose', t => {
const p = __dirname + '/../README.md'
const rm = fs.readFileSync(p, 'utf8')
const check = (t, res, fd) => {
// will throw EBADF if already closed
fs.closeSync(fd)
t.equal(rm, res)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, {
encoding: 'utf8',
fd: fd,
autoClose: false
})
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
str.on('data', chunk => out.push(chunk))
check(t, out.join(''), fd)
})
t.test('sync using read()', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStreamSync(p, {
encoding: 'utf8',
fd: fd,
autoClose: false
})
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
let chunk
while (chunk = str.read())
out.push(chunk)
check(t, out.join(''), fd)
})
t.test('async', t => {
const fd = fs.openSync(p, 'r')
const str = new fsm.ReadStream(p, {
encoding: 'utf8',
fd: fd,
autoClose: false
})
t.isa(str.fd, 'number')
t.equal(str.path, p)
const out = []
t.equal(str.read(), null)
str.on('data', chunk => out.push(chunk))
str.on('end', _ => check(t, out.join(''), fd))
})
t.end()
})

494
fs-minipass/test/write.js Normal file

@@ -0,0 +1,494 @@
'use strict'
const t = require('tap')
const fsm = require('../')
const fs = require('fs')
const EE = require('events').EventEmitter
const mutateFS = require('mutate-fs')
t.test('basic write', t => {
const p = __dirname + '/basic-write'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
new fsm.WriteStreamSync(p).end('ok')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.end('ok')
s.on('close', _ => check(t))
})
t.end()
})
t.test('write then end', t => {
const p = __dirname + '/write-then-end'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'okend')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const s = new fsm.WriteStreamSync(p)
s.write('ok')
s.end('end')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.write('ok')
s.end('end')
t.equal(s.fd, null)
t.equal(s.path, p)
s.on('open', fd => {
t.equal(fd, s.fd)
t.isa(fd, 'number')
})
s.on('finish', _ => check(t))
})
t.end()
})
t.test('multiple writes', t => {
const p = __dirname + '/multiple-writes'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'abcdefghijklmnop')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const s = new fsm.WriteStreamSync(p)
s.write('a')
s.write('b')
s.write('c')
s.write('d')
s.write('e')
s.write('f')
s.write(new Buffer('676869', 'hex'))
s.write('jklm')
s.write(new Buffer('nop'))
s.end()
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p)
s.write('a')
s.write('b')
s.write('c')
s.write('d')
s.write('e')
s.write('f')
s.write(new Buffer('676869', 'hex'))
s.write('jklm')
s.write(new Buffer('nop'))
s.end()
s.on('finish', _ => check(t))
})
t.test('async after open', t => {
const s = new fsm.WriteStream(p)
s.on('open', fd => {
t.isa(fd, 'number')
t.ok(s.write('a'))
t.notOk(s.write('b'))
t.notOk(s.write('c'))
t.notOk(s.write('d'))
t.notOk(s.write('e'))
t.notOk(s.write('f'))
t.notOk(s.write(new Buffer('676869', 'hex')))
t.notOk(s.write('jklm'))
t.notOk(s.write(new Buffer('nop')))
s.end()
s.on('finish', _ => check(t))
})
})
t.test('async after open, drains', t => {
const s = new fsm.WriteStream(p)
s.on('open', fd => {
t.isa(fd, 'number')
t.ok(s.write('a'))
t.notOk(s.write('b'))
s.once('drain', _ => {
t.ok(s.write('c'))
t.notOk(s.write('d'))
t.notOk(s.write('e'))
s.once('drain', _ => {
t.ok(s.write('f'))
t.notOk(s.write(new Buffer('676869', 'hex')))
t.notOk(s.write('jklm'))
t.notOk(s.write(new Buffer('nop')))
s.once('drain', _ => s.end())
})
})
s.on('finish', _ => check(t))
})
})
t.end()
})
t.test('flags', t => {
const p = __dirname + '/flags'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
new fsm.WriteStreamSync(p, { flags: 'w+' }).end('ok')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p, { flags: 'w+' })
s.end('ok')
s.on('finish', _ => check(t))
})
t.end()
})
t.test('mode', t => {
const p = __dirname + '/mode'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
t.equal(fs.statSync(p).mode & 0o777, 0o700)
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
new fsm.WriteStreamSync(p, { mode: 0o700 }).end('ok')
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p, { mode: 0o700 })
s.end('ok')
s.on('finish', _ => check(t))
})
t.end()
})
t.test('write after end', t => {
const p = __dirname + '/write-after-end'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const s = new fsm.WriteStreamSync(p, { mode: 0o700 })
s.end('ok')
t.throws(_ => s.write('626164', 'hex'),
new Error('write() after end()'))
check(t)
})
t.test('async', t => {
const s = new fsm.WriteStream(p, { mode: 0o700 })
s.end('ok')
s.on('error', e => {
t.match(e, new Error('write() after end()'))
s.on('finish', _ => check(t))
})
s.write('626164', 'hex')
})
t.end()
})
t.test('fd', t => {
const p = __dirname + '/fd'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'w')
new fsm.WriteStreamSync(p, { fd: fd }).end('ok')
check(t)
})
t.test('async', t => {
const fd = fs.openSync(p, 'w')
const s = new fsm.WriteStream(p, { fd: fd })
s.end('ok')
s.on('finish', _ => check(t))
})
t.end()
})
t.test('empty write', t => {
const p = __dirname + '/empty-write'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), '')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
t.test('empty string', t => {
new fsm.WriteStreamSync(p).end('')
check(t)
})
t.test('no chunk to end', t => {
new fsm.WriteStreamSync(p).end('')
check(t)
})
t.end()
})
return t.test('async', t => {
t.test('immediate', t => {
t.test('no chunk to end', t => {
const s = new fsm.WriteStream(p)
s.end()
s.on('finish', _ => check(t))
})
return t.test('empty string', t => {
const s = new fsm.WriteStream(p)
s.end('')
s.on('finish', _ => check(t))
})
})
return t.test('end on open', t => {
t.test('no chunk to end', t => {
const s = new fsm.WriteStream(p)
s.on('open', _ => s.end())
s.on('finish', _ => check(t))
})
return t.test('empty string', t => {
const s = new fsm.WriteStream(p)
s.on('open', _ => s.end(''))
s.on('finish', _ => check(t))
})
})
})
})
t.test('fail open', t => {
const p = __dirname + '/fail-open'
const poop = new Error('poop')
t.teardown(mutateFS.fail('open', poop))
t.throws(_ => new fsm.WriteStreamSync(p), poop)
const str = new fsm.WriteStream(p)
str.on('error', er => {
t.equal(er, poop)
t.end()
})
})
t.test('fail write', t => {
const p = __dirname + '/fail-write'
const poop = new Error('poop')
t.teardown(mutateFS.fail('write', poop))
t.throws(_ => new fsm.WriteStreamSync(p).write('foo'), poop)
const str = new fsm.WriteStream(p)
str.write('foo')
str.on('error', er => {
t.equal(er, poop)
fs.unlinkSync(p)
t.end()
})
})
t.test('positioned write', t => {
const p = __dirname + '/positioned-write'
const write = new Buffer('this is the data that is written')
const data = Buffer.allocUnsafe(256)
for (let i = 0; i < 256; i++) {
data[i] = i
}
const expect = new Buffer(data.toString('hex'), 'hex')
for (let i = 0; i < write.length; i++) {
expect[i + 100] = write[i]
}
const check = t => {
t.same(fs.readFileSync(p), expect)
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
fs.writeFileSync(p, data)
new fsm.WriteStreamSync(p, { start: 100 }).end(write)
check(t)
})
t.test('async', t => {
fs.writeFileSync(p, data)
const s = new fsm.WriteStream(p, { start: 100 })
s.end(write)
s.on('finish', _ => check(t))
})
t.end()
})
t.test('positioned then unpositioned', t => {
const p = __dirname + '/positioned-then-unpositioned'
const write = new Buffer('this is the data that is written')
const data = Buffer.allocUnsafe(256)
for (let i = 0; i < 256; i++) {
data[i] = i
}
const expect = new Buffer(data.toString('hex'), 'hex')
for (let i = 0; i < write.length; i++) {
expect[i + 100] = write[i]
}
const check = t => {
t.same(fs.readFileSync(p), expect)
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
fs.writeFileSync(p, data)
const s = new fsm.WriteStreamSync(p, { start: 100 })
s.write(write.slice(0, 20))
s.end(write.slice(20))
check(t)
})
t.test('async', t => {
fs.writeFileSync(p, data)
const s = new fsm.WriteStream(p, { start: 100 })
s.write(write.slice(0, 20))
s.end(write.slice(20))
s.on('close', _ => check(t))
})
t.end()
})
t.test('positioned then unpositioned at zero', t => {
const p = __dirname + '/positioned-then-unpositioned'
const write = new Buffer('this is the data that is written')
const data = Buffer.allocUnsafe(256)
for (let i = 0; i < 256; i++) {
data[i] = i
}
const expect = new Buffer(data.toString('hex'), 'hex')
for (let i = 0; i < write.length; i++) {
expect[i] = write[i]
}
const check = t => {
t.same(fs.readFileSync(p), expect)
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
fs.writeFileSync(p, data)
const s = new fsm.WriteStreamSync(p, { start: 0 })
s.write(write.slice(0, 20))
s.end(write.slice(20))
check(t)
})
t.test('async', t => {
fs.writeFileSync(p, data)
const s = new fsm.WriteStream(p, { start: 0 })
s.write(write.slice(0, 20))
s.end(write.slice(20))
s.on('close', _ => check(t))
})
t.end()
})
t.test('fd, no autoClose', t => {
const p = __dirname + '/fd-no-autoclose'
const check = (t, fd) => {
fs.closeSync(fd)
t.equal(fs.readFileSync(p, 'utf8'), 'ok')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const fd = fs.openSync(p, 'w')
new fsm.WriteStreamSync(p, { fd: fd, autoClose: false }).end('ok')
check(t, fd)
})
t.test('async', t => {
const fd = fs.openSync(p, 'w')
const s = new fsm.WriteStream(p, { fd: fd, autoClose: false })
s.end('ok')
s.on('finish', _ => check(t, fd))
})
t.end()
})
t.test('positioned, nonexistent file', t => {
const p = __dirname + '/pos-noent'
const check = t => {
t.equal(fs.readFileSync(p, 'utf8'), '\0\0asdf\0\0\0\0asdf')
fs.unlinkSync(p)
t.end()
}
t.test('sync', t => {
const w = new fsm.WriteStreamSync(p, { start: 10 })
w.end('asdf')
const w2 = new fsm.WriteStreamSync(p, { start: 2 })
w2.end('asdf')
check(t)
})
t.test('async', t => {
const w = new fsm.WriteStream(p, { start: 10 })
w.end('asdf')
w.on('close', _ => {
const w = new fsm.WriteStream(p, { start: 2 })
w.end('asdf')
w.on('close', _ => check(t))
})
})
t.end()
})

18
index.js Normal file

@@ -0,0 +1,18 @@
'use strict'
// high-level commands
exports.c = exports.create = exports.Create = require('./lib/create.js')
exports.r = exports.replace = exports.Replace = require('./lib/replace.js')
exports.t = exports.list = exports.List = require('./lib/list.js')
exports.u = exports.update = exports.Update = require('./lib/update.js')
exports.x = exports.extract = exports.Extract = require('./lib/extract.js')
// classes
exports.Pack = require('./lib/pack.js')
exports.Unpack = require('./lib/unpack.js')
exports.Parse = require('./lib/parse.js')
exports.ReadEntry = require('./lib/read-entry.js')
exports.WriteEntry = require('./lib/write-entry.js')
exports.Header = require('./lib/header.js')
exports.Pax = require('./lib/pax.js')
exports.types = require('./lib/types.js')

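For orientation, a short illustrative sketch of the high-level API re-exported above (archive and directory names are placeholders, not part of the import):

```js
const tar = require('./index.js')

// With `file` set, the high-level commands return a Promise.
// The `cwd` for extraction must already exist; unpack raises an error otherwise.
tar.c({ gzip: true, file: 'out.tgz' }, ['lib/', 'index.js'])
  .then(() => tar.t({ file: 'out.tgz', onentry: entry => console.log(entry.path) }))
  .then(() => tar.x({ file: 'out.tgz', cwd: 'dest' }))
```
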
104
lib/create.js Normal file

@@ -0,0 +1,104 @@
'use strict'
// tar -c
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
module.exports = (opt_, files, cb) => {
if (typeof files === 'function')
cb = files
if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
return opt.file && opt.sync ? createFileSync(opt, files)
: opt.file ? createFile(opt, files, cb)
: opt.sync ? createSync(opt, files)
: create(opt, files)
}
const createFileSync = (opt, files) => {
const p = new Pack.Sync(opt)
const stream = new fsm.WriteStreamSync(opt.file, {
mode: opt.mode || 0o666,
})
p.pipe(stream)
addFilesSync(p, files)
}
const createFile = (opt, files, cb) => {
const p = new Pack(opt)
const stream = new fsm.WriteStream(opt.file, {
mode: opt.mode || 0o666,
})
p.pipe(stream)
const promise = new Promise((res, rej) => {
stream.on('error', rej)
stream.on('close', res)
p.on('error', rej)
})
addFilesAsync(p, files)
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry),
})
} else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else
p.add(file)
}
p.end()
}
const createSync = (opt, files) => {
const p = new Pack.Sync(opt)
addFilesSync(p, files)
return p
}
const create = (opt, files) => {
const p = new Pack(opt)
addFilesAsync(p, files)
return p
}

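The `@`-prefixed handling in addFilesSync/addFilesAsync above lets an entry name refer to an existing archive whose entries are copied into the new one; an illustrative call (file names are placeholders):

```js
const create = require('./lib/create.js')

// Entries from old.tar are appended alongside the newly packed file.
create({ file: 'combined.tar' }, ['@old.tar', 'extra-notes.txt'])
  .then(() => console.log('combined.tar written'))
```
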
107
lib/extract.js Normal file

@@ -0,0 +1,107 @@
'use strict'
// tar -x
const hlo = require('./high-level-opt.js')
const Unpack = require('./unpack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')
module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
return opt.file && opt.sync ? extractFileSync(opt)
: opt.file ? extractFile(opt, cb)
: opt.sync ? extractSync(opt)
: extract(opt)
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [stripSlash(f), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
: file => mapHas(stripSlash(file))
}
const extractFileSync = opt => {
const u = new Unpack.Sync(opt)
const file = opt.file
const stat = fs.statSync(file)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const stream = new fsm.ReadStreamSync(file, {
readSize: readSize,
size: stat.size,
})
stream.pipe(u)
}
const extractFile = (opt, cb) => {
const u = new Unpack(opt)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
u.on('error', reject)
u.on('close', resolve)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size,
})
stream.on('error', reject)
stream.pipe(u)
}
})
})
return cb ? p.then(cb, cb) : p
}
const extractSync = opt => new Unpack.Sync(opt)
const extract = opt => new Unpack(opt)

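The filesFilter above limits extraction to the named entries and, via the recursive mapHas walk, anything underneath a named directory; an illustrative call (paths are placeholders):

```js
const extract = require('./lib/extract.js')

// Only 'docs/' and its children are unpacked; the cwd directory must already exist.
extract({ file: 'archive.tgz', cwd: 'dest' }, ['docs/'])
  .then(() => console.log('done'))
```
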
20
lib/get-write-flag.js Normal file

@@ -0,0 +1,20 @@
// Get the appropriate flag to use for creating files
// We use fmap on Windows platforms for files less than
// 512kb. This is a fairly low limit, but avoids making
// things slower in some cases. Since most of what this
// library is used for is extracting tarballs of many
// relatively small files in npm packages and the like,
// it can be a big boost on Windows platforms.
// Only supported in Node v12.9.0 and above.
const platform = process.env.__FAKE_PLATFORM__ || process.platform
const isWindows = platform === 'win32'
const fs = global.__FAKE_TESTING_FS__ || require('fs')
/* istanbul ignore next */
const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants
const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP
const fMapLimit = 512 * 1024
const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
module.exports = !fMapEnabled ? () => 'w'
: size => size < fMapLimit ? fMapFlag : 'w'

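A small sketch of how the exported helper behaves (illustrative; the actual return value depends on platform and Node version):

```js
const getWriteFlag = require('./lib/get-write-flag.js')

// On Windows with Node >= 12.9.0, files under 512kb get the numeric fmap flag;
// everywhere else, and for larger files, the plain 'w' flag is returned.
console.log(getWriteFlag(100 * 1024))       // numeric fmap flag or 'w'
console.log(getWriteFlag(2 * 1024 * 1024))  // 'w'
```
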
288
lib/header.js Normal file

@@ -0,0 +1,288 @@
'use strict'
// parse a 512-byte header block to a data object, or vice-versa
// encode returns `true` if a pax extended header is needed, because
// the data could not be faithfully encoded in a simple header.
// (Also, check header.needPax to see if it needs a pax header.)
const types = require('./types.js')
const pathModule = require('path').posix
const large = require('./large-numbers.js')
const SLURP = Symbol('slurp')
const TYPE = Symbol('type')
class Header {
constructor (data, off, ex, gex) {
this.cksumValid = false
this.needPax = false
this.nullBlock = false
this.block = null
this.path = null
this.mode = null
this.uid = null
this.gid = null
this.size = null
this.mtime = null
this.cksum = null
this[TYPE] = '0'
this.linkpath = null
this.uname = null
this.gname = null
this.devmaj = 0
this.devmin = 0
this.atime = null
this.ctime = null
if (Buffer.isBuffer(data))
this.decode(data, off || 0, ex, gex)
else if (data)
this.set(data)
}
decode (buf, off, ex, gex) {
if (!off)
off = 0
if (!buf || !(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
this.path = decString(buf, off, 100)
this.mode = decNumber(buf, off + 100, 8)
this.uid = decNumber(buf, off + 108, 8)
this.gid = decNumber(buf, off + 116, 8)
this.size = decNumber(buf, off + 124, 12)
this.mtime = decDate(buf, off + 136, 12)
this.cksum = decNumber(buf, off + 148, 12)
// if we have extended or global extended headers, apply them now
// See https://github.com/npm/node-tar/pull/187
this[SLURP](ex)
this[SLURP](gex, true)
// old tar versions marked dirs as a file with a trailing /
this[TYPE] = decString(buf, off + 156, 1)
if (this[TYPE] === '')
this[TYPE] = '0'
if (this[TYPE] === '0' && this.path.substr(-1) === '/')
this[TYPE] = '5'
// tar implementations sometimes incorrectly put the stat(dir).size
// as the size in the tarball, even though Directory entries are
// not able to have any body at all. In the very rare chance that
// it actually DOES have a body, we weren't going to do anything with
// it anyway, and it'll just be a warning about an invalid header.
if (this[TYPE] === '5')
this.size = 0
this.linkpath = decString(buf, off + 157, 100)
if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
this.uname = decString(buf, off + 265, 32)
this.gname = decString(buf, off + 297, 32)
this.devmaj = decNumber(buf, off + 329, 8)
this.devmin = decNumber(buf, off + 337, 8)
if (buf[off + 475] !== 0) {
// definitely a prefix, definitely >130 chars.
const prefix = decString(buf, off + 345, 155)
this.path = prefix + '/' + this.path
} else {
const prefix = decString(buf, off + 345, 130)
if (prefix)
this.path = prefix + '/' + this.path
this.atime = decDate(buf, off + 476, 12)
this.ctime = decDate(buf, off + 488, 12)
}
}
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++)
sum += buf[i]
for (let i = off + 156; i < off + 512; i++)
sum += buf[i]
this.cksumValid = sum === this.cksum
if (this.cksum === null && sum === 8 * 0x20)
this.nullBlock = true
}
[SLURP] (ex, global) {
for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = ex[k]
}
}
encode (buf, off) {
if (!buf) {
buf = this.block = Buffer.alloc(512)
off = 0
}
if (!off)
off = 0
if (!(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
const prefixSize = this.ctime || this.atime ? 130 : 155
const split = splitPrefix(this.path || '', prefixSize)
const path = split[0]
const prefix = split[1]
this.needPax = split[2]
this.needPax = encString(buf, off, 100, path) || this.needPax
this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
buf[off + 156] = this[TYPE].charCodeAt(0)
this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
buf.write('ustar\u000000', off + 257, 8)
this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
if (buf[off + 475] !== 0)
this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
else {
this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
}
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++)
sum += buf[i]
for (let i = off + 156; i < off + 512; i++)
sum += buf[i]
this.cksum = sum
encNumber(buf, off + 148, 8, this.cksum)
this.cksumValid = true
return this.needPax
}
set (data) {
for (const i in data) {
if (data[i] !== null && data[i] !== undefined)
this[i] = data[i]
}
}
get type () {
return types.name.get(this[TYPE]) || this[TYPE]
}
get typeKey () {
return this[TYPE]
}
set type (type) {
if (types.code.has(type))
this[TYPE] = types.code.get(type)
else
this[TYPE] = type
}
}
const splitPrefix = (p, prefixSize) => {
const pathSize = 100
let pp = p
let prefix = ''
let ret
const root = pathModule.parse(p).root || '.'
if (Buffer.byteLength(pp) < pathSize)
ret = [pp, prefix, false]
else {
// first set prefix to the dir, and path to the base
prefix = pathModule.dirname(pp)
pp = pathModule.basename(pp)
do {
// both fit!
if (Buffer.byteLength(pp) <= pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp, prefix, false]
// prefix fits in prefix, but path doesn't fit in path
else if (Buffer.byteLength(pp) > pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp.substr(0, pathSize - 1), prefix, true]
else {
// make path take a bit from prefix
pp = pathModule.join(pathModule.basename(prefix), pp)
prefix = pathModule.dirname(prefix)
}
} while (prefix !== root && !ret)
// at this point, found no resolution, just truncate
if (!ret)
ret = [p.substr(0, pathSize - 1), '', true]
}
return ret
}
const decString = (buf, off, size) =>
buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
const decDate = (buf, off, size) =>
numToDate(decNumber(buf, off, size))
const numToDate = num => num === null ? null : new Date(num * 1000)
const decNumber = (buf, off, size) =>
buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
: decSmallNumber(buf, off, size)
const nanNull = value => isNaN(value) ? null : value
const decSmallNumber = (buf, off, size) =>
nanNull(parseInt(
buf.slice(off, off + size)
.toString('utf8').replace(/\0.*$/, '').trim(), 8))
// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
12: 0o77777777777,
8: 0o7777777,
}
const encNumber = (buf, off, size, number) =>
number === null ? false :
number > MAXNUM[size] || number < 0
? (large.encode(number, buf.slice(off, off + size)), true)
: (encSmallNumber(buf, off, size, number), false)
const encSmallNumber = (buf, off, size, number) =>
buf.write(octalString(number, size), off, size, 'ascii')
const octalString = (number, size) =>
padOctal(Math.floor(number).toString(8), size)
const padOctal = (string, size) =>
(string.length === size - 1 ? string
: new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
const encDate = (buf, off, size, date) =>
date === null ? false :
encNumber(buf, off, size, date.getTime() / 1000)
// enough to fill the longest string we've got
const NULLS = new Array(156).join('\0')
// pad with nulls, return true if it's longer or non-ascii
const encString = (buf, off, size, string) =>
string === null ? false :
(buf.write(string + NULLS, off, size, 'utf8'),
string.length !== Buffer.byteLength(string) || string.length > size)
module.exports = Header

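A minimal encode/decode round trip of the Header class above (field values are illustrative):

```js
const Header = require('./lib/header.js')

// Encode a header into its own 512-byte block, then decode it back.
const h = new Header({ path: 'hello.txt', mode: 0o644, size: 5, type: 'File', mtime: new Date() })
h.encode() // with no buffer argument, allocates h.block
const round = new Header(h.block)
console.log(round.path, round.size, round.cksumValid) // hello.txt 5 true
```
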
29
lib/high-level-opt.js Normal file

@@ -0,0 +1,29 @@
'use strict'
// turn tar(1) style args like `C` into the more verbose things like `cwd`
const argmap = new Map([
['C', 'cwd'],
['f', 'file'],
['z', 'gzip'],
['P', 'preservePaths'],
['U', 'unlink'],
['strip-components', 'strip'],
['stripComponents', 'strip'],
['keep-newer', 'newer'],
['keepNewer', 'newer'],
['keep-newer-files', 'newer'],
['keepNewerFiles', 'newer'],
['k', 'keep'],
['keep-existing', 'keep'],
['keepExisting', 'keep'],
['m', 'noMtime'],
['no-mtime', 'noMtime'],
['p', 'preserveOwner'],
['L', 'follow'],
['h', 'follow'],
])
module.exports = opt => opt ? Object.keys(opt).map(k => [
argmap.has(k) ? argmap.get(k) : k, opt[k],
]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}

99
lib/large-numbers.js Normal file

@ -0,0 +1,99 @@
'use strict'
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive.
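// Worked example (illustrative): encode(511, Buffer.alloc(8)) produces
// <Buffer 80 00 00 00 00 00 01 ff>, and parse() of that buffer returns 511.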
const encode = (num, buf) => {
if (!Number.isSafeInteger(num))
// The number is so large that javascript cannot represent it with integer
// precision.
throw Error('cannot encode number outside of javascript safe integer range')
else if (num < 0)
encodeNegative(num, buf)
else
encodePositive(num, buf)
return buf
}
const encodePositive = (num, buf) => {
buf[0] = 0x80
for (var i = buf.length; i > 1; i--) {
buf[i - 1] = num & 0xff
num = Math.floor(num / 0x100)
}
}
const encodeNegative = (num, buf) => {
buf[0] = 0xff
var flipped = false
num = num * -1
for (var i = buf.length; i > 1; i--) {
var byte = num & 0xff
num = Math.floor(num / 0x100)
if (flipped)
buf[i - 1] = onesComp(byte)
else if (byte === 0)
buf[i - 1] = 0
else {
flipped = true
buf[i - 1] = twosComp(byte)
}
}
}
const parse = (buf) => {
const pre = buf[0]
const value = pre === 0x80 ? pos(buf.slice(1, buf.length))
: pre === 0xff ? twos(buf)
: null
if (value === null)
throw Error('invalid base256 encoding')
if (!Number.isSafeInteger(value))
// The number is so large that javascript cannot represent it with integer
// precision.
throw Error('parsed number outside of javascript safe integer range')
return value
}
const twos = (buf) => {
var len = buf.length
var sum = 0
var flipped = false
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
var f
if (flipped)
f = onesComp(byte)
else if (byte === 0)
f = byte
else {
flipped = true
f = twosComp(byte)
}
if (f !== 0)
sum -= f * Math.pow(256, len - i - 1)
}
return sum
}
const pos = (buf) => {
var len = buf.length
var sum = 0
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
if (byte !== 0)
sum += byte * Math.pow(256, len - i - 1)
}
return sum
}
const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
module.exports = {
encode,
parse,
}

132
lib/list.js Normal file
View File

@ -0,0 +1,132 @@
'use strict'
// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?
// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')
module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
if (!opt.noResume)
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt)
: opt.file ? listFile(opt, cb)
: list(opt)
}
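// Minimal usage sketch (illustrative; 'archive.tar' is a hypothetical file):
//   require('./list.js')({ file: 'archive.tar', onentry: entry => console.log(entry.path) })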
const onentryFunction = opt => {
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [stripSlash(f), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
: file => mapHas(stripSlash(file))
}
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
if (stat.size < readSize)
p.end(fs.readFileSync(file))
else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const listFile = (opt, cb) => {
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size,
})
stream.on('error', reject)
stream.pipe(parse)
}
})
})
return cb ? p.then(cb, cb) : p
}
const list = opt => new Parser(opt)

213
lib/mkdir.js Normal file

@ -0,0 +1,213 @@
'use strict'
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
const mkdirp = require('mkdirp')
const fs = require('fs')
const path = require('path')
const chownr = require('chownr')
const normPath = require('./normalize-windows-path.js')
class SymlinkError extends Error {
constructor (symlink, path) {
super('Cannot extract through symbolic link')
this.path = path
this.symlink = symlink
}
get name () {
return 'SymlinkError'
}
}
class CwdError extends Error {
constructor (path, code) {
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
}
get name () {
return 'CwdError'
}
}
const cGet = (cache, key) => cache.get(normPath(key))
const cSet = (cache, key, val) => cache.set(normPath(key), val)
const checkCwd = (dir, cb) => {
fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
cb(er)
})
}
module.exports = (dir, opt, cb) => {
dir = normPath(dir)
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = normPath(opt.cwd)
const done = (er, created) => {
if (er)
cb(er)
else {
cSet(cache, dir, true)
if (created && doChown)
chownr(created, uid, gid, er => done(er))
else if (needChmod)
fs.chmod(dir, mode, cb)
else
cb()
}
}
if (cache && cGet(cache, dir) === true)
return done()
if (dir === cwd)
return checkCwd(dir, done)
if (preserve)
return mkdirp(dir, {mode}).then(made => done(null, made), done)
const sub = normPath(path.relative(cwd, dir))
const parts = sub.split('/')
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length)
return cb(null, created)
const p = parts.shift()
const part = normPath(path.resolve(base + '/' + p))
if (cGet(cache, part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
if (er) {
fs.lstat(part, (statEr, st) => {
if (statEr) {
statEr.path = statEr.path && normPath(statEr.path)
cb(statEr)
} else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink) {
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
} else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
})
} else {
created = created || part
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
}
}
const checkCwdSync = dir => {
let ok = false
let code = 'ENOTDIR'
try {
ok = fs.statSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
}
module.exports.sync = (dir, opt) => {
dir = normPath(dir)
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = normPath(opt.cwd)
const done = (created) => {
cSet(cache, dir, true)
if (created && doChown)
chownr.sync(created, uid, gid)
if (needChmod)
fs.chmodSync(dir, mode)
}
if (cache && cGet(cache, dir) === true)
return done()
if (dir === cwd) {
checkCwdSync(cwd)
return done()
}
if (preserve)
return done(mkdirp.sync(dir, mode))
const sub = normPath(path.relative(cwd, dir))
const parts = sub.split('/')
let created = null
for (let p = parts.shift(), part = cwd;
p && (part += '/' + p);
p = parts.shift()) {
part = normPath(path.resolve(part))
if (cGet(cache, part))
continue
try {
fs.mkdirSync(part, mode)
created = created || part
cSet(cache, part, true)
} catch (er) {
const st = fs.lstatSync(part)
if (st.isDirectory()) {
cSet(cache, part, true)
continue
} else if (unlink) {
fs.unlinkSync(part)
fs.mkdirSync(part, mode)
created = created || part
cSet(cache, part, true)
continue
} else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/'))
}
}
return done(created)
}

23
lib/mode-fix.js Normal file

@ -0,0 +1,23 @@
'use strict'
module.exports = (mode, isDir, portable) => {
mode &= 0o7777
// in portable mode, use the minimum reasonable umask
// if this system creates files with 0o664 by default
// (as some linux distros do), then we'll write the
// archive with 0o644 instead. Also, don't ever create
// a file that is not readable/writable by the owner.
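// e.g. (illustrative): a regular file with mode 0o664 comes out as 0o644
// when portable is true (owner read/write forced on, group/other write cleared).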
if (portable)
mode = (mode | 0o600) & ~0o22
// if dirs are readable, then they should be listable
if (isDir) {
if (mode & 0o400)
mode |= 0o100
if (mode & 0o40)
mode |= 0o10
if (mode & 0o4)
mode |= 0o1
}
return mode
}

11
lib/normalize-unicode.js Normal file

@ -0,0 +1,11 @@
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
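// e.g. (illustrative): the precomposed and the decomposed spellings of 'café'
// both normalize to the identical NFKD string, so either spelling yields the
// same comparison key.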
const normalizeCache = Object.create(null)
const {hasOwnProperty} = Object.prototype
module.exports = s => {
if (!hasOwnProperty.call(normalizeCache, s))
normalizeCache[s] = s.normalize('NFKD')
return normalizeCache[s]
}

8
lib/normalize-windows-path.js Normal file

@ -0,0 +1,8 @@
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
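// e.g. (illustrative): on win32, 'C:\Users\me\pkg' becomes 'C:/Users/me/pkg';
// on every other platform the input path is returned unchanged.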
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
: p => p && p.replace(/\\/g, '/')

397
lib/pack.js Normal file

@ -0,0 +1,397 @@
'use strict'
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')
const Pack = warner(class Pack extends MiniPass {
constructor (opt) {
super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.file = opt.file || ''
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
if (this.portable)
opt.gzip.portable = true
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[ENDED] = false
}
[WRITE] (chunk) {
return super.write(chunk)
}
add (path) {
this.write(path)
return this
}
end (path) {
if (path)
this.write(path)
this[ENDED] = true
this[PROCESS]()
return this
}
write (path) {
if (this[ENDED])
throw new Error('write after end')
if (path instanceof ReadEntry)
this[ADDTARENTRY](path)
else
this[ADDFSENTRY](path)
return this.flowing
}
[ADDTARENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p.path))
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p))
p.resume()
else {
const job = new PackJob(p.path, absolute, false)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}
this[PROCESS]()
}
[ADDFSENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p))
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[STAT] (job) {
job.pending = true
this[JOBS] += 1
const stat = this.follow ? 'stat' : 'lstat'
fs[stat](job.absolute, (er, stat) => {
job.pending = false
this[JOBS] -= 1
if (er)
this.emit('error', er)
else
this[ONSTAT](job, stat)
})
}
[ONSTAT] (job, stat) {
this.statCache.set(job.absolute, stat)
job.stat = stat
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat))
job.ignore = true
this[PROCESS]()
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er)
return this.emit('error', er)
this[ONREADDIR](job, entries)
})
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
}
[PROCESS] () {
if (this[PROCESSING])
return
this[PROCESSING] = true
for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
this[PROCESSING] = false
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (this.zip)
this.zip.end(EOF)
else {
super.write(EOF)
super.end()
}
}
}
get [CURRENT] () {
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
[JOBDONE] (job) {
this[QUEUE].shift()
this[JOBS] -= 1
this[PROCESS]()
}
[PROCESSJOB] (job) {
if (job.pending)
return
if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return
}
if (!job.stat) {
if (this.statCache.has(job.absolute))
this[ONSTAT](job, this.statCache.get(job.absolute))
else
this[STAT](job)
}
if (!job.stat)
return
// filtered out!
if (job.ignore)
return
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute))
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
else
this[READDIR](job)
if (!job.readdir)
return
}
// we know it doesn't have an entry, because that got checked above
job.entry = this[ENTRY](job)
if (!job.entry) {
job.ignore = true
return
}
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
}
[ENTRYOPT] (job) {
return {
onwarn: (code, msg, data) => this.warn(code, msg, data),
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime,
prefix: this.prefix,
}
}
[ENTRY] (job) {
this[JOBS] += 1
try {
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
.on('end', () => this[JOBDONE](job))
.on('error', er => this.emit('error', er))
} catch (er) {
this.emit('error', er)
}
}
[ONDRAIN] () {
if (this[CURRENT] && this[CURRENT].entry)
this[CURRENT].entry.resume()
}
// like .pipe() but using super, because our write() is special
[PIPE] (job) {
job.piped = true
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
const source = job.entry
const zip = this.zip
if (zip) {
source.on('data', chunk => {
if (!zip.write(chunk))
source.pause()
})
} else {
source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
}
}
pause () {
if (this.zip)
this.zip.pause()
return super.pause()
}
})
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
}
// pause/resume are no-ops in sync streams.
pause () {}
resume () {}
[STAT] (job) {
const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
[READDIR] (job, stat) {
this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
if (zip) {
source.on('data', chunk => {
zip.write(chunk)
})
} else {
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
}
Pack.Sync = PackSync
module.exports = Pack

481
lib/parse.js Normal file

@ -0,0 +1,481 @@
'use strict'
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted yet.
// This can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away
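// Rough usage sketch (illustrative; 'some.tar' is a hypothetical file):
//   const Parser = require('./parse.js')
//   fs.createReadStream('some.tar').pipe(new Parser({ onentry: e => e.resume() }))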
const warner = require('./warn-mixin.js')
const Header = require('./header.js')
const EE = require('events')
const Yallist = require('yallist')
const maxMetaEntrySize = 1024 * 1024
const Entry = require('./read-entry.js')
const Pax = require('./pax.js')
const zlib = require('minizlib')
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const noop = _ => true
module.exports = warner(class Parser extends EE {
constructor (opt) {
opt = opt || {}
super(opt)
this.file = opt.file || ''
// set to boolean false when an entry starts. 1024 bytes of \0
// is technically a valid tarball, albeit a boring one.
this[SAW_VALID_ENTRY] = null
// these BADARCHIVE errors can't be detected early. listen on DONE.
this.on(DONE, _ => {
if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
}
})
if (opt.ondone)
this.on(DONE, opt.ondone)
else {
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
}
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist()
this[BUFFER] = null
this[READENTRY] = null
this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
}
[CONSUMEHEADER] (chunk, position) {
if (this[SAW_VALID_ENTRY] === null)
this[SAW_VALID_ENTRY] = false
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
if (header.nullBlock) {
if (this[SAW_NULL_BLOCK]) {
this[SAW_EOF] = true
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin')
this[STATE] = 'header'
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid)
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
else if (!header.path)
this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
// we do this for meta & ignored entries as well, because they
// are still valid tar, or else we wouldn't know to ignore them
if (!this[SAW_VALID_ENTRY]) {
if (entry.remain) {
// this might be the one!
const onend = () => {
if (!entry.invalid)
this[SAW_VALID_ENTRY] = true
}
entry.on('end', onend)
} else
this[SAW_VALID_ENTRY] = true
}
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
entry.resume()
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (entry.ignore) {
// probably valid, just not something we care about
this[EMIT]('ignoredEntry', entry)
this[STATE] = entry.remain ? 'ignore' : 'header'
entry.resume()
} else {
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
}
[PROCESSENTRY] (entry) {
let go = true
if (!entry) {
this[READENTRY] = null
go = false
} else if (Array.isArray(entry))
this.emit.apply(this, entry)
else {
this[READENTRY] = entry
this.emit('entry', entry)
if (!entry.emittedEnd) {
entry.on('end', _ => this[NEXTENTRY]())
go = false
}
}
return go
}
[NEXTENTRY] () {
do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
}
[CONSUMEBODY] (chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
entry.write(c)
if (!entry.blockRemain) {
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
return c.length
}
[CONSUMEMETA] (chunk, position) {
const entry = this[WRITEENTRY]
const ret = this[CONSUMEBODY](chunk, position)
// if we finished, then the entry is reset
if (!this[WRITEENTRY])
this[EMITMETA](entry)
return ret
}
[EMIT] (ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY])
this.emit(ev, data, extra)
else
this[QUEUE].push([ev, data, extra])
}
[EMITMETA] (entry) {
this[EMIT]('meta', this[META])
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
case 'GlobalExtendedHeader':
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
case 'NextFileHasLongLinkpath':
this[EX] = this[EX] || Object.create(null)
this[EX].linkpath = this[META].replace(/\0.*/, '')
break
/* istanbul ignore next */
default: throw new Error('unknown meta: ' + entry.type)
}
}
abort (error) {
this[ABORTED] = true
this.emit('abort', error)
// always throws, even in non-strict mode
this.warn('TAR_ABORT', error, { recoverable: false })
}
write (chunk) {
if (this[ABORTED])
return
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
}
this[WRITING] = true
if (this[UNZIP])
this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// if we have no queue, then that means a clogged READENTRY
if (!ret && !this[QUEUE].length)
this[READENTRY].once('drain', _ => this.emit('drain'))
return ret
}
[BUFFERCONCAT] (c) {
if (c && !this[ABORTED])
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
[MAYBEEND] () {
if (this[ENDED] &&
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, {entry})
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
}
}
[CONSUMECHUNK] (chunk) {
if (this[CONSUMING])
this[BUFFERCONCAT](chunk)
else if (!chunk && !this[BUFFER])
this[MAYBEEND]()
else {
this[CONSUMING] = true
if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
} else
this[CONSUMECHUNKSUB](chunk)
while (this[BUFFER] &&
this[BUFFER].length >= 512 &&
!this[ABORTED] &&
!this[SAW_EOF]) {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
}
this[CONSUMING] = false
}
if (!this[BUFFER] || this[ENDED])
this[MAYBEEND]()
}
[CONSUMECHUNKSUB] (chunk) {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
case 'ignore':
case 'body':
position += this[CONSUMEBODY](chunk, position)
break
case 'meta':
position += this[CONSUMEMETA](chunk, position)
break
/* istanbul ignore next */
default:
throw new Error('invalid state: ' + this[STATE])
}
}
if (position < length) {
if (this[BUFFER])
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
else
this[BUFFER] = chunk.slice(position)
}
}
end (chunk) {
if (!this[ABORTED]) {
if (this[UNZIP])
this[UNZIP].end(chunk)
else {
this[ENDED] = true
this.write(chunk)
}
}
}
})

148
lib/path-reservations.js Normal file

@ -0,0 +1,148 @@
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
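// Rough usage sketch (illustrative; paths and callback body are hypothetical):
//   const reservations = require('./path-reservations.js')()
//   reservations.reserve(['a/b/c'], clear => { /* safe to touch a/b/c here */ clear() })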
const assert = require('assert')
const normalize = require('./normalize-unicode.js')
const stripSlashes = require('./strip-trailing-slashes.js')
const { join } = require('path')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = () => {
// path => [function or Set]
// A Set object means a directory reservation
// A fn is a direct reservation on that path
const queues = new Map()
// fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
// return a set of parent dirs for a given path
// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length)
path = join(set[set.length - 1], path)
set.push(path || '/')
return set
}, [])
return dirs
}
// functions currently running
const running = new Set()
// return the queues for each path the function cares about
// fn => {paths, dirs}
const getQueues = fn => {
const res = reservations.get(fn)
/* istanbul ignore if - unpossible */
if (!res)
throw new Error('function does not have any path reservations')
return {
paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
}
}
// check if fn is first in line for all its paths, and is
// included in the first set for all its dir queues
const check = fn => {
const {paths, dirs} = getQueues(fn)
return paths.every(q => q[0] === fn) &&
dirs.every(q => q[0] instanceof Set && q[0].has(fn))
}
// run the function if it's first in line and not already running
const run = fn => {
if (running.has(fn) || !check(fn))
return false
running.add(fn)
fn(() => clear(fn))
return true
}
const clear = fn => {
if (!running.has(fn))
return false
const { paths, dirs } = reservations.get(fn)
const next = new Set()
paths.forEach(path => {
const q = queues.get(path)
assert.equal(q[0], fn)
if (q.length === 1)
queues.delete(path)
else {
q.shift()
if (typeof q[0] === 'function')
next.add(q[0])
else
q[0].forEach(fn => next.add(fn))
}
})
dirs.forEach(dir => {
const q = queues.get(dir)
assert(q[0] instanceof Set)
if (q[0].size === 1 && q.length === 1)
queues.delete(dir)
else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused
next.add(q[0])
} else
q[0].delete(fn)
})
running.delete(fn)
next.forEach(fn => run(fn))
return true
}
const reserve = (paths, fn) => {
// collide on matches across case and unicode normalization
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase()
})
const dirs = new Set(
paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
)
reservations.set(fn, {dirs, paths})
paths.forEach(path => {
const q = queues.get(path)
if (!q)
queues.set(path, [fn])
else
q.push(fn)
})
dirs.forEach(dir => {
const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
else if (q[q.length - 1] instanceof Set)
q[q.length - 1].add(fn)
else
q.push(new Set([fn]))
})
return run(fn)
}
return { check, reserve }
}

143
lib/pax.js Normal file

@ -0,0 +1,143 @@
'use strict'
const Header = require('./header.js')
const path = require('path')
class Pax {
constructor (obj, global) {
this.atime = obj.atime || null
this.charset = obj.charset || null
this.comment = obj.comment || null
this.ctime = obj.ctime || null
this.gid = obj.gid || null
this.gname = obj.gname || null
this.linkpath = obj.linkpath || null
this.mtime = obj.mtime || null
this.path = obj.path || null
this.size = obj.size || null
this.uid = obj.uid || null
this.uname = obj.uname || null
this.dev = obj.dev || null
this.ino = obj.ino || null
this.nlink = obj.nlink || null
this.global = global || false
}
encode () {
const body = this.encodeBody()
if (body === '')
return null
const bodyLen = Buffer.byteLength(body)
// round up to 512 bytes
// add 512 for header
const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
const buf = Buffer.allocUnsafe(bufLen)
// 0-fill the header section, it might not hit every field
for (let i = 0; i < 512; i++)
buf[i] = 0
new Header({
// XXX split the path
// then the path should be PaxHeader + basename, but less than 99,
// prepend with the dirname
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
mode: this.mode || 0o644,
uid: this.uid || null,
gid: this.gid || null,
size: bodyLen,
mtime: this.mtime || null,
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
linkpath: '',
uname: this.uname || '',
gname: this.gname || '',
devmaj: 0,
devmin: 0,
atime: this.atime || null,
ctime: this.ctime || null,
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8')
// null pad after the body
for (let i = bodyLen + 512; i < buf.length; i++)
buf[i] = 0
return buf
}
encodeBody () {
return (
this.encodeField('path') +
this.encodeField('ctime') +
this.encodeField('atime') +
this.encodeField('dev') +
this.encodeField('ino') +
this.encodeField('nlink') +
this.encodeField('charset') +
this.encodeField('comment') +
this.encodeField('gid') +
this.encodeField('gname') +
this.encodeField('linkpath') +
this.encodeField('mtime') +
this.encodeField('size') +
this.encodeField('uid') +
this.encodeField('uname')
)
}
encodeField (field) {
if (this[field] === null || this[field] === undefined)
return ''
const v = this[field] instanceof Date ? this[field].getTime() / 1000
: this[field]
const s = ' ' +
(field === 'dev' || field === 'ino' || field === 'nlink'
? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the length prefix includes its own digits in the total byte count.
// e.g. if the body is 8 bytes, a 1-digit prefix gives a 9-byte record ("9 ...");
// if the body is 9 bytes, the total would be 10, which no longer fits in one
// digit, so the prefix grows to 2 digits and the record becomes 11 bytes.
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
if (byteLen + digits >= Math.pow(10, digits))
digits += 1
const len = digits + byteLen
return len + s
}
}
Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
const merge = (a, b) =>
b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
const parseKV = string =>
string
.replace(/\n$/, '')
.split('\n')
.reduce(parseKVLine, Object.create(null))
const parseKVLine = (set, line) => {
const n = parseInt(line, 10)
// XXX Values with \n in them will fail this.
// Refactor to not be a naive line-by-line parse.
if (n !== Buffer.byteLength(line) + 1)
return set
line = line.substr((n + ' ').length)
const kv = line.split('=')
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
if (!k)
return set
const v = kv.join('=')
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
? new Date(v * 1000)
: /^[0-9]+$/.test(v) ? +v
: v
return set
}
module.exports = Pax

100
lib/read-entry.js Normal file

@ -0,0 +1,100 @@
'use strict'
const MiniPass = require('minipass')
const normPath = require('./normalize-windows-path.js')
const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass {
constructor (header, ex, gex) {
super()
// read entries always start life paused. this is to avoid the
// situation where Minipass's auto-ending empty streams results
// in an entry ending before we're ready for it.
this.pause()
this.extended = ex
this.globalExtended = gex
this.header = header
this.startBlockSize = 512 * Math.ceil(header.size / 512)
this.blockRemain = this.startBlockSize
this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
case 'NextFileHasLongLinkpath':
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
case 'GlobalExtendedHeader':
case 'ExtendedHeader':
case 'OldExtendedHeader':
this.meta = true
break
// NOTE: gnutar and bsdtar treat unrecognized types as 'File'
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
}
this.path = normPath(header.path)
this.mode = header.mode
if (this.mode)
this.mode = this.mode & 0o7777
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = normPath(header.linkpath)
this.uname = header.uname
this.gname = header.gname
if (ex)
this[SLURP](ex)
if (gex)
this[SLURP](gex, true)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
const r = this.remain
const br = this.blockRemain
this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore)
return true
if (r >= writeLen)
return super.write(data)
// r < writeLen
return super.write(data.slice(0, r))
}
[SLURP] (ex, global) {
for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
}
}
}

223
lib/replace.js Normal file

@ -0,0 +1,223 @@
'use strict'
// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is valid, jump forward by the entry's size (rounded up to 512)
// and try again.
// Write the new Pack stream starting there.
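// Worked example (illustrative): if the archive's only entry is a 600-byte
// file, the scan stops at offset 1536 (512-byte header + 1024 bytes of body
// blocks), and the new entries are written over the old end-of-archive nulls.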
const Header = require('./header.js')
module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
throw new TypeError('file is required')
if (opt.gzip)
throw new TypeError('cannot append to compressed archives')
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
return opt.sync ? replaceSync(opt, files)
: replace(opt, files, cb)
}
const replaceSync = (opt, files) => {
const p = new Pack.Sync(opt)
let threw = true
let fd
let position
try {
try {
fd = fs.openSync(opt.file, 'r+')
} catch (er) {
if (er.code === 'ENOENT')
fd = fs.openSync(opt.file, 'w+')
else
throw er
}
const st = fs.fstatSync(fd)
const headBuf = Buffer.alloc(512)
POSITION: for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
throw new Error('cannot append to compressed archives')
if (!bytes)
break POSITION
}
const h = new Header(headBuf)
if (!h.cksumValid)
break
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size)
break
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
}
threw = false
streamSync(opt, p, position, fd, files)
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
addFilesSync(p, files)
}
const replace = (opt, files, cb) => {
files = Array.from(files)
const p = new Pack(opt)
const getPos = (fd, size, cb_) => {
const cb = (er, pos) => {
if (er)
fs.close(fd, _ => cb_(er))
else
cb_(null, pos)
}
let position = 0
if (size === 0)
return cb(null, 0)
let bufPos = 0
const headBuf = Buffer.alloc(512)
const onread = (er, bytes) => {
if (er)
return cb(er)
bufPos += bytes
if (bufPos < 512 && bytes) {
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
}
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
return cb(new Error('cannot append to compressed archives'))
// truncated header
if (bufPos < 512)
return cb(null, position)
const h = new Header(headBuf)
if (!h.cksumValid)
return cb(null, position)
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > size)
return cb(null, position)
position += entryBlockSize + 512
if (position >= size)
return cb(null, position)
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread)
}
fs.read(fd, headBuf, 0, 512, position, onread)
}
const promise = new Promise((resolve, reject) => {
p.on('error', reject)
let flag = 'r+'
const onopen = (er, fd) => {
if (er && er.code === 'ENOENT' && flag === 'r+') {
flag = 'w+'
return fs.open(opt.file, flag, onopen)
}
if (er)
return reject(er)
fs.fstat(fd, (er, st) => {
if (er)
return fs.close(fd, () => reject(er))
getPos(fd, st.size, (er, position) => {
if (er)
return reject(er)
const stream = new fsm.WriteStream(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
})
})
}
fs.open(opt.file, flag, onopen)
})
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry),
})
} else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else
p.add(file)
}
p.end()
}

24
lib/strip-absolute-path.js Normal file

@ -0,0 +1,24 @@
// unix absolute paths are also absolute on win32, so we use this for both
const { isAbsolute, parse } = require('path').win32
// returns [root, stripped]
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
module.exports = path => {
let r = ''
let parsed = parse(path)
while (isAbsolute(path) || parsed.root) {
// windows will think that //x/y/z has a "root" of //x/y/
// but strip the //?/C:/ off of //?/C:/path
const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
: parsed.root
path = path.substr(root.length)
r += root
parsed = parse(path)
}
return [r, path]
}

13
lib/strip-trailing-slashes.js Normal file

@ -0,0 +1,13 @@
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
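// e.g. (illustrative): 'path/to/dir///' -> 'path/to/dir', and '/' -> ''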
module.exports = str => {
let i = str.length - 1
let slashesStart = -1
while (i > -1 && str.charAt(i) === '/') {
slashesStart = i
i--
}
return slashesStart === -1 ? str : str.slice(0, slashesStart)
}

44
lib/types.js Normal file

@ -0,0 +1,44 @@
'use strict'
// map types from key to human-friendly name
exports.name = new Map([
['0', 'File'],
// same as File
['', 'OldFile'],
['1', 'Link'],
['2', 'SymbolicLink'],
// Devices and FIFOs aren't fully supported
// they are parsed, but skipped when unpacking
['3', 'CharacterDevice'],
['4', 'BlockDevice'],
['5', 'Directory'],
['6', 'FIFO'],
// same as File
['7', 'ContiguousFile'],
// pax headers
['g', 'GlobalExtendedHeader'],
['x', 'ExtendedHeader'],
// vendor-specific stuff
// skip
['A', 'SolarisACL'],
// like 5, but with data, which should be skipped
['D', 'GNUDumpDir'],
// metadata only, skip
['I', 'Inode'],
// data = link path of next file
['K', 'NextFileHasLongLinkpath'],
// data = path of next file
['L', 'NextFileHasLongPath'],
// skip
['M', 'ContinuationFile'],
// like L
['N', 'OldGnuLongPath'],
// skip
['S', 'SparseFile'],
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader'],
])
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
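// e.g. (illustrative): exports.name.get('5') === 'Directory' and
// exports.code.get('Directory') === '5'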

877
lib/unpack.js Normal file

@ -0,0 +1,877 @@
'use strict'
// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
// but the path reservations are required to avoid race conditions where
// parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
const assert = require('assert')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const mkdir = require('./mkdir.js')
const wc = require('./winchars.js')
const pathReservations = require('./path-reservations.js')
const stripAbsolutePath = require('./strip-absolute-path.js')
const normPath = require('./normalize-windows-path.js')
const stripSlash = require('./strip-trailing-slashes.js')
const normalize = require('./normalize-unicode.js')
const ONENTRY = Symbol('onEntry')
const CHECKFS = Symbol('checkFs')
const CHECKFS2 = Symbol('checkFs2')
const PRUNECACHE = Symbol('pruneCache')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = require('crypto')
const getFlag = require('./get-write-flag.js')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
// Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (!isWindows)
return fs.unlink(path, cb)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.rename(path, name, er => {
if (er)
return cb(er)
fs.unlink(name, cb)
})
}
/* istanbul ignore next */
const unlinkFileSync = path => {
if (!isWindows)
return fs.unlinkSync(path)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.renameSync(path, name)
fs.unlinkSync(name)
}
// pick the first of (a, b, c) that is a valid uint32, e.g.
// (this.uid, entry.uid, this.processUid) or (this.gid, entry.gid, this.processGid)
const uint32 = (a, b, c) =>
a === a >>> 0 ? a
: b === b >>> 0 ? b
: c
// clear the cache if it's a case-insensitive unicode-squashing match.
// we can't know if the current file system is case-sensitive or supports
// unicode fully, so we check for similarity on the maximally compatible
// representation. Err on the side of pruning, since all it's doing is
// preventing lstats, and it's not the end of the world if we get a false
// positive.
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
.toLowerCase()
const pruneCache = (cache, abs) => {
abs = cacheKeyNormalize(abs)
for (const path of cache.keys()) {
const pnorm = cacheKeyNormalize(path)
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0)
cache.delete(path)
}
}
const dropCache = cache => {
for (const key of cache.keys())
cache.delete(key)
}
class Unpack extends Parser {
constructor (opt) {
if (!opt)
opt = {}
opt.ondone = _ => {
this[ENDED] = true
this[MAYBECLOSE]()
}
super(opt)
this[CHECKED_CWD] = false
this.reservations = pathReservations()
this.transform = typeof opt.transform === 'function' ? opt.transform : null
this.writable = true
this.readable = false
this[PENDING] = 0
this[ENDED] = false
this.dirCache = opt.dirCache || new Map()
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
// need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
throw new TypeError('cannot set owner without number uid and gid')
if (opt.preserveOwner) {
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
}
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
} else {
this.uid = null
this.gid = null
this.setOwner = false
}
// default true for root
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
this.preserveOwner = process.getuid && process.getuid() === 0
else
this.preserveOwner = !!opt.preserveOwner
this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
process.getuid() : null
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
// turn ><?| in filenames into 0xf000-higher encoded forms
this.win32 = !!opt.win32 || isWindows
// do not unpack over files that are newer than what's in the archive
this.newer = !!opt.newer
// do not unpack over ANY files
this.keep = !!opt.keep
// do not set mtime/atime of extracted entries
this.noMtime = !!opt.noMtime
// allow .., absolute path entries, and unpacking through symlinks
// without this, warn and skip .., relativize absolutes, and error
// on symlinks in extraction path
this.preservePaths = !!opt.preservePaths
// unlink files and links before writing. This breaks existing hard
// links, and removes symlink directories rather than erroring
this.unlink = !!opt.unlink
this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
this.strip = +opt.strip || 0
// if we're not chmodding, then we don't need the process umask
this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
// default mode for dirs created as parents
this.dmode = opt.dmode || (0o0777 & (~this.umask))
this.fmode = opt.fmode || (0o0666 & (~this.umask))
this.on('entry', entry => this[ONENTRY](entry))
}
// a bad or damaged archive is a warning for Parser, but an error
// when extracting. Mark those errors as unrecoverable, because
// the Unpack contract cannot be met.
warn (code, msg, data = {}) {
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT')
data.recoverable = false
return super.warn(code, msg, data)
}
[MAYBECLOSE] () {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
}
}
[CHECKPATH] (entry) {
if (this.strip) {
const parts = normPath(entry.path).split('/')
if (parts.length < this.strip)
return false
entry.path = parts.slice(this.strip).join('/')
if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip)
entry.linkpath = linkparts.slice(this.strip).join('/')
else
return false
}
}
if (!this.preservePaths) {
const p = normPath(entry.path)
const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
path: p,
})
return false
}
// strip off the root
const [root, stripped] = stripAbsolutePath(p)
if (root) {
entry.path = stripped
this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
entry,
path: p,
})
}
}
if (path.isAbsolute(entry.path))
entry.absolute = normPath(path.resolve(entry.path))
else
entry.absolute = normPath(path.resolve(this.cwd, entry.path))
// if we somehow ended up with a path that escapes the cwd, and we are
// not in preservePaths mode, then something is fishy! This should have
// been prevented above, so ignore this for coverage.
/* istanbul ignore if - defense in depth */
if (!this.preservePaths &&
entry.absolute.indexOf(this.cwd + '/') !== 0 &&
entry.absolute !== this.cwd) {
this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
entry,
path: normPath(entry.path),
resolvedPath: entry.absolute,
cwd: this.cwd,
})
return false
}
// an archive can set properties on the extraction directory, but it
// may not replace the cwd with a different kind of thing entirely.
if (entry.absolute === this.cwd &&
entry.type !== 'Directory' &&
entry.type !== 'GNUDumpDir')
return false
// only encode : chars that aren't drive letter indicators
if (this.win32) {
const { root: aRoot } = path.win32.parse(entry.absolute)
entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length))
const { root: pRoot } = path.win32.parse(entry.path)
entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length))
}
return true
}
[ONENTRY] (entry) {
if (!this[CHECKPATH](entry))
return entry.resume()
assert.equal(typeof entry.absolute, 'string')
switch (entry.type) {
case 'Directory':
case 'GNUDumpDir':
if (entry.mode)
entry.mode = entry.mode | 0o700
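      // fall through on purpose: directory entries are written via
      // [CHECKFS] below, just like files and links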
case 'File':
case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
default:
return this[UNSUPPORTED](entry)
}
}
[ONERROR] (er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError')
this.emit('error', er)
else {
this.warn('TAR_ENTRY_ERROR', er, {entry})
this[UNPEND]()
entry.resume()
}
}
[MKDIR] (dir, mode, cb) {
mkdir(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
noChmod: this.noChmod,
}, cb)
}
[DOCHOWN] (entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
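    // note: && binds tighter than ||, so each of the three alternatives
    // below (forceChown, preserveOwner, explicit uid/gid) stands on its own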
return this.forceChown ||
this.preserveOwner &&
(typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
(typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid)
}
[UID] (entry) {
return uint32(this.uid, entry.uid, this.processUid)
}
[GID] (entry) {
return uint32(this.gid, entry.gid, this.processGid)
}
[FILE] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.fmode
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
autoClose: false,
})
stream.on('error', er => {
if (stream.fd)
fs.close(stream.fd, () => {})
// flush all the data out so that we aren't left hanging
// if the error wasn't actually fatal. otherwise the parse
// is blocked, and we never proceed.
stream.write = () => true
this[ONERROR](er, entry)
fullyDone()
})
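    // `actions` counts the outstanding finish-up steps (futimes/fchown).
    // it starts at 1 for the 'finish' handler's own done() call, and the
    // fd is closed only once the count drops back to zero.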
let actions = 1
const done = er => {
if (er) {
/* istanbul ignore else - we should always have a fd by now */
if (stream.fd)
fs.close(stream.fd, () => {})
this[ONERROR](er, entry)
fullyDone()
return
}
if (--actions === 0) {
fs.close(stream.fd, er => {
if (er)
this[ONERROR](er, entry)
else
this[UNPEND]()
fullyDone()
})
}
}
stream.on('finish', _ => {
// if futimes fails, try utimes
// if utimes fails, fail with the original error
// same for fchown/chown
const abs = entry.absolute
const fd = stream.fd
if (entry.mtime && !this.noMtime) {
actions++
const atime = entry.atime || new Date()
const mtime = entry.mtime
fs.futimes(fd, atime, mtime, er =>
er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
: done())
}
if (this[DOCHOWN](entry)) {
actions++
const uid = this[UID](entry)
const gid = this[GID](entry)
fs.fchown(fd, uid, gid, er =>
er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
: done())
}
done()
})
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => {
this[ONERROR](er, entry)
fullyDone()
})
entry.pipe(tx)
}
tx.pipe(stream)
}
[DIRECTORY] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er) {
this[ONERROR](er, entry)
fullyDone()
return
}
let actions = 1
const done = _ => {
if (--actions === 0) {
fullyDone()
this[UNPEND]()
entry.resume()
}
}
if (entry.mtime && !this.noMtime) {
actions++
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
}
if (this[DOCHOWN](entry)) {
actions++
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
done()
})
}
[UNSUPPORTED] (entry) {
entry.unsupported = true
this.warn('TAR_ENTRY_UNSUPPORTED',
`unsupported entry type: ${entry.type}`, {entry})
entry.resume()
}
[SYMLINK] (entry, done) {
this[LINK](entry, entry.linkpath, 'symlink', done)
}
[HARDLINK] (entry, done) {
const linkpath = normPath(path.resolve(this.cwd, entry.linkpath))
this[LINK](entry, linkpath, 'link', done)
}
[PEND] () {
this[PENDING]++
}
[UNPEND] () {
this[PENDING]--
this[MAYBECLOSE]()
}
[SKIP] (entry) {
this[UNPEND]()
entry.resume()
}
// Check if we can reuse an existing filesystem entry safely and
// overwrite it, rather than unlinking and recreating
// Windows doesn't report a useful nlink, so we just never reuse entries
[ISREUSABLE] (entry, st) {
return entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
!isWindows
}
// check if a thing is there, and if so, try to clobber it
[CHECKFS] (entry) {
this[PEND]()
const paths = [entry.path]
if (entry.linkpath)
paths.push(entry.linkpath)
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
}
[PRUNECACHE] (entry) {
// if we are not creating a directory, and the path is in the dirCache,
// then that means we are about to delete the directory we created
// previously, and it is no longer going to be a directory, and neither
// is any of its children.
// If a symbolic link is encountered, all bets are off. There is no
// reasonable way to sanitize the cache in such a way we will be able to
// avoid having filesystem collisions. If this happens with a non-symlink
// entry, it'll just fail to unpack, but a symlink to a directory, using an
// 8.3 shortname or certain unicode attacks, can evade detection and lead
// to arbitrary writes to anywhere on the system.
if (entry.type === 'SymbolicLink')
dropCache(this.dirCache)
else if (entry.type !== 'Directory')
pruneCache(this.dirCache, entry.absolute)
}
[CHECKFS2] (entry, fullyDone) {
this[PRUNECACHE](entry)
const done = er => {
this[PRUNECACHE](entry)
fullyDone(er)
}
const checkCwd = () => {
this[MKDIR](this.cwd, this.dmode, er => {
if (er) {
this[ONERROR](er, entry)
done()
return
}
this[CHECKED_CWD] = true
start()
})
}
const start = () => {
if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
return this[MKDIR](parent, this.dmode, er => {
if (er) {
this[ONERROR](er, entry)
done()
return
}
afterMakeParent()
})
}
}
afterMakeParent()
}
const afterMakeParent = () => {
fs.lstat(entry.absolute, (lstatEr, st) => {
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
this[SKIP](entry)
done()
return
}
if (lstatEr || this[ISREUSABLE](entry, st))
return this[MAKEFS](null, entry, done)
if (st.isDirectory()) {
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const afterChmod = er => this[MAKEFS](er, entry, done)
if (!needChmod)
return afterChmod()
return fs.chmod(entry.absolute, entry.mode, afterChmod)
}
// Not a dir entry, have to remove it.
// NB: the only way to end up with an entry that is the cwd
// itself, in such a way that == does not detect, is a
// tricky windows absolute path with UNC or 8.3 parts (and
// preservePaths:true, or else it will have been stripped).
// In that case, the user has opted out of path protections
// explicitly, so if they blow away the cwd, c'est la vie.
if (entry.absolute !== this.cwd) {
return fs.rmdir(entry.absolute, er =>
this[MAKEFS](er, entry, done))
}
}
// not a dir, and not reusable
// don't remove if the cwd, we want that error
if (entry.absolute === this.cwd)
return this[MAKEFS](null, entry, done)
unlinkFile(entry.absolute, er =>
this[MAKEFS](er, entry, done))
})
}
if (this[CHECKED_CWD])
start()
else
checkCwd()
}
[MAKEFS] (er, entry, done) {
if (er) {
this[ONERROR](er, entry)
done()
return
}
switch (entry.type) {
case 'File':
case 'OldFile':
case 'ContiguousFile':
return this[FILE](entry, done)
case 'Link':
return this[HARDLINK](entry, done)
case 'SymbolicLink':
return this[SYMLINK](entry, done)
case 'Directory':
case 'GNUDumpDir':
return this[DIRECTORY](entry, done)
}
}
[LINK] (entry, linkpath, link, done) {
// XXX: get the type ('symlink' or 'junction') for windows
fs[link](linkpath, entry.absolute, er => {
if (er)
this[ONERROR](er, entry)
else {
this[UNPEND]()
entry.resume()
}
done()
})
}
}
const callSync = fn => {
try {
return [null, fn()]
} catch (er) {
return [er, null]
}
}
class UnpackSync extends Unpack {
[MAKEFS] (er, entry) {
return super[MAKEFS](er, entry, () => {})
}
[CHECKFS] (entry) {
this[PRUNECACHE](entry)
if (!this[CHECKED_CWD]) {
const er = this[MKDIR](this.cwd, this.dmode)
if (er)
return this[ONERROR](er, entry)
this[CHECKED_CWD] = true
}
// don't bother to make the parent if the current entry is the cwd,
// we've already checked it.
if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
const mkParent = this[MKDIR](parent, this.dmode)
if (mkParent)
return this[ONERROR](mkParent, entry)
}
}
const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute))
if (st && (this.keep || this.newer && st.mtime > entry.mtime))
return this[SKIP](entry)
if (lstatEr || this[ISREUSABLE](entry, st))
return this[MAKEFS](null, entry)
if (st.isDirectory()) {
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const [er] = needChmod ? callSync(() => {
fs.chmodSync(entry.absolute, entry.mode)
}) : []
return this[MAKEFS](er, entry)
}
// not a dir entry, have to remove it
const [er] = callSync(() => fs.rmdirSync(entry.absolute))
      return this[MAKEFS](er, entry)
}
// not a dir, and not reusable.
// don't remove if it's the cwd, since we want that error.
const [er] = entry.absolute === this.cwd ? []
: callSync(() => unlinkFileSync(entry.absolute))
this[MAKEFS](er, entry)
}
[FILE] (entry, done) {
const mode = entry.mode & 0o7777 || this.fmode
const oner = er => {
let closeError
try {
fs.closeSync(fd)
} catch (e) {
closeError = e
}
if (er || closeError)
this[ONERROR](er || closeError, entry)
done()
}
let fd
try {
fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
} catch (er) {
return oner(er)
}
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => this[ONERROR](er, entry))
entry.pipe(tx)
}
tx.on('data', chunk => {
try {
fs.writeSync(fd, chunk, 0, chunk.length)
} catch (er) {
oner(er)
}
})
tx.on('end', _ => {
let er = null
// try both, falling futimes back to utimes
// if either fails, handle the first error
if (entry.mtime && !this.noMtime) {
const atime = entry.atime || new Date()
const mtime = entry.mtime
try {
fs.futimesSync(fd, atime, mtime)
} catch (futimeser) {
try {
fs.utimesSync(entry.absolute, atime, mtime)
} catch (utimeser) {
er = futimeser
}
}
}
if (this[DOCHOWN](entry)) {
const uid = this[UID](entry)
const gid = this[GID](entry)
try {
fs.fchownSync(fd, uid, gid)
} catch (fchowner) {
try {
fs.chownSync(entry.absolute, uid, gid)
} catch (chowner) {
er = er || fchowner
}
}
}
oner(er)
})
}
[DIRECTORY] (entry, done) {
const mode = entry.mode & 0o7777 || this.dmode
const er = this[MKDIR](entry.absolute, mode)
if (er) {
this[ONERROR](er, entry)
done()
return
}
if (entry.mtime && !this.noMtime) {
try {
fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
} catch (er) {}
}
if (this[DOCHOWN](entry)) {
try {
fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
} catch (er) {}
}
done()
entry.resume()
}
[MKDIR] (dir, mode) {
try {
return mkdir.sync(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
})
} catch (er) {
return er
}
}
[LINK] (entry, linkpath, link, done) {
try {
fs[link + 'Sync'](linkpath, entry.absolute)
done()
entry.resume()
} catch (er) {
return this[ONERROR](er, entry)
}
}
}
Unpack.Sync = UnpackSync
module.exports = Unpack
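// Usage sketch (illustrative only; 'archive.tar' and 'dest' are placeholder
// paths): Unpack is a writable parser stream, so extraction is just a pipe:
//   const fs = require('fs')
//   const Unpack = require('./unpack.js')
//   fs.createReadStream('archive.tar').pipe(new Unpack({ cwd: 'dest' }))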

36
lib/update.js Normal file
View File

@ -0,0 +1,36 @@
'use strict'
// tar -u
const hlo = require('./high-level-opt.js')
const r = require('./replace.js')
// just call tar.r with the filter and mtimeCache
module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
throw new TypeError('file is required')
if (opt.gzip)
throw new TypeError('cannot append to compressed archives')
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
mtimeFilter(opt)
return r(opt, files, cb)
}
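// wrap any user-supplied filter so that files whose mtime recorded in the
// existing archive (the mtimeCache) is newer than the copy on disk are
// skipped, which is what gives `tar -u` its update-only behavior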
const mtimeFilter = opt => {
const filter = opt.filter
if (!opt.mtimeCache)
opt.mtimeCache = new Map()
opt.filter = filter ? (path, stat) =>
filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
: (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}

21
lib/warn-mixin.js Normal file
View File

@ -0,0 +1,21 @@
'use strict'
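// Adds a warn(code, message, data) method to a class: in non-strict mode,
// recoverable problems are emitted as 'warn' events; in strict mode, or when
// data.recoverable === false, they are emitted as 'error' events instead.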
module.exports = Base => class extends Base {
warn (code, message, data = {}) {
if (this.file)
data.file = this.file
if (this.cwd)
data.cwd = this.cwd
data.code = message instanceof Error && message.code || code
data.tarCode = code
if (!this.strict && data.recoverable !== false) {
if (message instanceof Error) {
data = Object.assign(message, data)
message = message.message
}
this.emit('warn', data.tarCode, message, data)
} else if (message instanceof Error)
this.emit('error', Object.assign(message, data))
else
this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
}
}

23
lib/winchars.js Normal file
View File

@ -0,0 +1,23 @@
'use strict'
// When writing files on Windows, translate the characters to their
// 0xf000 higher-encoded versions.
const raw = [
'|',
'<',
'>',
'?',
':',
]
const win = raw.map(char =>
String.fromCharCode(0xf000 + char.charCodeAt(0)))
const toWin = new Map(raw.map((char, i) => [char, win[i]]))
const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
module.exports = {
encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
}
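// Illustrative round-trip (assumed input string, not part of the module):
//   const wc = require('./winchars.js')
//   wc.encode('a<b?.txt')            // => 'a\uf03cb\uf03f.txt'
//   wc.decode(wc.encode('a<b?.txt')) // => 'a<b?.txt'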

525
lib/write-entry.js Normal file
View File

@ -0,0 +1,525 @@
'use strict'
const MiniPass = require('minipass')
const Pax = require('./pax.js')
const Header = require('./header.js')
const fs = require('fs')
const path = require('path')
const normPath = require('./normalize-windows-path.js')
const stripSlash = require('./strip-trailing-slashes.js')
const prefixPath = (path, prefix) => {
if (!prefix)
return normPath(path)
path = normPath(path).replace(/^\.(\/|$)/, '')
return stripSlash(prefix) + '/' + path
}
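// e.g. prefixPath('./x/y', 'prefix') === 'prefix/x/y' (illustrative values)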
const maxReadSize = 16 * 1024 * 1024
const PROCESS = Symbol('process')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const HEADER = Symbol('header')
const READ = Symbol('read')
const LSTAT = Symbol('lstat')
const ONLSTAT = Symbol('onlstat')
const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const AWAITDRAIN = Symbol('awaitDrain')
const ONDRAIN = Symbol('ondrain')
const PREFIX = Symbol('prefix')
const HAD_ERROR = Symbol('hadError')
const warner = require('./warn-mixin.js')
const winchars = require('./winchars.js')
const stripAbsolutePath = require('./strip-absolute-path.js')
const modeFix = require('./mode-fix.js')
const WriteEntry = warner(class WriteEntry extends MiniPass {
constructor (p, opt) {
opt = opt || {}
super(opt)
if (typeof p !== 'string')
throw new TypeError('path is required')
this.path = normPath(p)
// suppress atime, ctime, uid, gid, uname, gname
this.portable = !!opt.portable
// until node has builtin pwnam functions, this'll have to do
this.myuid = process.getuid && process.getuid() || 0
this.myuser = process.env.USER || ''
this.maxReadSize = opt.maxReadSize || maxReadSize
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.preservePaths = !!opt.preservePaths
this.cwd = normPath(opt.cwd || process.cwd())
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.prefix = opt.prefix ? normPath(opt.prefix) : null
this.fd = null
this.blockLen = null
this.blockRemain = null
this.buf = null
this.offset = null
this.length = null
this.pos = null
this.remain = null
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
}
this.win32 = !!opt.win32 || process.platform === 'win32'
if (this.win32) {
// force the \ to / normalization, since we might not *actually*
// be on windows, but want \ to be considered a path separator.
this.path = winchars.decode(this.path.replace(/\\/g, '/'))
p = p.replace(/\\/g, '/')
}
this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))
if (this.path === '')
this.path = './'
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
})
}
if (this.statCache.has(this.absolute))
this[ONLSTAT](this.statCache.get(this.absolute))
else
this[LSTAT]()
}
emit (ev, ...data) {
if (ev === 'error')
this[HAD_ERROR] = true
return super.emit(ev, ...data)
}
[LSTAT] () {
fs.lstat(this.absolute, (er, stat) => {
if (er)
return this.emit('error', er)
this[ONLSTAT](stat)
})
}
[ONLSTAT] (stat) {
this.statCache.set(this.absolute, stat)
this.stat = stat
if (!stat.isFile())
stat.size = 0
this.type = getType(stat)
this.emit('stat', stat)
this[PROCESS]()
}
[PROCESS] () {
switch (this.type) {
case 'File': return this[FILE]()
case 'Directory': return this[DIRECTORY]()
case 'SymbolicLink': return this[SYMLINK]()
// unsupported types are ignored.
default: return this.end()
}
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory', this.portable)
}
[PREFIX] (path) {
return prefixPath(path, this.prefix)
}
[HEADER] () {
if (this.type === 'Directory' && this.portable)
this.noMtime = true
this.header = new Header({
path: this[PREFIX](this.path),
// only apply the prefix to hard links.
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime,
})
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
gid: this.portable ? null : this.header.gid,
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink,
}).encode())
}
super.write(this.header.block)
}
[DIRECTORY] () {
if (this.path.substr(-1) !== '/')
this.path += '/'
this.stat.size = 0
this[HEADER]()
this.end()
}
[SYMLINK] () {
fs.readlink(this.absolute, (er, linkpath) => {
if (er)
return this.emit('error', er)
this[ONREADLINK](linkpath)
})
}
[ONREADLINK] (linkpath) {
this.linkpath = normPath(linkpath)
this[HEADER]()
this.end()
}
[HARDLINK] (linkpath) {
this.type = 'Link'
this.linkpath = normPath(path.relative(this.cwd, linkpath))
this.stat.size = 0
this[HEADER]()
this.end()
}
[FILE] () {
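    // a file with nlink > 1 may be a hard link to something already added;
    // key the linkCache on dev:ino so each inode is written in full only
    // once, and later entries for it become 'Link' records instead.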
if (this.stat.nlink > 1) {
const linkKey = this.stat.dev + ':' + this.stat.ino
if (this.linkCache.has(linkKey)) {
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0)
return this[HARDLINK](linkpath)
}
this.linkCache.set(linkKey, this.absolute)
}
this[HEADER]()
if (this.stat.size === 0)
return this.end()
this[OPENFILE]()
}
[OPENFILE] () {
fs.open(this.absolute, 'r', (er, fd) => {
if (er)
return this.emit('error', er)
this[ONOPENFILE](fd)
})
}
[ONOPENFILE] (fd) {
this.fd = fd
if (this[HAD_ERROR])
return this[CLOSE]()
this.blockLen = 512 * Math.ceil(this.stat.size / 512)
this.blockRemain = this.blockLen
const bufLen = Math.min(this.blockLen, this.maxReadSize)
this.buf = Buffer.allocUnsafe(bufLen)
this.offset = 0
this.pos = 0
this.remain = this.stat.size
this.length = this.buf.length
this[READ]()
}
[READ] () {
const { fd, buf, offset, length, pos } = this
fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
return this[CLOSE](() => this.emit('error', er))
}
this[ONREAD](bytesRead)
})
}
[CLOSE] (cb) {
fs.close(this.fd, cb)
}
[ONREAD] (bytesRead) {
if (bytesRead <= 0 && this.remain > 0) {
const er = new Error('encountered unexpected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
if (bytesRead > this.remain) {
const er = new Error('did not encounter expected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
    // null out the rest of the buffer, if we could fit the block padding.
    // by the end of this loop, bytesRead and this.remain have been bumped
    // up to the blockRemain level, as if we had expected a null-padded file
    // and read it through to the end. the write() below then decrements both
    // remain and blockRemain by bytesRead, so we know we reached the
    // expected EOF without any null buffer left to append.
if (bytesRead === this.remain) {
for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
this.buf[i + this.offset] = 0
bytesRead++
this.remain++
}
}
const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
const flushed = this.write(writeBuf)
if (!flushed)
this[AWAITDRAIN](() => this[ONDRAIN]())
else
this[ONDRAIN]()
}
[AWAITDRAIN] (cb) {
this.once('drain', cb)
}
write (writeBuf) {
if (this.blockRemain < writeBuf.length) {
const er = new Error('writing more data than expected')
er.path = this.absolute
return this.emit('error', er)
}
this.remain -= writeBuf.length
this.blockRemain -= writeBuf.length
this.pos += writeBuf.length
this.offset += writeBuf.length
return super.write(writeBuf)
}
[ONDRAIN] () {
if (!this.remain) {
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return this[CLOSE](er => er ? this.emit('error', er) : this.end())
}
if (this.offset >= this.length) {
// if we only have a smaller bit left to read, alloc a smaller buffer
// otherwise, keep it the same length it was before.
this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
this.offset = 0
}
this.length = this.buf.length - this.offset
this[READ]()
}
})
class WriteEntrySync extends WriteEntry {
[LSTAT] () {
this[ONLSTAT](fs.lstatSync(this.absolute))
}
[SYMLINK] () {
this[ONREADLINK](fs.readlinkSync(this.absolute))
}
[OPENFILE] () {
this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
}
[READ] () {
let threw = true
try {
const { fd, buf, offset, length, pos } = this
const bytesRead = fs.readSync(fd, buf, offset, length, pos)
this[ONREAD](bytesRead)
threw = false
} finally {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
if (threw) {
try {
this[CLOSE](() => {})
} catch (er) {}
}
}
}
[AWAITDRAIN] (cb) {
cb()
}
[CLOSE] (cb) {
fs.closeSync(this.fd)
cb()
}
}
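// WriteEntryTar re-serializes an entry that was parsed from an existing
// archive (a ReadEntry) back into tar format; it is what pack uses for
// entries that come from another tarball (the '@archive.tar' syntax).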
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)
this.preservePaths = !!opt.preservePaths
this.portable = !!opt.portable
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.readEntry = readEntry
this.type = readEntry.type
if (this.type === 'Directory' && this.portable)
this.noMtime = true
this.prefix = opt.prefix || null
this.path = normPath(readEntry.path)
this.mode = this[MODE](readEntry.mode)
this.uid = this.portable ? null : readEntry.uid
this.gid = this.portable ? null : readEntry.gid
this.uname = this.portable ? null : readEntry.uname
this.gname = this.portable ? null : readEntry.gname
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = normPath(readEntry.linkpath)
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
}
this.remain = readEntry.size
this.blockRemain = readEntry.startBlockSize
this.header = new Header({
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this.mode,
uid: this.portable ? null : this.uid,
gid: this.portable ? null : this.gid,
size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
})
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
})
}
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
gid: this.portable ? null : this.gid,
mtime: this.noMtime ? null : this.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
}
super.write(this.header.block)
readEntry.pipe(this)
}
[PREFIX] (path) {
return prefixPath(path, this.prefix)
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory', this.portable)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
this.blockRemain -= writeLen
return super.write(data)
}
end () {
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return super.end()
}
})
WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const getType = stat =>
stat.isFile() ? 'File'
: stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = WriteEntry

9
map.js Normal file
View File

@ -0,0 +1,9 @@
const {basename} = require('path')
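// coverage map for tap: given a test filename, return the lib file(s) that
// test is responsible for covering (see "coverage-map" in package.json)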
const map = test =>
test === 'index.js' || test === 'map.js' ? test
: test === 'unpack.js' ? ['lib/unpack.js', 'lib/mkdir.js']
: test === 'load-all.js' ? []
: `lib/${test}`
module.exports = test => map(basename(test))

12411
package-lock.json generated Normal file

File diff suppressed because it is too large

59
package.json Normal file
View File

@ -0,0 +1,59 @@
{
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"name": "tar",
"description": "tar for node",
"version": "6.1.11",
"repository": {
"type": "git",
"url": "https://github.com/npm/node-tar.git"
},
"scripts": {
"test:posix": "tap",
"test:win32": "tap --lines=98 --branches=98 --statements=98 --functions=98",
"test": "node test/fixtures/test.js",
"posttest": "npm run lint",
"eslint": "eslint",
"lint": "npm run eslint -- test lib",
"lintfix": "npm run lint -- --fix",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
"genparse": "node scripts/generate-parse-fixtures.js",
"bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done"
},
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
"minipass": "^3.0.0",
"minizlib": "^2.1.1",
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"devDependencies": {
"chmodr": "^1.2.0",
"end-of-stream": "^1.4.3",
"eslint": "^7.17.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^5.0.0",
"events-to-array": "^1.1.2",
"mutate-fs": "^2.1.1",
"rimraf": "^2.7.1",
"tap": "^15.0.9",
"tar-fs": "^1.16.3",
"tar-stream": "^1.6.2"
},
"license": "ISC",
"engines": {
"node": ">= 10"
},
"files": [
"index.js",
"lib/*.js"
],
"tap": {
"coverage-map": "map.js",
"check-coverage": true
}
}

View File

@ -0,0 +1,97 @@
'use strict'
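// Regenerates the JSON event-log fixtures in test/fixtures/parse by running
// every archive in test/fixtures/tars through the parser with each
// combination of maxMetaEntrySize, filter, and strict options.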
const Parse = require('../lib/parse.js')
const fs = require('fs')
const path = require('path')
const tardir = path.resolve(__dirname, '../test/fixtures/tars')
const parsedir = path.resolve(__dirname, '../test/fixtures/parse')
const maxMetaOpt = [250, null]
const filterOpt = [ true, false ]
const strictOpt = [ true, false ]
const makeTest = (tarfile, tardata, maxMeta, filter, strict) => {
const o =
(maxMeta ? '-meta-' + maxMeta : '') +
(filter ? '-filter' : '') +
(strict ? '-strict' : '')
const tail = (o ? '-' + o : '') + '.json'
const eventsfile = parsedir + '/' + path.basename(tarfile, '.tar') + tail
const p = new Parse({
maxMetaEntrySize: maxMeta,
filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
strict: strict
})
const events = []
const pushEntry = type => entry => {
events.push([type, {
extended: entry.extended,
globalExtended: entry.globalExtended,
type: entry.type,
meta: entry.meta,
ignore: entry.ignore,
path: entry.path,
mode: entry.mode,
uid: entry.uid,
gid: entry.gid,
uname: entry.uname,
gname: entry.gname,
size: entry.size,
mtime: entry.mtime,
atime: entry.atime,
ctime: entry.ctime,
linkpath: entry.linkpath,
header: {
cksumValid: entry.header.cksumValid,
needPax: entry.header.needPax,
path: entry.header.path,
mode: entry.header.mode,
uid: entry.header.uid,
gid: entry.header.gid,
size: entry.header.size,
mtime: entry.header.mtime,
cksum: entry.header.cksum,
linkpath: entry.header.linkpath,
ustar: entry.header.ustar,
ustarver: entry.header.ustarver,
uname: entry.header.uname,
gname: entry.header.gname,
devmaj: entry.header.devmaj,
devmin: entry.header.devmin,
ustarPrefix: entry.header.ustarPrefix,
xstarPrefix: entry.header.xstarPrefix,
prefixTerminator: entry.header.prefixTerminator,
atime: entry.header.atime,
        ctime: entry.header.ctime
}
}])
entry.resume()
}
p.on('entry', pushEntry('entry'))
p.on('ignoredEntry', pushEntry('ignoredEntry'))
p.on('warn', (code, message, data) => events.push(['warn', code, message]))
p.on('error', er => events.push(['error', {
message: er.message,
code: er.code
}]))
p.on('end', _ => events.push(['end']))
p.on('nullBlock', _ => events.push(['nullBlock']))
p.on('eof', _ => events.push(['eof']))
p.on('meta', meta => events.push(['meta', meta]))
p.end(tardata)
console.log(eventsfile)
fs.writeFileSync(eventsfile, JSON.stringify(events, null, 2) + '\n')
}
fs.readdirSync(tardir)
.forEach(tar => {
const tarfile = tardir + '/' + tar
const tardata = fs.readFileSync(tarfile)
maxMetaOpt.forEach(maxMeta =>
filterOpt.forEach(filter =>
strictOpt.forEach(strict =>
makeTest(tarfile, tardata, maxMeta, filter, strict))))
})

227
test/create.js Normal file
View File

@ -0,0 +1,227 @@
'use strict'
const isWindows = process.platform === 'win32'
const t = require('tap')
const c = require('../lib/create.js')
const list = require('../lib/list.js')
const fs = require('fs')
const path = require('path')
const dir = path.resolve(__dirname, 'fixtures/create')
const tars = path.resolve(__dirname, 'fixtures/tars')
const rimraf = require('rimraf')
const mkdirp = require('mkdirp')
const spawn = require('child_process').spawn
const Pack = require('../lib/pack.js')
const mutateFS = require('mutate-fs')
const {promisify} = require('util')
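// list a tarball with the system `tar` binary so results can be checked
// against an implementation other than the one under test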
const readtar = (file, cb) => {
const child = spawn('tar', ['tf', file])
const out = []
child.stdout.on('data', c => out.push(c))
child.on('close', (code, signal) =>
cb(code, signal, Buffer.concat(out).toString()))
}
t.teardown(() => new Promise(resolve => rimraf(dir, resolve)))
t.before(async () => {
await promisify(rimraf)(dir)
await mkdirp(dir)
})
t.test('no cb if sync or without file', t => {
t.throws(_ => c({ sync: true }, ['asdf'], _ => _))
t.throws(_ => c(_ => _))
t.throws(_ => c({}, _ => _))
t.throws(_ => c({}, ['asdf'], _ => _))
t.end()
})
t.test('create file', t => {
const files = [path.basename(__filename)]
t.test('sync', t => {
const file = path.resolve(dir, 'sync.tar')
c({
file: file,
cwd: __dirname,
sync: true,
}, files)
readtar(file, (code, signal, list) => {
t.equal(code, 0)
t.equal(signal, null)
t.equal(list.trim(), 'create.js')
t.end()
})
})
t.test('async', t => {
const file = path.resolve(dir, 'async.tar')
c({
file: file,
cwd: __dirname,
}, files, er => {
if (er)
throw er
readtar(file, (code, signal, list) => {
t.equal(code, 0)
t.equal(signal, null)
t.equal(list.trim(), 'create.js')
t.end()
})
})
})
t.test('async promise only', t => {
const file = path.resolve(dir, 'promise.tar')
c({
file: file,
cwd: __dirname,
}, files).then(_ => {
readtar(file, (code, signal, list) => {
t.equal(code, 0)
t.equal(signal, null)
t.equal(list.trim(), 'create.js')
t.end()
})
})
})
t.test('with specific mode', t => {
const mode = isWindows ? 0o666 : 0o740
t.test('sync', t => {
const file = path.resolve(dir, 'sync-mode.tar')
c({
mode: mode,
file: file,
cwd: __dirname,
sync: true,
}, files)
readtar(file, (code, signal, list) => {
t.equal(code, 0)
t.equal(signal, null)
t.equal(list.trim(), 'create.js')
t.equal(fs.lstatSync(file).mode & 0o7777, mode)
t.end()
})
})
t.test('async', t => {
const file = path.resolve(dir, 'async-mode.tar')
c({
mode: mode,
file: file,
cwd: __dirname,
}, files, er => {
if (er)
throw er
readtar(file, (code, signal, list) => {
t.equal(code, 0)
t.equal(signal, null)
t.equal(list.trim(), 'create.js')
t.equal(fs.lstatSync(file).mode & 0o7777, mode)
t.end()
})
})
})
t.end()
})
t.end()
})
t.test('create', t => {
t.type(c({ sync: true }, ['README.md']), Pack.Sync)
t.type(c(['README.md']), Pack)
t.end()
})
t.test('open fails', t => {
const poop = new Error('poop')
const file = path.resolve(dir, 'throw-open.tar')
t.teardown(mutateFS.statFail(poop))
t.throws(_ => c({
file: file,
sync: true,
cwd: __dirname,
}, [path.basename(__filename)]))
t.throws(_ => fs.lstatSync(file))
t.end()
})
t.test('gzipped tarball that makes some drain/resume stuff', t => {
const cwd = path.dirname(__dirname)
const out = path.resolve(dir, 'package.tgz')
// don't include node_modules/.cache, since that gets written to
// by nyc during tests, and can result in spurious errors.
const entries = fs.readdirSync(`${cwd}/node_modules`)
.filter(e => !/^\./.test(e))
.map(e => `node_modules/${e}`)
c({ z: true, C: cwd }, entries)
.pipe(fs.createWriteStream(out))
.on('finish', _ => {
const child = spawn('tar', ['tf', out], {
stdio: ['ignore', 'ignore', 'pipe'],
})
child.stderr.on('data', c => {
t.fail(c + '')
})
child.on('close', (code, signal) => {
t.equal(code, 0)
t.equal(signal, null)
t.end()
})
})
})
t.test('create tarball out of another tarball', t => {
const out = path.resolve(dir, 'out.tar')
const check = t => {
const expect = [
'dir/',
'Ω.txt',
'🌟.txt',
'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt',
'hardlink-1',
'hardlink-2',
'symlink',
]
list({ f: out,
sync: true,
onentry: entry => {
if (entry.path === 'hardlink-2')
t.equal(entry.type, 'Link')
else if (entry.path === 'symlink')
t.equal(entry.type, 'SymbolicLink')
else if (entry.path === 'dir/')
t.equal(entry.type, 'Directory')
else
t.equal(entry.type, 'File')
t.equal(entry.path, expect.shift())
}})
t.same(expect, [])
t.end()
}
t.test('sync', t => {
c({
f: out,
cwd: tars,
sync: true,
}, ['@dir.tar', '@utf8.tar', '@links.tar'])
check(t)
})
t.test('async', t => {
c({
f: out,
cwd: tars,
}, ['@dir.tar', '@utf8.tar', '@links.tar'], _ => check(t))
})
t.end()
})

BIN
test/dir-normalization.tar Normal file

Binary file not shown.

231
test/extract.js Normal file
View File

@ -0,0 +1,231 @@
'use strict'
const t = require('tap')
const x = require('../lib/extract.js')
const path = require('path')
const fs = require('fs')
const extractdir = path.resolve(__dirname, 'fixtures/extract')
const tars = path.resolve(__dirname, 'fixtures/tars')
const mkdirp = require('mkdirp')
const {promisify} = require('util')
const rimraf = promisify(require('rimraf'))
const mutateFS = require('mutate-fs')
t.teardown(_ => rimraf(extractdir))
t.test('basic extracting', t => {
const file = path.resolve(tars, 'utf8.tar')
const dir = path.resolve(extractdir, 'basic')
t.beforeEach(async () => {
await rimraf(dir)
await mkdirp(dir)
})
const check = async t => {
fs.lstatSync(dir + '/Ω.txt')
fs.lstatSync(dir + '/🌟.txt')
t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' +
'/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'))
await rimraf(dir)
t.end()
}
const files = ['🌟.txt', 'Ω.txt']
t.test('sync', t => {
x({ file: file, sync: true, C: dir }, files)
return check(t)
})
t.test('async promisey', t => {
return x({ file: file, cwd: dir }, files).then(_ => check(t))
})
t.test('async cb', t => {
return x({ file: file, cwd: dir }, files, er => {
if (er)
throw er
return check(t)
})
})
t.end()
})
t.test('file list and filter', t => {
const file = path.resolve(tars, 'utf8.tar')
const dir = path.resolve(extractdir, 'filter')
t.beforeEach(async () => {
await rimraf(dir)
await mkdirp(dir)
})
const check = async t => {
fs.lstatSync(dir + '/Ω.txt')
t.throws(_ => fs.lstatSync(dir + '/🌟.txt'))
t.throws(_ => fs.lstatSync(dir + '/long-path/r/e/a/l/l/y/-/d/e/e/p/-' +
'/f/o/l/d/e/r/-/p/a/t/h/Ω.txt'))
await rimraf(dir)
t.end()
}
const filter = path => path === 'Ω.txt'
t.test('sync', t => {
x({ filter: filter, file: file, sync: true, C: dir }, ['🌟.txt', 'Ω.txt'])
return check(t)
})
t.test('async promisey', t => {
return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt']).then(_ => {
return check(t)
})
})
t.test('async cb', t => {
return x({ filter: filter, file: file, cwd: dir }, ['🌟.txt', 'Ω.txt'], er => {
if (er)
throw er
return check(t)
})
})
t.end()
})
t.test('no file list', t => {
const file = path.resolve(tars, 'body-byte-counts.tar')
const dir = path.resolve(extractdir, 'no-list')
t.beforeEach(async () => {
await rimraf(dir)
await mkdirp(dir)
})
const check = async t => {
t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024)
t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512)
t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1)
t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0)
await rimraf(dir)
t.end()
}
t.test('sync', t => {
x({ file: file, sync: true, C: dir })
return check(t)
})
t.test('async promisey', t => {
return x({ file: file, cwd: dir }).then(_ => {
return check(t)
})
})
t.test('async cb', t => {
return x({ file: file, cwd: dir }, er => {
if (er)
throw er
return check(t)
})
})
t.end()
})
t.test('read in itty bits', t => {
const maxReadSize = 1000
const file = path.resolve(tars, 'body-byte-counts.tar')
const dir = path.resolve(extractdir, 'no-list')
t.beforeEach(async () => {
await rimraf(dir)
await mkdirp(dir)
})
const check = async t => {
t.equal(fs.lstatSync(path.resolve(dir, '1024-bytes.txt')).size, 1024)
t.equal(fs.lstatSync(path.resolve(dir, '512-bytes.txt')).size, 512)
t.equal(fs.lstatSync(path.resolve(dir, 'one-byte.txt')).size, 1)
t.equal(fs.lstatSync(path.resolve(dir, 'zero-byte.txt')).size, 0)
await rimraf(dir)
t.end()
}
t.test('sync', t => {
x({ file: file, sync: true, C: dir, maxReadSize: maxReadSize })
return check(t)
})
t.test('async promisey', t => {
return x({ file: file, cwd: dir, maxReadSize: maxReadSize }).then(_ => {
return check(t)
})
})
t.test('async cb', t => {
return x({ file: file, cwd: dir, maxReadSize: maxReadSize }, er => {
if (er)
throw er
return check(t)
})
})
t.end()
})
t.test('bad calls', t => {
t.throws(_ => x(_ => _))
t.throws(_ => x({sync: true}, _ => _))
t.throws(_ => x({sync: true}, [], _ => _))
t.end()
})
t.test('no file', t => {
const Unpack = require('../lib/unpack.js')
t.type(x(), Unpack)
t.type(x(['asdf']), Unpack)
t.type(x({sync: true}), Unpack.Sync)
t.end()
})
t.test('nonexistent', t => {
t.throws(_ => x({sync: true, file: 'does not exist' }))
x({ file: 'does not exist' }).catch(_ => t.end())
})
t.test('read fail', t => {
const poop = new Error('poop')
t.teardown(mutateFS.fail('read', poop))
t.throws(_ => x({maxReadSize: 10, sync: true, file: __filename }), poop)
t.end()
})
t.test('sync gzip error edge case test', async t => {
const file = path.resolve(__dirname, 'fixtures/sync-gzip-fail.tgz')
const dir = path.resolve(__dirname, 'sync-gzip-fail')
const cwd = process.cwd()
await mkdirp(dir + '/x')
process.chdir(dir)
t.teardown(async () => {
process.chdir(cwd)
await rimraf(dir)
})
x({
sync: true,
file: file,
onwarn: (c, m, er) => {
throw er
},
})
t.same(fs.readdirSync(dir + '/x').sort(),
['1', '10', '2', '3', '4', '5', '6', '7', '8', '9'])
t.end()
})

1
test/fixtures/files/.dotfile vendored Normal file
View File

@ -0,0 +1 @@
.

View File

@ -0,0 +1 @@
cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc

1
test/fixtures/files/1024-bytes.txt vendored Normal file
View File

@ -0,0 +1 @@
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

View File

@ -0,0 +1 @@
cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc

View File

@ -0,0 +1 @@
cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc

1
test/fixtures/files/512-bytes.txt vendored Normal file
View File

@ -0,0 +1 @@
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

View File

@ -0,0 +1,3 @@
Behold! I am a text file and NOT a tar header.
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

View File

@ -0,0 +1,3 @@
Behold! I am a text file and NOT a tar header.
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

0
test/fixtures/files/dir/x vendored Normal file
View File

1
test/fixtures/files/hardlink-1 vendored Normal file
View File

@ -0,0 +1 @@
this link is like diamond

1
test/fixtures/files/hardlink-2 vendored Normal file
View File

@ -0,0 +1 @@
this link is like diamond

View File

@ -0,0 +1 @@
short

View File

@ -0,0 +1 @@
1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111

View File

@ -0,0 +1 @@
2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222

1
test/fixtures/files/longlink vendored Symbolic link
View File

@ -0,0 +1 @@
170-byte-filename-cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc

1
test/fixtures/files/one-byte.txt vendored Normal file
View File

@ -0,0 +1 @@
a

View File

@ -0,0 +1 @@
this link is like diamond

View File

@ -0,0 +1 @@
this link is like diamond

View File

@ -0,0 +1 @@
this link is like diamond

1
test/fixtures/files/strip-dir/symlink vendored Symbolic link
View File

@ -0,0 +1 @@
hardlink-2

1
test/fixtures/files/symlink vendored Symbolic link
View File

@ -0,0 +1 @@
hardlink-2

0
test/fixtures/files/zero-byte.txt vendored Normal file
View File

1
test/fixtures/files/Ω.txt vendored Normal file
View File

@ -0,0 +1 @@
Ω

1
test/fixtures/files/🌟.txt vendored Normal file
View File

@ -0,0 +1 @@
🌟✧✩⭐︎✪✫✬✭✮⚝✯✰✵✶✷✸✹❂⭑⭒★☆✡☪✴︎✦✡️🔯✴️🌠

View File

@ -0,0 +1,64 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,60 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,64 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,60 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,64 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,60 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,64 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"error",
{
"message": "TAR_ENTRY_INVALID: checksum failure",
"code": "TAR_ENTRY_INVALID"
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

60
test/fixtures/parse/bad-cksum.json vendored Normal file
View File

@ -0,0 +1,60 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"warn",
"TAR_ENTRY_INVALID",
"checksum failure"
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,167 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

View File

@ -0,0 +1,167 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,167 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "1024-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1024,
"mtime": "2017-04-10T16:57:47.000Z",
"cksum": 6109,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "512-bytes.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 512,
"mtime": "2017-04-10T17:08:55.000Z",
"cksum": 6064,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "zero-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:08:01.000Z",
"cksum": 6246,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,50 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": true,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

50
test/fixtures/parse/dir--filter.json vendored Normal file

@ -0,0 +1,50 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": true,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,50 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": true,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,50 @@
[
[
"ignoredEntry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": true,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,50 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

50
test/fixtures/parse/dir--meta-250.json vendored Normal file

@ -0,0 +1,50 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

50
test/fixtures/parse/dir--strict.json vendored Normal file

@ -0,0 +1,50 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

50
test/fixtures/parse/dir.json vendored Normal file

@ -0,0 +1,50 @@
[
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "Directory",
"meta": false,
"ignore": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "dir/",
"mode": 493,
"uid": 501,
"gid": 20,
"size": 0,
"mtime": "2017-04-10T17:00:17.000Z",
"cksum": 5284,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]


@ -0,0 +1,110 @@
[
[
"meta",
"17 path=🌟.txt\n20 ctime=1491843956\n20 atime=1491845601\n23 SCHILY.dev=16777220\n22 SCHILY.ino=9836716\n18 SCHILY.nlink=1\n"
],
[
"ignoredEntry",
{
"extended": {
"atime": "2017-04-10T17:33:21.000Z",
"charset": null,
"comment": null,
"ctime": "2017-04-10T17:05:56.000Z",
"gid": null,
"gname": null,
"linkpath": null,
"mtime": null,
"path": "🌟.txt",
"size": null,
"uid": null,
"uname": null,
"dev": 16777220,
"ino": 9836716,
"nlink": 1,
"global": false
},
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": true,
"path": "🌟.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 106,
"mtime": "2017-04-10T17:05:55.000Z",
"atime": "2017-04-10T17:33:21.000Z",
"ctime": "2017-04-10T17:05:56.000Z",
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "🌟.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 106,
"mtime": "2017-04-10T17:05:55.000Z",
"cksum": 6023,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"entry",
{
"extended": null,
"globalExtended": null,
"type": "File",
"meta": false,
"ignore": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"uname": "isaacs",
"gname": "staff",
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"atime": null,
"ctime": null,
"linkpath": "",
"header": {
"cksumValid": true,
"needPax": false,
"path": "one-byte.txt",
"mode": 420,
"uid": 501,
"gid": 20,
"size": 1,
"mtime": "2017-04-10T16:58:20.000Z",
"cksum": 6121,
"linkpath": "",
"uname": "isaacs",
"gname": "staff",
"devmaj": 0,
"devmin": 0,
"atime": null,
"ctime": null
}
}
],
[
"nullBlock"
],
[
"eof"
],
[
"end"
]
]

Some files were not shown because too many files have changed in this diff.