Move to streamx for streams, brittle for testing, and b4a for buffer operations (#143)

* Move to `streamx` for streams and `brittle` for testing

* More `b4a` and octal notation

* More `b4a`
This commit is contained in:
Kasper Isager Dalsgarð 2022-12-06 15:04:04 +00:00 committed by GitHub
parent e08fcd8457
commit 32353cc02c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 754 additions and 800 deletions

23
.github/workflows/test-node.yml vendored Normal file
View file

@@ -0,0 +1,23 @@
name: Build Status
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
strategy:
matrix:
node-version: [lts/*]
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
- run: npm install
- run: npm test

1
.gitignore vendored
View file

@@ -1,2 +1,3 @@
node_modules
package-lock.json
sandbox.js

View file

@@ -1,7 +0,0 @@
language: node_js
node_js:
- '6'
- '8'
- '10'
- '12'
- '14'

View file

@@ -1,6 +1,6 @@
# tar-stream
tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.
tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.
Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
@@ -28,7 +28,7 @@ To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback]
``` js
var tar = require('tar-stream')
var pack = tar.pack() // pack is a streams2 stream
var pack = tar.pack() // pack is a stream
// add a file called my-test.txt with the content "Hello World!"
pack.entry({ name: 'my-test.txt' }, 'Hello World!')
@@ -136,7 +136,7 @@ pack.pipe(newTarballStream)
var fs = require('fs')
var tar = require('tar-stream')
var pack = tar.pack() // pack is a streams2 stream
var pack = tar.pack() // pack is a stream
var path = 'YourTarBall.tar'
var yourTarball = fs.createWriteStream(path)

View file

@@ -1,24 +1,21 @@
var util = require('util')
var bl = require('bl')
var headers = require('./headers')
const bl = require('bl')
const { Writable, PassThrough } = require('streamx')
const headers = require('./headers')
var Writable = require('readable-stream').Writable
var PassThrough = require('readable-stream').PassThrough
const noop = function () {}
var noop = function () {}
var overflow = function (size) {
const overflow = function (size) {
size &= 511
return size && 512 - size
}
var emptyStream = function (self, offset) {
var s = new Source(self, offset)
const emptyStream = function (self, offset) {
const s = new Source(self, offset)
s.end()
return s
}
var mixinPax = function (header, pax) {
const mixinPax = function (header, pax) {
if (pax.path) header.name = pax.path
if (pax.linkpath) header.linkname = pax.linkpath
if (pax.size) header.size = parseInt(pax.size, 10)
@@ -26,232 +23,224 @@ var mixinPax = function (header, pax) {
return header
}
var Source = function (self, offset) {
this._parent = self
this.offset = offset
PassThrough.call(this, { autoDestroy: false })
class Source extends PassThrough {
constructor (self, offset) {
super()
this._parent = self
this.offset = offset
}
_predestroy () {
this._parent.destroy()
}
}
util.inherits(Source, PassThrough)
class Extract extends Writable {
constructor (opts) {
super(opts)
Source.prototype.destroy = function (err) {
this._parent.destroy(err)
}
opts = opts || {}
var Extract = function (opts) {
if (!(this instanceof Extract)) return new Extract(opts)
Writable.call(this, opts)
opts = opts || {}
this._offset = 0
this._buffer = bl()
this._missing = 0
this._partial = false
this._onparse = noop
this._header = null
this._stream = null
this._overflow = null
this._cb = null
this._locked = false
this._destroyed = false
this._pax = null
this._paxGlobal = null
this._gnuLongPath = null
this._gnuLongLinkPath = null
var self = this
var b = self._buffer
var oncontinue = function () {
self._continue()
}
var onunlock = function (err) {
self._locked = false
if (err) return self.destroy(err)
if (!self._stream) oncontinue()
}
var onstreamend = function () {
self._stream = null
var drain = overflow(self._header.size)
if (drain) self._parse(drain, ondrain)
else self._parse(512, onheader)
if (!self._locked) oncontinue()
}
var ondrain = function () {
self._buffer.consume(overflow(self._header.size))
self._parse(512, onheader)
oncontinue()
}
var onpaxglobalheader = function () {
var size = self._header.size
self._paxGlobal = headers.decodePax(b.slice(0, size))
b.consume(size)
onstreamend()
}
var onpaxheader = function () {
var size = self._header.size
self._pax = headers.decodePax(b.slice(0, size))
if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
b.consume(size)
onstreamend()
}
var ongnulongpath = function () {
var size = self._header.size
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
var ongnulonglinkpath = function () {
var size = self._header.size
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
var onheader = function () {
var offset = self._offset
var header
try {
header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
} catch (err) {
self.emit('error', err)
}
b.consume(512)
if (!header) {
self._parse(512, onheader)
oncontinue()
return
}
if (header.type === 'gnu-long-path') {
self._parse(header.size, ongnulongpath)
oncontinue()
return
}
if (header.type === 'gnu-long-link-path') {
self._parse(header.size, ongnulonglinkpath)
oncontinue()
return
}
if (header.type === 'pax-global-header') {
self._parse(header.size, onpaxglobalheader)
oncontinue()
return
}
if (header.type === 'pax-header') {
self._parse(header.size, onpaxheader)
oncontinue()
return
}
if (self._gnuLongPath) {
header.name = self._gnuLongPath
self._gnuLongPath = null
}
if (self._gnuLongLinkPath) {
header.linkname = self._gnuLongLinkPath
self._gnuLongLinkPath = null
}
if (self._pax) {
self._header = header = mixinPax(header, self._pax)
self._pax = null
}
self._locked = true
if (!header.size || header.type === 'directory') {
self._parse(512, onheader)
self.emit('entry', header, emptyStream(self, offset), onunlock)
return
}
self._stream = new Source(self, offset)
self.emit('entry', header, self._stream, onunlock)
self._parse(header.size, onstreamend)
oncontinue()
}
this._onheader = onheader
this._parse(512, onheader)
}
util.inherits(Extract, Writable)
Extract.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
if (err) this.emit('error', err)
this.emit('close')
if (this._stream) this._stream.emit('close')
}
Extract.prototype._parse = function (size, onparse) {
if (this._destroyed) return
this._offset += size
this._missing = size
if (onparse === this._onheader) this._partial = false
this._onparse = onparse
}
Extract.prototype._continue = function () {
if (this._destroyed) return
var cb = this._cb
this._cb = noop
if (this._overflow) this._write(this._overflow, undefined, cb)
else cb()
}
Extract.prototype._write = function (data, enc, cb) {
if (this._destroyed) return
var s = this._stream
var b = this._buffer
var missing = this._missing
if (data.length) this._partial = true
// we do not reach end-of-chunk now. just forward it
if (data.length < missing) {
this._missing -= data.length
this._offset = 0
this._buffer = bl()
this._missing = 0
this._partial = false
this._onparse = noop
this._header = null
this._stream = null
this._overflow = null
if (s) return s.write(data, cb)
b.append(data)
return cb()
this._cb = null
this._locked = false
this._pax = null
this._paxGlobal = null
this._gnuLongPath = null
this._gnuLongLinkPath = null
const self = this
const b = self._buffer
const oncontinue = function () {
self._continue()
}
const onunlock = function (err) {
self._locked = false
if (err) return self.destroy(err)
if (!self._stream) oncontinue()
}
const onstreamend = function () {
self._stream = null
const drain = overflow(self._header.size)
if (drain) self._parse(drain, ondrain)
else self._parse(512, onheader)
if (!self._locked) oncontinue()
}
const ondrain = function () {
self._buffer.consume(overflow(self._header.size))
self._parse(512, onheader)
oncontinue()
}
const onpaxglobalheader = function () {
const size = self._header.size
self._paxGlobal = headers.decodePax(b.slice(0, size))
b.consume(size)
onstreamend()
}
const onpaxheader = function () {
const size = self._header.size
self._pax = headers.decodePax(b.slice(0, size))
if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
b.consume(size)
onstreamend()
}
const ongnulongpath = function () {
const size = self._header.size
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
const ongnulonglinkpath = function () {
const size = self._header.size
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
}
const onheader = function () {
const offset = self._offset
let header
try {
header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
} catch (err) {
self.destroy(err)
}
b.consume(512)
if (!header) {
self._parse(512, onheader)
oncontinue()
return
}
if (header.type === 'gnu-long-path') {
self._parse(header.size, ongnulongpath)
oncontinue()
return
}
if (header.type === 'gnu-long-link-path') {
self._parse(header.size, ongnulonglinkpath)
oncontinue()
return
}
if (header.type === 'pax-global-header') {
self._parse(header.size, onpaxglobalheader)
oncontinue()
return
}
if (header.type === 'pax-header') {
self._parse(header.size, onpaxheader)
oncontinue()
return
}
if (self._gnuLongPath) {
header.name = self._gnuLongPath
self._gnuLongPath = null
}
if (self._gnuLongLinkPath) {
header.linkname = self._gnuLongLinkPath
self._gnuLongLinkPath = null
}
if (self._pax) {
self._header = header = mixinPax(header, self._pax)
self._pax = null
}
self._locked = true
if (!header.size || header.type === 'directory') {
self._parse(512, onheader)
self.emit('entry', header, emptyStream(self, offset), onunlock)
return
}
self._stream = new Source(self, offset)
self.emit('entry', header, self._stream, onunlock)
self._parse(header.size, onstreamend)
oncontinue()
}
this._onheader = onheader
this._parse(512, onheader)
}
// end-of-chunk. the parser should call cb.
this._cb = cb
this._missing = 0
var overflow = null
if (data.length > missing) {
overflow = data.slice(missing)
data = data.slice(0, missing)
_parse (size, onparse) {
this._offset += size
this._missing = size
if (onparse === this._onheader) this._partial = false
this._onparse = onparse
}
if (s) s.end(data)
else b.append(data)
_continue () {
const cb = this._cb
this._cb = noop
if (this._overflow) this._write(this._overflow, cb)
else cb()
}
this._overflow = overflow
this._onparse()
_write (data, cb) {
const s = this._stream
const b = this._buffer
const missing = this._missing
if (data.byteLength) this._partial = true
// we do not reach end-of-chunk now. just forward it
if (data.byteLength < missing) {
this._missing -= data.byteLength
this._overflow = null
if (s) {
if (s.write(data, cb)) cb()
else s.once('drain', cb)
return
}
b.append(data)
return cb()
}
// end-of-chunk. the parser should call cb.
this._cb = cb
this._missing = 0
let overflow = null
if (data.byteLength > missing) {
overflow = data.subarray(missing)
data = data.subarray(0, missing)
}
if (s) s.end(data)
else b.append(data)
this._overflow = overflow
this._onparse()
}
_final (cb) {
cb(this._partial ? new Error('Unexpected end of data') : null)
}
}
Extract.prototype._final = function (cb) {
if (this._partial) return this.destroy(new Error('Unexpected end of data'))
cb()
module.exports = function extract (opts) {
return new Extract(opts)
}
module.exports = Extract

View file

@@ -1,17 +1,17 @@
var alloc = Buffer.alloc
const b4a = require('b4a')
var ZEROS = '0000000000000000000'
var SEVENS = '7777777777777777777'
var ZERO_OFFSET = '0'.charCodeAt(0)
var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary')
var USTAR_VER = Buffer.from('00', 'binary')
var GNU_MAGIC = Buffer.from('ustar\x20', 'binary')
var GNU_VER = Buffer.from('\x20\x00', 'binary')
var MASK = parseInt('7777', 8)
var MAGIC_OFFSET = 257
var VERSION_OFFSET = 263
const ZEROS = '0000000000000000000'
const SEVENS = '7777777777777777777'
const ZERO_OFFSET = '0'.charCodeAt(0)
const USTAR_MAGIC = b4a.from('ustar\x00', 'binary')
const USTAR_VER = b4a.from('00', 'binary')
const GNU_MAGIC = b4a.from('ustar\x20', 'binary')
const GNU_VER = b4a.from('\x20\x00', 'binary')
const MASK = 0o7777
const MAGIC_OFFSET = 257
const VERSION_OFFSET = 263
var clamp = function (index, len, defaultValue) {
const clamp = function (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index // Coerce to integer.
if (index >= len) return len
@@ -21,7 +21,7 @@ var clamp = function (index, len, defaultValue) {
return 0
}
var toType = function (flag) {
const toType = function (flag) {
switch (flag) {
case 0:
return 'file'
@@ -53,7 +53,7 @@ var toType = function (flag) {
return null
}
var toTypeflag = function (flag) {
const toTypeflag = function (flag) {
switch (flag) {
case 'file':
return 0
@@ -78,21 +78,21 @@ var toTypeflag = function (flag) {
return 0
}
var indexOf = function (block, num, offset, end) {
const indexOf = function (block, num, offset, end) {
for (; offset < end; offset++) {
if (block[offset] === num) return offset
}
return end
}
var cksum = function (block) {
var sum = 8 * 32
for (var i = 0; i < 148; i++) sum += block[i]
for (var j = 156; j < 512; j++) sum += block[j]
const cksum = function (block) {
let sum = 8 * 32
for (let i = 0; i < 148; i++) sum += block[i]
for (let j = 156; j < 512; j++) sum += block[j]
return sum
}
var encodeOct = function (val, n) {
const encodeOct = function (val, n) {
val = val.toString(8)
if (val.length > n) return SEVENS.slice(0, n) + ' '
else return ZEROS.slice(0, n - val.length) + val + ' '
@@ -106,21 +106,22 @@ var encodeOct = function (val, n) {
function parse256 (buf) {
// first byte MUST be either 80 or FF
// 80 for positive, FF for 2's comp
var positive
let positive
if (buf[0] === 0x80) positive = true
else if (buf[0] === 0xFF) positive = false
else return null
// build up a base-256 tuple from the least sig to the highest
var tuple = []
for (var i = buf.length - 1; i > 0; i--) {
var byte = buf[i]
const tuple = []
let i
for (i = buf.length - 1; i > 0; i--) {
const byte = buf[i]
if (positive) tuple.push(byte)
else tuple.push(0xFF - byte)
}
var sum = 0
var l = tuple.length
let sum = 0
const l = tuple.length
for (i = 0; i < l; i++) {
sum += tuple[i] * Math.pow(256, i)
}
@@ -128,7 +129,7 @@ function parse256 (buf) {
return positive ? sum : -1 * sum
}
var decodeOct = function (val, offset, length) {
const decodeOct = function (val, offset, length) {
val = val.slice(offset, offset + length)
offset = 0
@@ -138,20 +139,20 @@ var decodeOct = function (val, offset, length) {
} else {
// Older versions of tar can prefix with spaces
while (offset < val.length && val[offset] === 32) offset++
var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
const end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
while (offset < end && val[offset] === 0) offset++
if (end === offset) return 0
return parseInt(val.slice(offset, end).toString(), 8)
}
}
var decodeStr = function (val, offset, length, encoding) {
const decodeStr = function (val, offset, length, encoding) {
return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
}
var addLength = function (str) {
var len = Buffer.byteLength(str)
var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
const addLength = function (str) {
const len = b4a.byteLength(str)
let digits = Math.floor(Math.log(len) / Math.log(10)) + 1
if (len + digits >= Math.pow(10, digits)) digits++
return (len + digits) + str
@@ -162,29 +163,29 @@ exports.decodeLongPath = function (buf, encoding) {
}
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
var result = ''
let result = ''
if (opts.name) result += addLength(' path=' + opts.name + '\n')
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
var pax = opts.pax
const pax = opts.pax
if (pax) {
for (var key in pax) {
for (const key in pax) {
result += addLength(' ' + key + '=' + pax[key] + '\n')
}
}
return Buffer.from(result)
return b4a.from(result)
}
exports.decodePax = function (buf) {
var result = {}
const result = {}
while (buf.length) {
var i = 0
let i = 0
while (i < buf.length && buf[i] !== 32) i++
var len = parseInt(buf.slice(0, i).toString(), 10)
const len = parseInt(buf.slice(0, i).toString(), 10)
if (!len) return result
var b = buf.slice(i + 1, len - 1).toString()
var keyIndex = b.indexOf('=')
const b = buf.slice(i + 1, len - 1).toString()
const keyIndex = b.indexOf('=')
if (keyIndex === -1) return result
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
@@ -195,65 +196,65 @@ exports.decodePax = function (buf) {
}
exports.encode = function (opts) {
var buf = alloc(512)
var name = opts.name
var prefix = ''
const buf = b4a.alloc(512)
let name = opts.name
let prefix = ''
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
if (Buffer.byteLength(name) !== name.length) return null // utf-8
if (b4a.byteLength(name) !== name.length) return null // utf-8
while (Buffer.byteLength(name) > 100) {
var i = name.indexOf('/')
while (b4a.byteLength(name) > 100) {
const i = name.indexOf('/')
if (i === -1) return null
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
name = name.slice(i + 1)
}
if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
if (b4a.byteLength(name) > 100 || b4a.byteLength(prefix) > 155) return null
if (opts.linkname && b4a.byteLength(opts.linkname) > 100) return null
buf.write(name)
buf.write(encodeOct(opts.mode & MASK, 6), 100)
buf.write(encodeOct(opts.uid, 6), 108)
buf.write(encodeOct(opts.gid, 6), 116)
buf.write(encodeOct(opts.size, 11), 124)
buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
b4a.write(buf, name)
b4a.write(buf, encodeOct(opts.mode & MASK, 6), 100)
b4a.write(buf, encodeOct(opts.uid, 6), 108)
b4a.write(buf, encodeOct(opts.gid, 6), 116)
b4a.write(buf, encodeOct(opts.size, 11), 124)
b4a.write(buf, encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
if (opts.linkname) buf.write(opts.linkname, 157)
if (opts.linkname) b4a.write(buf, opts.linkname, 157)
USTAR_MAGIC.copy(buf, MAGIC_OFFSET)
USTAR_VER.copy(buf, VERSION_OFFSET)
if (opts.uname) buf.write(opts.uname, 265)
if (opts.gname) buf.write(opts.gname, 297)
buf.write(encodeOct(opts.devmajor || 0, 6), 329)
buf.write(encodeOct(opts.devminor || 0, 6), 337)
b4a.copy(USTAR_MAGIC, buf, MAGIC_OFFSET)
b4a.copy(USTAR_VER, buf, VERSION_OFFSET)
if (opts.uname) b4a.write(buf, opts.uname, 265)
if (opts.gname) b4a.write(buf, opts.gname, 297)
b4a.write(buf, encodeOct(opts.devmajor || 0, 6), 329)
b4a.write(buf, encodeOct(opts.devminor || 0, 6), 337)
if (prefix) buf.write(prefix, 345)
if (prefix) b4a.write(buf, prefix, 345)
buf.write(encodeOct(cksum(buf), 6), 148)
b4a.write(buf, encodeOct(cksum(buf), 6), 148)
return buf
}
exports.decode = function (buf, filenameEncoding, allowUnknownFormat) {
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
let typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
var name = decodeStr(buf, 0, 100, filenameEncoding)
var mode = decodeOct(buf, 100, 8)
var uid = decodeOct(buf, 108, 8)
var gid = decodeOct(buf, 116, 8)
var size = decodeOct(buf, 124, 12)
var mtime = decodeOct(buf, 136, 12)
var type = toType(typeflag)
var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
var uname = decodeStr(buf, 265, 32)
var gname = decodeStr(buf, 297, 32)
var devmajor = decodeOct(buf, 329, 8)
var devminor = decodeOct(buf, 337, 8)
let name = decodeStr(buf, 0, 100, filenameEncoding)
const mode = decodeOct(buf, 100, 8)
const uid = decodeOct(buf, 108, 8)
const gid = decodeOct(buf, 116, 8)
const size = decodeOct(buf, 124, 12)
const mtime = decodeOct(buf, 136, 12)
const type = toType(typeflag)
const linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
const uname = decodeStr(buf, 265, 32)
const gname = decodeStr(buf, 297, 32)
const devmajor = decodeOct(buf, 329, 8)
const devminor = decodeOct(buf, 337, 8)
var c = cksum(buf)
const c = cksum(buf)
// checksum is still initial value if header was null.
if (c === 8 * 32) return null

388
pack.js
View file

@@ -1,24 +1,20 @@
var constants = require('fs-constants')
var eos = require('end-of-stream')
var inherits = require('inherits')
var alloc = Buffer.alloc
const { constants } = require('fs')
const { Readable, Writable } = require('streamx')
const { StringDecoder } = require('string_decoder')
const b4a = require('b4a')
var Readable = require('readable-stream').Readable
var Writable = require('readable-stream').Writable
var StringDecoder = require('string_decoder').StringDecoder
const headers = require('./headers')
var headers = require('./headers')
const DMODE = 0o755
const FMODE = 0o644
var DMODE = parseInt('755', 8)
var FMODE = parseInt('644', 8)
const END_OF_TAR = b4a.alloc(1024)
var END_OF_TAR = alloc(1024)
const noop = function () {}
var noop = function () {}
var overflow = function (self, size) {
const overflow = function (self, size) {
size &= 511
if (size) self.push(END_OF_TAR.slice(0, 512 - size))
if (size) self.push(END_OF_TAR.subarray(0, 512 - size))
}
function modeToType (mode) {
@@ -33,223 +29,187 @@ function modeToType (mode) {
return 'file'
}
var Sink = function (to) {
Writable.call(this)
this.written = 0
this._to = to
this._destroyed = false
}
inherits(Sink, Writable)
Sink.prototype._write = function (data, enc, cb) {
this.written += data.length
if (this._to.push(data)) return cb()
this._to._drain = cb
}
Sink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var LinkSink = function () {
Writable.call(this)
this.linkname = ''
this._decoder = new StringDecoder('utf-8')
this._destroyed = false
}
inherits(LinkSink, Writable)
LinkSink.prototype._write = function (data, enc, cb) {
this.linkname += this._decoder.write(data)
cb()
}
LinkSink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Void = function () {
Writable.call(this)
this._destroyed = false
}
inherits(Void, Writable)
Void.prototype._write = function (data, enc, cb) {
cb(new Error('No body allowed for this entry'))
}
Void.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Pack = function (opts) {
if (!(this instanceof Pack)) return new Pack(opts)
Readable.call(this, opts)
this._drain = noop
this._finalized = false
this._finalizing = false
this._destroyed = false
this._stream = null
}
inherits(Pack, Readable)
Pack.prototype.entry = function (header, buffer, callback) {
if (this._stream) throw new Error('already piping an entry')
if (this._finalized || this._destroyed) return
if (typeof buffer === 'function') {
callback = buffer
buffer = null
class Sink extends Writable {
constructor (to) {
super()
this.written = 0
this._to = to
}
if (!callback) callback = noop
_write (data, cb) {
this.written += data.byteLength
if (this._to.push(data)) return cb()
this._to._drain = cb
}
}
var self = this
class LinkSink extends Writable {
constructor () {
super()
this.linkname = ''
this._decoder = new StringDecoder('utf-8')
}
if (!header.size || header.type === 'symlink') header.size = 0
if (!header.type) header.type = modeToType(header.mode)
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
if (!header.uid) header.uid = 0
if (!header.gid) header.gid = 0
if (!header.mtime) header.mtime = new Date()
_write (data, cb) {
this.linkname += this._decoder.write(data)
cb()
}
}
class Void extends Writable {
_write (data, cb) {
cb(new Error('No body allowed for this entry'))
}
}
class Pack extends Readable {
constructor (opts) {
super(opts)
this._drain = noop
this._finalized = false
this._finalizing = false
this._stream = null
}
entry (header, buffer, callback) {
if (this._stream) throw new Error('already piping an entry')
if (this._finalized || this.destroyed) return
if (typeof buffer === 'function') {
callback = buffer
buffer = null
}
if (!callback) callback = noop
const self = this
if (!header.size || header.type === 'symlink') header.size = 0
if (!header.type) header.type = modeToType(header.mode)
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
if (!header.uid) header.uid = 0
if (!header.gid) header.gid = 0
if (!header.mtime) header.mtime = new Date()
if (typeof buffer === 'string') buffer = b4a.from(buffer)
if (b4a.isBuffer(buffer)) {
header.size = buffer.byteLength
this._encode(header)
const ok = this.push(buffer)
overflow(self, header.size)
if (ok) process.nextTick(callback)
else this._drain = callback
return new Void()
}
if (header.type === 'symlink' && !header.linkname) {
const linkSink = new LinkSink()
linkSink
.on('error', function (err) {
self.destroy()
callback(err)
})
.on('close', function () {
header.linkname = linkSink.linkname
self._encode(header)
callback()
})
return linkSink
}
if (typeof buffer === 'string') buffer = Buffer.from(buffer)
if (Buffer.isBuffer(buffer)) {
header.size = buffer.length
this._encode(header)
var ok = this.push(buffer)
overflow(self, header.size)
if (ok) process.nextTick(callback)
else this._drain = callback
return new Void()
}
if (header.type === 'symlink' && !header.linkname) {
var linkSink = new LinkSink()
eos(linkSink, function (err) {
if (err) { // stream was closed
if (header.type !== 'file' && header.type !== 'contiguous-file') {
process.nextTick(callback)
return new Void()
}
const sink = new Sink(this)
sink
.on('error', function (err) {
self._stream = null
self.destroy()
return callback(err)
}
callback(err)
})
.on('close', function () {
self._stream = null
header.linkname = linkSink.linkname
self._encode(header)
callback()
})
if (sink.written !== header.size) { // corrupting tar
}
return linkSink
overflow(self, header.size)
if (self._finalizing) { self.finalize() }
callback()
})
this._stream = sink
return sink
}
this._encode(header)
if (header.type !== 'file' && header.type !== 'contiguous-file') {
process.nextTick(callback)
return new Void()
}
var sink = new Sink(this)
this._stream = sink
eos(sink, function (err) {
self._stream = null
if (err) { // stream was closed
self.destroy()
return callback(err)
}
if (sink.written !== header.size) { // corrupting tar
self.destroy()
return callback(new Error('size mismatch'))
}
overflow(self, header.size)
if (self._finalizing) self.finalize()
callback()
})
return sink
}
Pack.prototype.finalize = function () {
if (this._stream) {
this._finalizing = true
return
}
if (this._finalized) return
this._finalized = true
this.push(END_OF_TAR)
this.push(null)
}
Pack.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
if (err) this.emit('error', err)
this.emit('close')
if (this._stream && this._stream.destroy) this._stream.destroy()
}
Pack.prototype._encode = function (header) {
if (!header.pax) {
var buf = headers.encode(header)
if (buf) {
this.push(buf)
finalize () {
if (this._stream) {
this._finalizing = true
return
}
}
this._encodePax(header)
}
Pack.prototype._encodePax = function (header) {
var paxHeader = headers.encodePax({
name: header.name,
linkname: header.linkname,
pax: header.pax
})
var newHeader = {
name: 'PaxHeader',
mode: header.mode,
uid: header.uid,
gid: header.gid,
size: paxHeader.length,
mtime: header.mtime,
type: 'pax-header',
linkname: header.linkname && 'PaxHeader',
uname: header.uname,
gname: header.gname,
devmajor: header.devmajor,
devminor: header.devminor
if (this._finalized) return
this._finalized = true
this.push(END_OF_TAR)
this.push(null)
}
this.push(headers.encode(newHeader))
this.push(paxHeader)
overflow(this, paxHeader.length)
_encode (header) {
if (!header.pax) {
const buf = headers.encode(header)
if (buf) {
this.push(buf)
return
}
}
this._encodePax(header)
}
newHeader.size = header.size
newHeader.type = header.type
this.push(headers.encode(newHeader))
_encodePax (header) {
const paxHeader = headers.encodePax({
name: header.name,
linkname: header.linkname,
pax: header.pax
})
const newHeader = {
name: 'PaxHeader',
mode: header.mode,
uid: header.uid,
gid: header.gid,
size: paxHeader.byteLength,
mtime: header.mtime,
type: 'pax-header',
linkname: header.linkname && 'PaxHeader',
uname: header.uname,
gname: header.gname,
devmajor: header.devmajor,
devminor: header.devminor
}
this.push(headers.encode(newHeader))
this.push(paxHeader)
overflow(this, paxHeader.byteLength)
newHeader.size = header.size
newHeader.type = header.type
this.push(headers.encode(newHeader))
}
_read (cb) {
const drain = this._drain
this._drain = noop
drain()
cb()
}
}
Pack.prototype._read = function (n) {
var drain = this._drain
this._drain = noop
drain()
module.exports = function pack (opts) {
return new Pack(opts)
}
module.exports = Pack

View file

@@ -1,58 +1,32 @@
{
"name": "tar-stream",
"version": "2.2.0",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
"author": "Mathias Buus <mathiasbuus@gmail.com>",
"dependencies": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
},
"devDependencies": {
"concat-stream": "^2.0.0",
"standard": "^12.0.1",
"tape": "^4.9.2"
},
"scripts": {
"test": "standard && tape test/extract.js test/pack.js",
"test-all": "standard && tape test/*.js"
},
"keywords": [
"tar",
"tarball",
"parse",
"parser",
"generate",
"generator",
"stream",
"stream2",
"streams",
"streams2",
"streaming",
"pack",
"extract",
"modify"
],
"bugs": {
"url": "https://github.com/mafintosh/tar-stream/issues"
},
"homepage": "https://github.com/mafintosh/tar-stream",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
"main": "index.js",
"files": [
"*.js",
"LICENSE"
"*.js"
],
"directories": {
"test": "test"
"scripts": {
"test": "standard && brittle test/*.js"
},
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/mafintosh/tar-stream.git"
},
"engines": {
"node": ">=6"
"author": "Mathias Buus <mathiasbuus@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/mafintosh/tar-stream/issues"
},
"homepage": "https://github.com/mafintosh/tar-stream",
"dependencies": {
"b4a": "^1.6.1",
"bl": "^6.0.0",
"streamx": "^2.12.5"
},
"devDependencies": {
"brittle": "^3.1.1",
"concat-stream": "^2.0.0",
"standard": "^17.0.0"
}
}

View file

@@ -1,10 +1,10 @@
var test = require('tape')
var tar = require('../index')
var fixtures = require('./fixtures')
var concat = require('concat-stream')
var fs = require('fs')
const test = require('brittle')
const concat = require('concat-stream')
const fs = require('fs')
const tar = require('..')
const fixtures = require('./fixtures')
var clamp = function (index, len, defaultValue) {
const clamp = function (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index // Coerce to integer.
if (index >= len) return len
@ -17,13 +17,13 @@ var clamp = function (index, len, defaultValue) {
test('one-file', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'test.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -38,8 +38,8 @@ test('one-file', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello world\n')
callback()
t.is(data.toString(), 'hello world\n')
cb()
}))
})
@ -53,13 +53,13 @@ test('one-file', function (t) {
test('chunked-one-file', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'test.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -74,8 +74,8 @@ test('chunked-one-file', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello world\n')
callback()
t.is(data.toString(), 'hello world\n')
cb()
}))
})
@ -83,10 +83,10 @@ test('chunked-one-file', function (t) {
t.ok(noEntries)
})
var b = fs.readFileSync(fixtures.ONE_FILE_TAR)
const b = fs.readFileSync(fixtures.ONE_FILE_TAR)
for (var i = 0; i < b.length; i += 321) {
extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
for (let i = 0; i < b.length; i += 321) {
extract.write(b.subarray(i, clamp(i + 321, b.length, b.length)))
}
extract.end()
})
@ -94,13 +94,13 @@ test('chunked-one-file', function (t) {
test('multi-file', function (t) {
t.plan(5)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
var onfile1 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile1 = function (header, stream, cb) {
t.alike(header, {
name: 'file-1.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -115,15 +115,15 @@ test('multi-file', function (t) {
extract.on('entry', onfile2)
stream.pipe(concat(function (data) {
t.same(data.toString(), 'i am file-1\n')
callback()
t.is(data.toString(), 'i am file-1\n')
cb()
}))
}
var onfile2 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile2 = function (header, stream, cb) {
t.alike(header, {
name: 'file-2.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -138,8 +138,8 @@ test('multi-file', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'i am file-2\n')
callback()
t.is(data.toString(), 'i am file-2\n')
cb()
}))
}
@ -155,13 +155,13 @@ test('multi-file', function (t) {
test('chunked-multi-file', function (t) {
t.plan(5)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
var onfile1 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile1 = function (header, stream, cb) {
t.alike(header, {
name: 'file-1.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -176,15 +176,15 @@ test('chunked-multi-file', function (t) {
extract.on('entry', onfile2)
stream.pipe(concat(function (data) {
t.same(data.toString(), 'i am file-1\n')
callback()
t.is(data.toString(), 'i am file-1\n')
cb()
}))
}
var onfile2 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile2 = function (header, stream, cb) {
t.alike(header, {
name: 'file-2.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -199,8 +199,8 @@ test('chunked-multi-file', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'i am file-2\n')
callback()
t.is(data.toString(), 'i am file-2\n')
cb()
}))
}
@ -210,9 +210,9 @@ test('chunked-multi-file', function (t) {
t.ok(noEntries)
})
var b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
for (var i = 0; i < b.length; i += 321) {
extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
const b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
for (let i = 0; i < b.length; i += 321) {
extract.write(b.subarray(i, clamp(i + 321, b.length, b.length)))
}
extract.end()
})
@ -220,13 +220,13 @@ test('chunked-multi-file', function (t) {
test('pax', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'pax.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -242,8 +242,8 @@ test('pax', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello world\n')
callback()
t.is(data.toString(), 'hello world\n')
cb()
}))
})
@ -257,13 +257,13 @@ test('pax', function (t) {
test('types', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
var ondir = function (header, stream, callback) {
t.deepEqual(header, {
const ondir = function (header, stream, cb) {
t.alike(header, {
name: 'directory',
mode: parseInt('755', 8),
mode: 0o755,
uid: 501,
gid: 20,
size: 0,
@ -279,13 +279,13 @@ test('types', function (t) {
t.ok(false)
})
extract.once('entry', onlink)
callback()
cb()
}
var onlink = function (header, stream, callback) {
t.deepEqual(header, {
const onlink = function (header, stream, cb) {
t.alike(header, {
name: 'directory-link',
mode: parseInt('755', 8),
mode: 0o755,
uid: 501,
gid: 20,
size: 0,
@ -301,7 +301,7 @@ test('types', function (t) {
t.ok(false)
})
noEntries = true
callback()
cb()
}
extract.once('entry', ondir)
@ -316,13 +316,13 @@ test('types', function (t) {
test('long-name', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 16,
@ -337,8 +337,8 @@ test('long-name', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello long name\n')
callback()
t.is(data.toString(), 'hello long name\n')
cb()
}))
})
@ -352,13 +352,13 @@ test('long-name', function (t) {
test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'høllø.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 4,
@ -374,8 +374,8 @@ test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hej\n')
callback()
t.is(data.toString(), 'hej\n')
cb()
}))
})
@ -389,13 +389,13 @@ test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'høstål.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 8,
@ -411,8 +411,8 @@ test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'høllø\n')
callback()
t.is(data.toString(), 'høllø\n')
cb()
}))
})
@ -426,19 +426,19 @@ test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
test('name-is-100', function (t) {
t.plan(3)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
t.same(header.name.length, 100)
extract.on('entry', function (header, stream, cb) {
t.is(header.name.length, 100)
stream.pipe(concat(function (data) {
t.same(data.toString(), 'hello\n')
callback()
t.is(data.toString(), 'hello\n')
cb()
}))
})
extract.on('finish', function () {
t.ok(true)
t.pass()
})
extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
@ -447,7 +447,7 @@ test('name-is-100', function (t) {
test('invalid-file', function (t) {
t.plan(1)
var extract = tar.extract()
const extract = tar.extract()
extract.on('error', function (err) {
t.ok(!!err)
@ -460,15 +460,15 @@ test('invalid-file', function (t) {
test('space prefixed', function (t) {
t.plan(5)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
t.ok(true)
callback()
extract.on('entry', function (header, stream, cb) {
t.pass()
cb()
})
extract.on('finish', function () {
t.ok(true)
t.pass()
})
extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
@ -477,15 +477,15 @@ test('space prefixed', function (t) {
test('gnu long path', function (t) {
t.plan(2)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
extract.on('entry', function (header, stream, cb) {
t.ok(header.name.length > 100)
callback()
cb()
})
extract.on('finish', function () {
t.ok(true)
t.pass()
})
extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
@ -493,12 +493,12 @@ test('gnu long path', function (t) {
test('base 256 uid and gid', function (t) {
t.plan(2)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
extract.on('entry', function (header, stream, cb) {
t.ok(header.uid === 116435139)
t.ok(header.gid === 1876110778)
callback()
cb()
})
extract.end(fs.readFileSync(fixtures.BASE_256_UID_GID))
@ -507,12 +507,12 @@ test('base 256 uid and gid', function (t) {
test('base 256 size', function (t) {
t.plan(2)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'test.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -524,11 +524,11 @@ test('base 256 size', function (t) {
devmajor: 0,
devminor: 0
})
callback()
cb()
})
extract.on('finish', function () {
t.ok(true)
t.pass()
})
extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
@ -538,13 +538,13 @@ test('latin-1', function (t) { // can unpack filenames encoded in latin-1
t.plan(3)
// This is the older name for the "latin1" encoding in Node
var extract = tar.extract({ filenameEncoding: 'binary' })
var noEntries = false
const extract = tar.extract({ filenameEncoding: 'binary' })
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'En français, s\'il vous plaît?.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 0,
gid: 0,
size: 14,
@ -559,8 +559,8 @@ test('latin-1', function (t) { // can unpack filenames encoded in latin-1
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'Hello, world!\n')
callback()
t.is(data.toString(), 'Hello, world!\n')
cb()
}))
})
@ -574,14 +574,14 @@ test('latin-1', function (t) { // can unpack filenames encoded in latin-1
test('incomplete', function (t) {
t.plan(1)
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function (header, stream, callback) {
callback()
extract.on('entry', function (header, stream, cb) {
cb()
})
extract.on('error', function (err) {
t.same(err.message, 'Unexpected end of data')
t.is(err.message, 'Unexpected end of data')
})
extract.on('finish', function () {
@ -594,13 +594,13 @@ test('incomplete', function (t) {
test('gnu', function (t) { // can correctly unpack gnu-tar format
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'test.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 12345,
gid: 67890,
size: 14,
@ -615,8 +615,8 @@ test('gnu', function (t) { // can correctly unpack gnu-tar format
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'Hello, world!\n')
callback()
t.is(data.toString(), 'Hello, world!\n')
cb()
}))
})
@ -634,13 +634,13 @@ test('gnu-incremental', function (t) {
// for a directory prefix (also offset 345).
t.plan(3)
var extract = tar.extract()
var noEntries = false
const extract = tar.extract()
let noEntries = false
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
extract.on('entry', function (header, stream, cb) {
t.alike(header, {
name: 'test.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 12345,
gid: 67890,
size: 14,
@ -655,8 +655,8 @@ test('gnu-incremental', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'Hello, world!\n')
callback()
t.is(data.toString(), 'Hello, world!\n')
cb()
}))
})
@ -670,7 +670,7 @@ test('gnu-incremental', function (t) {
test('v7 unsupported', function (t) { // correctly fails to parse v7 tarballs
t.plan(1)
var extract = tar.extract()
const extract = tar.extract()
extract.on('error', function (err) {
t.ok(!!err)
@ -683,7 +683,7 @@ test('v7 unsupported', function (t) { // correctly fails to parse v7 tarballs
test('unknown format doesn\'t extract by default', function (t) {
t.plan(1)
var extract = tar.extract()
const extract = tar.extract()
extract.on('error', function (err) {
t.ok(!!err)
@ -696,13 +696,13 @@ test('unknown format doesn\'t extract by default', function (t) {
test('unknown format attempts to extract if allowed', function (t) {
t.plan(5)
var extract = tar.extract({ allowUnknownFormat: true })
var noEntries = false
const extract = tar.extract({ allowUnknownFormat: true })
let noEntries = false
var onfile1 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile1 = function (header, stream, cb) {
t.alike(header, {
name: 'file-1.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -717,15 +717,15 @@ test('unknown format attempts to extract if allowed', function (t) {
extract.on('entry', onfile2)
stream.pipe(concat(function (data) {
t.same(data.toString(), 'i am file-1\n')
callback()
t.is(data.toString(), 'i am file-1\n')
cb()
}))
}
var onfile2 = function (header, stream, callback) {
t.deepEqual(header, {
const onfile2 = function (header, stream, cb) {
t.alike(header, {
name: 'file-2.txt',
mode: parseInt('644', 8),
mode: 0o644,
uid: 501,
gid: 20,
size: 12,
@ -740,8 +740,8 @@ test('unknown format attempts to extract if allowed', function (t) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'i am file-2\n')
callback()
t.is(data.toString(), 'i am file-2\n')
cb()
}))
}

View file

@ -1,4 +1,4 @@
var path = require('path')
const path = require('path')
exports.ONE_FILE_TAR = path.join(__dirname, 'one-file.tar')
exports.MULTI_FILE_TAR = path.join(__dirname, 'multi-file.tar')

View file

@ -1,19 +1,20 @@
var test = require('tape')
var tar = require('../index')
var fixtures = require('./fixtures')
var concat = require('concat-stream')
var fs = require('fs')
var Writable = require('readable-stream').Writable
const test = require('brittle')
const concat = require('concat-stream')
const fs = require('fs')
const b4a = require('b4a')
const { Writable } = require('streamx')
const tar = require('..')
const fixtures = require('./fixtures')
test('one-file', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'test.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -23,20 +24,20 @@ test('one-file', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.ONE_FILE_TAR))
}))
})
test('multi-file', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'file-1.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -46,7 +47,7 @@ test('multi-file', function (t) {
pack.entry({
name: 'file-2.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
size: 12,
uname: 'maf',
gname: 'staff',
@ -57,20 +58,20 @@ test('multi-file', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.MULTI_FILE_TAR))
}))
})
test('pax', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'pax.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -81,21 +82,21 @@ test('pax', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
// fs.writeFileSync('tmp.tar', data)
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.PAX_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.PAX_TAR))
}))
})
test('types', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'directory',
mtime: new Date(1387580181000),
type: 'directory',
mode: parseInt('755', 8),
mode: 0o755,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -107,7 +108,7 @@ test('types', function (t) {
mtime: new Date(1387580181000),
type: 'symlink',
linkname: 'directory',
mode: parseInt('755', 8),
mode: 0o755,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -118,20 +119,21 @@ test('types', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.TYPES_TAR))
}))
})
test('long-name', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
mtime: new Date(1387580181000),
type: 'file',
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -141,19 +143,20 @@ test('long-name', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.LONG_NAME_TAR))
}))
})
test('large-uid-gid', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'test.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 1000000001,
@ -163,21 +166,22 @@ test('large-uid-gid', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.LARGE_UID_GID))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.LARGE_UID_GID))
fs.writeFileSync('/tmp/foo', data)
}))
})
test('unicode', function (t) {
t.plan(2)
var pack = tar.pack()
const pack = tar.pack()
pack.entry({
name: 'høstål.txt',
mtime: new Date(1387580181000),
type: 'file',
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
@ -187,43 +191,49 @@ test('unicode', function (t) {
pack.finalize()
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR))
t.is(data.length & 511, 0)
t.alike(data, fs.readFileSync(fixtures.UNICODE_TAR))
}))
})
test('backpressure', function (t) {
var slowWritable = new Writable({ highWaterMark: 1 })
slowWritable._write = (chunk, enc, next) => {
setImmediate(next)
}
test('backpressure', async function (t) {
const end = t.test('end')
end.plan(1)
var pack = tar.pack()
var later = false
const slowStream = new Writable({
highWaterMark: 1,
setImmediate(() => {
later = true
write (data, cb) {
setImmediate(cb)
}
})
pack.pipe(slowWritable)
slowStream.on('finish', () => end.pass())
slowWritable.on('finish', () => t.end())
pack.on('end', () => t.ok(later))
const pack = tar.pack()
var i = 0
var next = () => {
let later = false
setImmediate(() => { later = true })
pack
.on('end', () => t.ok(later))
.pipe(slowStream)
let i = 0
const next = () => {
if (++i < 25) {
var header = {
const header = {
name: `file${i}.txt`,
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
mode: 0o644,
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}
var buffer = Buffer.alloc(1024)
const buffer = b4a.alloc(1024)
pack.entry(header, buffer, next)
} else {
@ -232,4 +242,6 @@ test('backpressure', function (t) {
}
next()
await end
})

View file

@ -1,27 +1,28 @@
var test = require('tape')
var stream = require('readable-stream')
var zlib = require('zlib')
var fs = require('fs')
var tar = require('../')
var fixtures = require('./fixtures')
const test = require('brittle')
const zlib = require('zlib')
const fs = require('fs')
const { Writable } = require('streamx')
const tar = require('../..')
const fixtures = require('../fixtures')
test('huge', function (t) {
t.plan(1)
var extract = tar.extract()
var noEntries = false
var hugeFileSize = 8804630528 // ~8.2GB
var dataLength = 0
const extract = tar.extract()
let noEntries = false
const hugeFileSize = 8804630528 // ~8.2GB
let dataLength = 0
var countStream = new stream.Writable()
countStream._write = function (chunk, encoding, done) {
dataLength += chunk.length
done()
}
const countStream = new Writable({
write (data, cb) {
dataLength += data.length
cb()
}
})
// Make sure we read the correct pax size entry for a file larger than 8GB.
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
t.alike(header, {
devmajor: 0,
devminor: 0,
gid: 20,
@ -51,10 +52,10 @@ test('huge', function (t) {
extract.on('finish', function () {
t.ok(noEntries)
t.equal(dataLength, hugeFileSize)
t.is(dataLength, hugeFileSize)
})
var gunzip = zlib.createGunzip()
var reader = fs.createReadStream(fixtures.HUGE)
const gunzip = zlib.createGunzip()
const reader = fs.createReadStream(fixtures.HUGE)
reader.pipe(gunzip).pipe(extract)
})