Merge branch 'master' of github.com:mafintosh/tar-stream

Conflicts:
	headers.js
Jesús Leganés Combarro "piranna" 2015-10-15 23:25:10 +02:00
commit 484c71ca0c
6 changed files with 30 additions and 25 deletions

View file

@@ -6,26 +6,26 @@ var headers = require('./headers')
var Writable = require('readable-stream').Writable
var PassThrough = require('readable-stream').PassThrough
var noop = function() {}
var noop = function () {}
var overflow = function(size) {
var overflow = function (size) {
size &= 511
return size && 512 - size
}
var emptyStream = function(self, offset) {
var emptyStream = function (self, offset) {
var s = new Source(self, offset)
s.end()
return s
}
var mixinPax = function(header, pax) {
var mixinPax = function (header, pax) {
if (pax.path) header.name = pax.path
if (pax.linkpath) header.linkname = pax.linkpath
return header
}
var Source = function(self, offset) {
var Source = function (self, offset) {
this._parent = self
this.offset = offset
PassThrough.call(this)
@@ -33,11 +33,11 @@ var Source = function(self, offset) {
util.inherits(Source, PassThrough)
Source.prototype.destroy = function(err) {
Source.prototype.destroy = function (err) {
this._parent.destroy(err)
}
var Extract = function(opts) {
var Extract = function (opts) {
if (!(this instanceof Extract)) return new Extract(opts)
Writable.call(this, opts)
@@ -59,17 +59,17 @@ var Extract = function(opts) {
var self = this
var b = self._buffer
var oncontinue = function() {
var oncontinue = function () {
self._continue()
}
var onunlock = function(err) {
var onunlock = function (err) {
self._locked = false
if (err) return self.destroy(err)
if (!self._stream) oncontinue()
}
var onstreamend = function() {
var onstreamend = function () {
self._stream = null
var drain = overflow(self._header.size)
if (drain) self._parse(drain, ondrain)
@@ -77,20 +77,20 @@ var Extract = function(opts) {
if (!self._locked) oncontinue()
}
var ondrain = function() {
var ondrain = function () {
self._buffer.consume(overflow(self._header.size))
self._parse(512, onheader)
oncontinue()
}
var onpaxglobalheader = function() {
var onpaxglobalheader = function () {
var size = self._header.size
self._paxGlobal = headers.decodePax(b.slice(0, size))
b.consume(size)
onstreamend()
}
var onpaxheader = function() {
var onpaxheader = function () {
var size = self._header.size
self._pax = headers.decodePax(b.slice(0, size))
if (self._paxGlobal) self._pax = xtend(self._paxGlobal, self._pax)
@@ -98,21 +98,21 @@ var Extract = function(opts) {
onstreamend()
}
var ongnulongpath = function() {
var ongnulongpath = function () {
var size = self._header.size
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size))
b.consume(size)
onstreamend()
}
var ongnulonglinkpath = function() {
var ongnulonglinkpath = function () {
var size = self._header.size
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size))
b.consume(size)
onstreamend()
}
var onheader = function() {
var onheader = function () {
var offset = self._offset
var header
try {
@@ -183,7 +183,7 @@ var Extract = function(opts) {
util.inherits(Extract, Writable)
Extract.prototype.destroy = function(err) {
Extract.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
@@ -192,14 +192,14 @@ Extract.prototype.destroy = function(err) {
if (this._stream) this._stream.emit('close')
}
Extract.prototype._parse = function(size, onparse) {
Extract.prototype._parse = function (size, onparse) {
if (this._destroyed) return
this._offset += size
this._missing = size
this._onparse = onparse
}
Extract.prototype._continue = function(err) {
Extract.prototype._continue = function () {
if (this._destroyed) return
var cb = this._cb
this._cb = noop
@@ -207,7 +207,7 @@ Extract.prototype._continue = function(err) {
else cb()
}
Extract.prototype._write = function(data, enc, cb) {
Extract.prototype._write = function (data, enc, cb) {
if (this._destroyed) return
var s = this._stream
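
The extract.js changes above are style-only (a space before the argument list, and _continue drops its unused err parameter), but the surrounding context shows the parser's core assumption: tar entries are stored in 512-byte blocks, and the overflow helper computes how many padding bytes follow an entry's data so ondrain can skip them before the next header. A minimal standalone sketch of that arithmetic (not part of the diff):

var overflow = function (size) {
  size &= 511               // bytes of the entry that spill into its last block
  return size && 512 - size // 0 if already aligned, otherwise padding to skip
}
overflow(512) // => 0   (entry ends exactly on a block boundary)
overflow(500) // => 12  (12 padding bytes precede the next header)
overflow(513) // => 511 (one byte used of the final block, 511 to skip)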

View file

@@ -190,13 +190,15 @@ exports.encode = function(opts) {
}
exports.decode = function(buf) {
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
var name = decodeStr(buf, 0, 100)
var mode = decodeOct(buf, 100)
var uid = decodeOct(buf, 108)
var gid = decodeOct(buf, 116)
var size = decodeOct(buf, 124)
var mtime = decodeOct(buf, 136)
var type = toType(buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET)
var type = toType(typeflag)
var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100)
var uname = decodeStr(buf, 265, 32)
var gname = decodeStr(buf, 297, 32)
@@ -205,6 +207,9 @@ exports.decode = function(buf) {
if (buf[345]) name = decodeStr(buf, 345, 155)+'/'+name
// to support old tar versions that use trailing / to indicate dirs
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
var c = cksum(buf)
//checksum is still initial value if header was null.
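
The headers.js hunk hoists the typeflag byte into a local and adds a compatibility rule: old, pre-POSIX tar writers often leave the typeflag as 0 (regular file) and mark directories with a trailing slash in the name instead, so decode() now promotes such entries to type 5. A minimal sketch of that rule in isolation (the values here are made up for illustration):

var typeflag = 0            // old archives leave this as "regular file"
var name = 'some/old/dir/'  // ...but append a slash to directory names
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
// typeflag is now 5, which toType() reports as a directory entry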

View file

@@ -1,2 +1,2 @@
exports.extract = require('./extract')
exports.pack = require('./pack')
exports.pack = require('./pack')
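
index.js simply re-exports the two entry points (the only visible change here is the final line, presumably a newline-at-end-of-file fix). For context, a minimal usage sketch of that public API, roughly as documented in the tar-stream README:

var tar = require('tar-stream')

var pack = tar.pack()        // readable stream producing a tarball
pack.entry({ name: 'hello.txt' }, 'Hello world!')
pack.finalize()

var extract = tar.extract()  // writable stream consuming a tarball
extract.on('entry', function (header, stream, callback) {
  // header.name, header.size, header.type, ...
  stream.on('end', callback)
  stream.resume()            // drain this entry before the next one is emitted
})

pack.pipe(extract)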

View file

@@ -1,6 +1,6 @@
{
"name": "tar-stream",
"version": "1.2.1",
"version": "1.2.2",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
"author": "Mathias Buus <mathiasbuus@gmail.com>",
"engines": {

View file

@@ -9,4 +9,4 @@ exports.UNICODE_TAR = path.join(__dirname, 'unicode.tar')
exports.NAME_IS_100_TAR = path.join(__dirname, 'name-is-100.tar')
exports.INVALID_TGZ = path.join(__dirname, 'invalid.tgz')
exports.SPACE_TAR_GZ = path.join(__dirname, 'space.tar')
exports.GNU_LONG_PATH = path.join(__dirname, 'gnu-long-path.tar')
exports.GNU_LONG_PATH = path.join(__dirname, 'gnu-long-path.tar')

View file

@@ -141,4 +141,4 @@ test('unicode', function(t) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR))
}))
})
})