be standard

Mathias Buus 2015-11-06 15:30:21 -08:00
parent af1abde6a4
commit 33a55ac474
5 changed files with 208 additions and 212 deletions

headers.js

@@ -3,7 +3,7 @@ var ZERO_OFFSET = '0'.charCodeAt(0)
 var USTAR = 'ustar\x0000'
 var MASK = parseInt('7777', 8)
-var clamp = function(index, len, defaultValue) {
+var clamp = function (index, len, defaultValue) {
   if (typeof index !== 'number') return defaultValue
   index = ~~index // Coerce to integer.
   if (index >= len) return len
@@ -13,7 +13,7 @@ var clamp = function(index, len, defaultValue) {
   return 0
 }
-var toType = function(flag) {
+var toType = function (flag) {
   switch (flag) {
     case 0:
       return 'file'
@@ -45,7 +45,7 @@ var toType = function(flag) {
   return null
 }
-var toTypeflag = function(flag) {
+var toTypeflag = function (flag) {
   switch (flag) {
     case 'file':
       return 0
@@ -70,32 +70,32 @@ var toTypeflag = function(flag) {
   return 0
 }
-var alloc = function(size) {
+var alloc = function (size) {
   var buf = new Buffer(size)
   buf.fill(0)
   return buf
 }
-var indexOf = function(block, num, offset, end) {
+var indexOf = function (block, num, offset, end) {
   for (; offset < end; offset++) {
     if (block[offset] === num) return offset
   }
   return end
 }
-var cksum = function(block) {
+var cksum = function (block) {
   var sum = 8 * 32
   for (var i = 0; i < 148; i++) sum += block[i]
-  for (var i = 156; i < 512; i++) sum += block[i]
+  for (var j = 156; j < 512; j++) sum += block[j]
   return sum
 }
-var encodeOct = function(val, n) {
+var encodeOct = function (val, n) {
   val = val.toString(8)
-  return ZEROS.slice(0, n-val.length)+val+' '
+  return ZEROS.slice(0, n - val.length) + val + ' '
 }
-var decodeOct = function(val, offset) {
+var decodeOct = function (val, offset) {
   // Older versions of tar can prefix with spaces
   while (offset < val.length && val[offset] === 32) offset++
   var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
@@ -104,43 +104,42 @@ var decodeOct = function(val, offset) {
   return parseInt(val.slice(offset, end).toString(), 8)
 }
-var decodeStr = function(val, offset, length) {
-  return val.slice(offset, indexOf(val, 0, offset, offset+length)).toString();
+var decodeStr = function (val, offset, length) {
+  return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString()
 }
-var addLength = function(str) {
+var addLength = function (str) {
   var len = Buffer.byteLength(str)
   var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
   if (len + digits > Math.pow(10, digits)) digits++
-  return (len+digits)+str
+  return (len + digits) + str
 }
-exports.decodeLongPath = function(buf) {
+exports.decodeLongPath = function (buf) {
   return decodeStr(buf, 0, buf.length)
 }
-exports.encodePax = function(opts) { // TODO: encode more stuff in pax
+exports.encodePax = function (opts) { // TODO: encode more stuff in pax
   var result = ''
-  if (opts.name) result += addLength(' path='+opts.name+'\n')
-  if (opts.linkname) result += addLength(' linkpath='+opts.linkname+'\n')
+  if (opts.name) result += addLength(' path=' + opts.name + '\n')
+  if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
   return new Buffer(result)
 }
-exports.decodePax = function(buf) {
+exports.decodePax = function (buf) {
   var result = {}
   while (buf.length) {
     var i = 0
     while (i < buf.length && buf[i] !== 32) i++
-    var len = parseInt(buf.slice(0, i).toString())
+    var len = parseInt(buf.slice(0, i).toString(), 10)
     if (!len) return result
-    var b = buf.slice(i+1, len-1).toString()
+    var b = buf.slice(i + 1, len - 1).toString()
     var keyIndex = b.indexOf('=')
     if (keyIndex === -1) return result
-    result[b.slice(0, keyIndex)] = b.slice(keyIndex+1)
+    result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
     buf = buf.slice(len)
   }
@@ -148,19 +147,19 @@ exports.decodePax = function(buf) {
   return result
 }
-exports.encode = function(opts) {
+exports.encode = function (opts) {
   var buf = alloc(512)
   var name = opts.name
   var prefix = ''
-  if (opts.typeflag === 5 && name[name.length-1] !== '/') name += '/'
+  if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
   if (Buffer.byteLength(name) !== name.length) return null // utf-8
   while (Buffer.byteLength(name) > 100) {
     var i = name.indexOf('/')
     if (i === -1) return null
     prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
-    name = name.slice(i+1)
+    name = name.slice(i + 1)
   }
   if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
@@ -190,7 +189,7 @@ exports.encode = function(opts) {
   return buf
 }
-exports.decode = function(buf) {
+exports.decode = function (buf) {
   var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
   var name = decodeStr(buf, 0, 100)
@@ -206,18 +205,18 @@ exports.decode = function(buf) {
   var devmajor = decodeOct(buf, 329)
   var devminor = decodeOct(buf, 337)
-  if (buf[345]) name = decodeStr(buf, 345, 155)+'/'+name
+  if (buf[345]) name = decodeStr(buf, 345, 155) + '/' + name
   // to support old tar versions that use trailing / to indicate dirs
   if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
   var c = cksum(buf)
-  //checksum is still initial value if header was null.
-  if (c === 8*32) return null
-  //valid checksum
-  if (c !== decodeOct(buf, 148)) throw new Error("Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?")
+  // checksum is still initial value if header was null.
+  if (c === 8 * 32) return null
+  // valid checksum
+  if (c !== decodeOct(buf, 148)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
   return {
     name: name,
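
Aside, not part of the commit: besides the spacing that standard enforces, these headers.js hunks touch two details worth spelling out: the checksum seed 8 * 32 (the 8-byte checksum field at offsets 148-155 is summed as if it held ASCII spaces) and the explicit radix now passed to parseInt when reading a pax record length. A minimal sketch of the checksum rule follows; the function name and block variable are hypothetical, not code from this commit.

// Sketch only: the rule implemented by cksum() above.
// A tar header checksum sums all 512 header bytes, with the 8-byte
// checksum field itself (offsets 148-155) counted as spaces, ASCII 32,
// which is where the 8 * 32 seed comes from.
var headerChecksum = function (block) {
  var sum = 8 * 32
  for (var i = 0; i < 148; i++) sum += block[i]
  for (var j = 156; j < 512; j++) sum += block[j]
  return sum
}

// An all-zero block sums to exactly 8 * 32 (256), which is why decode()
// treats that value as an empty trailer block and returns null.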

pack.js

@@ -3,7 +3,6 @@ var eos = require('end-of-stream')
 var util = require('util')
 var Readable = require('readable-stream').Readable
-var PassThrough = require('readable-stream').PassThrough
 var Writable = require('readable-stream').Writable
 var StringDecoder = require('string_decoder').StringDecoder
@@ -15,14 +14,14 @@ var FMODE = parseInt('644', 8)
 var END_OF_TAR = new Buffer(1024)
 END_OF_TAR.fill(0)
-var noop = function() {}
-var overflow = function(self, size) {
+var noop = function () {}
+var overflow = function (self, size) {
   size &= 511
   if (size) self.push(END_OF_TAR.slice(0, 512 - size))
 }
-function modeToType(mode) {
+function modeToType (mode) {
   switch (mode & constants.S_IFMT) {
     case constants.S_IFBLK: return 'block-device'
     case constants.S_IFCHR: return 'character-device'
@@ -34,7 +33,7 @@ function modeToType(mode) {
   return 'file'
 }
-var Sink = function(to) {
+var Sink = function (to) {
   Writable.call(this)
   this.written = 0
   this._to = to
@@ -43,13 +42,13 @@ var Sink = function(to) {
 util.inherits(Sink, Writable)
-Sink.prototype._write = function(data, enc, cb) {
+Sink.prototype._write = function (data, enc, cb) {
   this.written += data.length
   if (this._to.push(data)) return cb()
   this._to._drain = cb
 }
-Sink.prototype.destroy = function() {
+Sink.prototype.destroy = function () {
   if (this._destroyed) return
   this._destroyed = true
   this.emit('close')
@@ -69,30 +68,30 @@ LinkSink.prototype._write = function (data, enc, cb) {
   cb()
 }
-LinkSink.prototype.destroy = function() {
+LinkSink.prototype.destroy = function () {
   if (this._destroyed) return
   this._destroyed = true
   this.emit('close')
 }
-var Void = function() {
+var Void = function () {
   Writable.call(this)
   this._destroyed = false
 }
 util.inherits(Void, Writable)
-Void.prototype._write = function(data, enc, cb) {
+Void.prototype._write = function (data, enc, cb) {
   cb(new Error('No body allowed for this entry'))
 }
-Void.prototype.destroy = function() {
+Void.prototype.destroy = function () {
   if (this._destroyed) return
   this._destroyed = true
   this.emit('close')
 }
-var Pack = function(opts) {
+var Pack = function (opts) {
   if (!(this instanceof Pack)) return new Pack(opts)
   Readable.call(this, opts)
@@ -105,7 +104,7 @@ var Pack = function(opts) {
 util.inherits(Pack, Readable)
-Pack.prototype.entry = function(header, buffer, callback) {
+Pack.prototype.entry = function (header, buffer, callback) {
   if (this._stream) throw new Error('already piping an entry')
   if (this._finalized || this._destroyed) return
@@ -137,7 +136,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
   if (header.type === 'symlink' && !header.linkname) {
     var linkSink = new LinkSink()
-    eos(linkSink, function(err) {
+    eos(linkSink, function (err) {
       if (err) { // stream was closed
         self.destroy()
         return callback(err)
@@ -162,7 +161,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
   this._stream = sink
-  eos(sink, function(err) {
+  eos(sink, function (err) {
     self._stream = null
     if (err) { // stream was closed
@@ -183,7 +182,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
   return sink
 }
-Pack.prototype.finalize = function() {
+Pack.prototype.finalize = function () {
   if (this._stream) {
     this._finalizing = true
     return
@@ -195,7 +194,7 @@ Pack.prototype.finalize = function() {
   this.push(null)
 }
-Pack.prototype.destroy = function(err) {
+Pack.prototype.destroy = function (err) {
   if (this._destroyed) return
   this._destroyed = true
@@ -204,13 +203,13 @@ Pack.prototype.destroy = function(err) {
   if (this._stream && this._stream.destroy) this._stream.destroy()
 }
-Pack.prototype._encode = function(header) {
+Pack.prototype._encode = function (header) {
   var buf = headers.encode(header)
   if (buf) this.push(buf)
   else this._encodePax(header)
 }
-Pack.prototype._encodePax = function(header) {
+Pack.prototype._encodePax = function (header) {
   var paxHeader = headers.encodePax({
     name: header.name,
     linkname: header.linkname
@@ -240,7 +239,7 @@ Pack.prototype._encodePax = function(header) {
   this.push(headers.encode(newHeader))
 }
-Pack.prototype._read = function(n) {
+Pack.prototype._read = function (n) {
   var drain = this._drain
   this._drain = noop
   drain()
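
Aside, not part of the commit: the pack.js changes are style-only (plus dropping the unused PassThrough require), but the overflow() hunk is the one piece of real logic visible here: it pads each entry body up to tar's 512-byte block size. A standalone sketch of that arithmetic, with hypothetical names:

// Sketch only: the padding rule used by overflow() above.
// size & 511 is the remainder of size modulo 512; when it is non-zero,
// 512 - remainder zero bytes are pushed so the entry ends on a block
// boundary. The test suites rely on this when asserting
// data.length & 511 === 0 for the finished archive.
var padLength = function (size) {
  var remainder = size & 511
  return remainder === 0 ? 0 : 512 - remainder
}

// padLength(12) === 500: a 12-byte body is followed by 500 zero bytes.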

package.json

@@ -14,10 +14,11 @@
   },
   "devDependencies": {
     "concat-stream": "^1.4.6",
+    "standard": "^5.3.1",
     "tape": "^3.0.3"
   },
   "scripts": {
-    "test": "tape test/*.js"
+    "test": "standard && tape test/*.js"
   },
   "keywords": [
     "tar",

test/extract.js

@@ -4,7 +4,7 @@ var fixtures = require('./fixtures')
 var concat = require('concat-stream')
 var fs = require('fs')
-var clamp = function(index, len, defaultValue) {
+var clamp = function (index, len, defaultValue) {
   if (typeof index !== 'number') return defaultValue
   index = ~~index // Coerce to integer.
   if (index >= len) return len
@@ -14,16 +14,16 @@ var clamp = function(index, len, defaultValue) {
   return 0
 }
-test('one-file', function(t) {
+test('one-file', function (t) {
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'test.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -36,30 +36,30 @@ test('one-file', function(t) {
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'hello world\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR))
 })
-test('chunked-one-file', function(t) {
+test('chunked-one-file', function (t) {
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'test.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -72,36 +72,35 @@ test('chunked-one-file', function(t) {
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
      noEntries = true
       t.same(data.toString(), 'hello world\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   var b = fs.readFileSync(fixtures.ONE_FILE_TAR)
   for (var i = 0; i < b.length; i += 321) {
-    extract.write(b.slice(i, clamp(i+321, b.length, b.length)))
+    extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
   }
   extract.end()
 })
-test('multi-file', function(t) {
+test('multi-file', function (t) {
   t.plan(5)
   var extract = tar.extract()
   var noEntries = false
-  var onfile1 = function(header, stream, callback) {
+  var onfile1 = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'file-1.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -115,16 +114,16 @@ test('multi-file', function(t) {
     })
     extract.on('entry', onfile2)
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       t.same(data.toString(), 'i am file-1\n')
       callback()
     }))
   }
-  var onfile2 = function(header, stream, callback) {
+  var onfile2 = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'file-2.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -137,7 +136,7 @@ test('multi-file', function(t) {
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'i am file-2\n')
       callback()
@@ -146,23 +145,23 @@ test('multi-file', function(t) {
   extract.once('entry', onfile1)
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR))
 })
-test('chunked-multi-file', function(t) {
+test('chunked-multi-file', function (t) {
   t.plan(5)
   var extract = tar.extract()
   var noEntries = false
-  var onfile1 = function(header, stream, callback) {
+  var onfile1 = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'file-1.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -176,16 +175,16 @@ test('chunked-multi-file', function(t) {
     })
     extract.on('entry', onfile2)
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       t.same(data.toString(), 'i am file-1\n')
       callback()
     }))
   }
-  var onfile2 = function(header, stream, callback) {
+  var onfile2 = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'file-2.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 12,
@@ -198,7 +197,7 @@ test('chunked-multi-file', function(t) {
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'i am file-2\n')
       callback()
@@ -207,27 +206,27 @@ test('chunked-multi-file', function(t) {
   extract.once('entry', onfile1)
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   var b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
   for (var i = 0; i < b.length; i += 321) {
-    extract.write(b.slice(i, clamp(i+321, b.length, b.length)))
+    extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
   }
   extract.end()
 })
-test('types', function(t) {
+test('types', function (t) {
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  var ondir = function(header, stream, callback) {
+  var ondir = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'directory',
-      mode: 0755,
+      mode: parseInt('755', 8),
       uid: 501,
       gid: 20,
       size: 0,
@@ -239,17 +238,17 @@ test('types', function(t) {
       devmajor: 0,
       devminor: 0
     })
-    stream.on('data', function() {
+    stream.on('data', function () {
       t.ok(false)
     })
     extract.once('entry', onlink)
     callback()
   }
-  var onlink = function(header, stream, callback) {
+  var onlink = function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'directory-link',
-      mode: 0755,
+      mode: parseInt('755', 8),
       uid: 501,
       gid: 20,
       size: 0,
@@ -261,7 +260,7 @@ test('types', function(t) {
       devmajor: 0,
       devminor: 0
     })
-    stream.on('data', function() {
+    stream.on('data', function () {
       t.ok(false)
     })
     noEntries = true
@@ -270,23 +269,23 @@ test('types', function(t) {
   extract.once('entry', ondir)
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.TYPES_TAR))
 })
-test('long-name', function(t) {
+test('long-name', function (t) {
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 16,
@@ -299,30 +298,30 @@ test('long-name', function(t) {
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'hello long name\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR))
 })
-test('unicode-bsd', function(t) { // can unpack a bsdtar unicoded tarball
+test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'høllø.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 4,
@@ -335,30 +334,30 @@ test('unicode-bsd', function(t) { // can unpack a bsdtar unicoded tarball
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'hej\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR))
 })
-test('unicode', function(t) { // can unpack a bsdtar unicoded tarball
+test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
   t.plan(3)
   var extract = tar.extract()
   var noEntries = false
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.deepEqual(header, {
       name: 'høstål.txt',
-      mode: 0644,
+      mode: parseInt('644', 8),
       uid: 501,
       gid: 20,
       size: 8,
@@ -371,47 +370,47 @@ test('unicode', function(t) { // can unpack a bsdtar unicoded tarball
       devminor: 0
     })
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       noEntries = true
       t.same(data.toString(), 'høllø\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(noEntries)
   })
   extract.end(fs.readFileSync(fixtures.UNICODE_TAR))
 })
-test('name-is-100', function(t) {
+test('name-is-100', function (t) {
   t.plan(3)
   var extract = tar.extract()
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.same(header.name.length, 100)
-    stream.pipe(concat(function(data) {
+    stream.pipe(concat(function (data) {
       t.same(data.toString(), 'hello\n')
       callback()
     }))
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(true)
   })
   extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
 })
-test('invalid-file', function(t) {
+test('invalid-file', function (t) {
   t.plan(1)
   var extract = tar.extract()
-  extract.on('error', function(err) {
+  extract.on('error', function (err) {
     t.ok(!!err)
     extract.destroy()
   })
@@ -419,37 +418,36 @@ test('invalid-file', function(t) {
   extract.end(fs.readFileSync(fixtures.INVALID_TGZ))
 })
-test('space prefixed', function(t) {
+test('space prefixed', function (t) {
   t.plan(5)
   var extract = tar.extract()
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.ok(true)
     callback()
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
    t.ok(true)
   })
   extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
 })
-test('gnu long path', function(t) {
+test('gnu long path', function (t) {
   t.plan(2)
   var extract = tar.extract()
-  extract.on('entry', function(header, stream, callback) {
+  extract.on('entry', function (header, stream, callback) {
     t.ok(header.name.length > 100)
     callback()
   })
-  extract.on('finish', function() {
+  extract.on('finish', function () {
     t.ok(true)
   })
   extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
 })
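
Aside, not part of the commit: the change repeated throughout these tests, 0644 becoming parseInt('644', 8), is behavior-preserving. Legacy octal literals are a syntax error in strict mode and are flagged by standard, so the expected modes are now written as octal strings; the numeric value is unchanged. A small sketch of the equivalence, with hypothetical variable names:

// Sketch only: both spellings denote the same number.
var oldStyle = 420 // what the legacy literal 0644 evaluates to
var newStyle = parseInt('644', 8) // the spelling used after this commit
console.log(oldStyle === newStyle) // true
console.log(newStyle.toString(8)) // '644'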

test/pack.js

@@ -4,140 +4,139 @@ var fixtures = require('./fixtures')
 var concat = require('concat-stream')
 var fs = require('fs')
-test('one-file', function(t) {
+test('one-file', function (t) {
   t.plan(2)
   var pack = tar.pack()
   pack.entry({
-    name:'test.txt',
-    mtime:new Date(1387580181000),
-    mode:0644,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'test.txt',
+    mtime: new Date(1387580181000),
+    mode: parseInt('644', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   }, 'hello world\n')
   pack.finalize()
-  pack.pipe(concat(function(data) {
+  pack.pipe(concat(function (data) {
     t.same(data.length & 511, 0)
     t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR))
   }))
 })
-test('multi-file', function(t) {
+test('multi-file', function (t) {
   t.plan(2)
   var pack = tar.pack()
   pack.entry({
-    name:'file-1.txt',
-    mtime:new Date(1387580181000),
-    mode:0644,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'file-1.txt',
+    mtime: new Date(1387580181000),
+    mode: parseInt('644', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   }, 'i am file-1\n')
   pack.entry({
-    name:'file-2.txt',
-    mtime:new Date(1387580181000),
-    mode:0644,
-    size:12,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'file-2.txt',
+    mtime: new Date(1387580181000),
+    mode: parseInt('644', 8),
+    size: 12,
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   }).end('i am file-2\n')
   pack.finalize()
-  pack.pipe(concat(function(data) {
+  pack.pipe(concat(function (data) {
     t.same(data.length & 511, 0)
     t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR))
   }))
 })
-test('types', function(t) {
+test('types', function (t) {
   t.plan(2)
   var pack = tar.pack()
   pack.entry({
-    name:'directory',
-    mtime:new Date(1387580181000),
-    type:'directory',
-    mode:0755,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'directory',
+    mtime: new Date(1387580181000),
+    type: 'directory',
+    mode: parseInt('755', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   })
   pack.entry({
-    name:'directory-link',
-    mtime:new Date(1387580181000),
-    type:'symlink',
+    name: 'directory-link',
+    mtime: new Date(1387580181000),
+    type: 'symlink',
     linkname: 'directory',
-    mode:0755,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    mode: parseInt('755', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   })
   pack.finalize()
-  pack.pipe(concat(function(data) {
+  pack.pipe(concat(function (data) {
     t.equal(data.length & 511, 0)
     t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR))
   }))
 })
-test('long-name', function(t) {
+test('long-name', function (t) {
   t.plan(2)
   var pack = tar.pack()
   pack.entry({
-    name:'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
-    mtime:new Date(1387580181000),
-    type:'file',
-    mode:0644,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
+    mtime: new Date(1387580181000),
+    type: 'file',
+    mode: parseInt('644', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   }, 'hello long name\n')
   pack.finalize()
-  pack.pipe(concat(function(data) {
+  pack.pipe(concat(function (data) {
     t.equal(data.length & 511, 0)
     t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR))
   }))
 })
-test('unicode', function(t) {
+test('unicode', function (t) {
   t.plan(2)
   var pack = tar.pack()
   pack.entry({
-    name:'høstål.txt',
-    mtime:new Date(1387580181000),
-    type:'file',
-    mode:0644,
-    uname:'maf',
-    gname:'staff',
-    uid:501,
-    gid:20
+    name: 'høstål.txt',
+    mtime: new Date(1387580181000),
+    type: 'file',
+    mode: parseInt('644', 8),
+    uname: 'maf',
+    gname: 'staff',
+    uid: 501,
+    gid: 20
   }, 'høllø\n')
   pack.finalize()
-  pack.pipe(concat(function(data) {
+  pack.pipe(concat(function (data) {
     t.equal(data.length & 511, 0)
     t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR))
   }))