be standard

Mathias Buus 2015-11-06 15:30:21 -08:00
parent af1abde6a4
commit 33a55ac474
5 changed files with 208 additions and 212 deletions

headers.js

@@ -3,7 +3,7 @@ var ZERO_OFFSET = '0'.charCodeAt(0)
var USTAR = 'ustar\x0000'
var MASK = parseInt('7777', 8)
var clamp = function(index, len, defaultValue) {
var clamp = function (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index // Coerce to integer.
if (index >= len) return len
@@ -13,89 +13,89 @@ var clamp = function(index, len, defaultValue) {
return 0
}
var toType = function(flag) {
var toType = function (flag) {
switch (flag) {
case 0:
return 'file'
return 'file'
case 1:
return 'link'
return 'link'
case 2:
return 'symlink'
return 'symlink'
case 3:
return 'character-device'
return 'character-device'
case 4:
return 'block-device'
return 'block-device'
case 5:
return 'directory'
return 'directory'
case 6:
return 'fifo'
return 'fifo'
case 7:
return 'contiguous-file'
return 'contiguous-file'
case 72:
return 'pax-header'
return 'pax-header'
case 55:
return 'pax-global-header'
return 'pax-global-header'
case 27:
return 'gnu-long-link-path'
return 'gnu-long-link-path'
case 28:
case 30:
return 'gnu-long-path'
return 'gnu-long-path'
}
return null
}
var toTypeflag = function(flag) {
var toTypeflag = function (flag) {
switch (flag) {
case 'file':
return 0
return 0
case 'link':
return 1
return 1
case 'symlink':
return 2
return 2
case 'character-device':
return 3
return 3
case 'block-device':
return 4
return 4
case 'directory':
return 5
return 5
case 'fifo':
return 6
return 6
case 'contiguous-file':
return 7
return 7
case 'pax-header':
return 72
return 72
}
return 0
}
var alloc = function(size) {
var alloc = function (size) {
var buf = new Buffer(size)
buf.fill(0)
return buf
}
var indexOf = function(block, num, offset, end) {
var indexOf = function (block, num, offset, end) {
for (; offset < end; offset++) {
if (block[offset] === num) return offset
}
return end
}
var cksum = function(block) {
var cksum = function (block) {
var sum = 8 * 32
for (var i = 0; i < 148; i++) sum += block[i]
for (var i = 156; i < 512; i++) sum += block[i]
for (var i = 0; i < 148; i++) sum += block[i]
for (var j = 156; j < 512; j++) sum += block[j]
return sum
}
var encodeOct = function(val, n) {
var encodeOct = function (val, n) {
val = val.toString(8)
return ZEROS.slice(0, n-val.length)+val+' '
return ZEROS.slice(0, n - val.length) + val + ' '
}
var decodeOct = function(val, offset) {
var decodeOct = function (val, offset) {
// Older versions of tar can prefix with spaces
while (offset < val.length && val[offset] === 32) offset++
var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
@@ -104,43 +104,42 @@ var decodeOct = function(val, offset) {
return parseInt(val.slice(offset, end).toString(), 8)
}
var decodeStr = function(val, offset, length) {
return val.slice(offset, indexOf(val, 0, offset, offset+length)).toString();
var decodeStr = function (val, offset, length) {
return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString()
}
var addLength = function(str) {
var addLength = function (str) {
var len = Buffer.byteLength(str)
var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
if (len + digits > Math.pow(10, digits)) digits++
return (len+digits)+str
return (len + digits) + str
}
exports.decodeLongPath = function(buf) {
exports.decodeLongPath = function (buf) {
return decodeStr(buf, 0, buf.length)
}
exports.encodePax = function(opts) { // TODO: encode more stuff in pax
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
var result = ''
if (opts.name) result += addLength(' path='+opts.name+'\n')
if (opts.linkname) result += addLength(' linkpath='+opts.linkname+'\n')
if (opts.name) result += addLength(' path=' + opts.name + '\n')
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
return new Buffer(result)
}
exports.decodePax = function(buf) {
exports.decodePax = function (buf) {
var result = {}
while (buf.length) {
var i = 0
while (i < buf.length && buf[i] !== 32) i++
var len = parseInt(buf.slice(0, i).toString())
var len = parseInt(buf.slice(0, i).toString(), 10)
if (!len) return result
var b = buf.slice(i+1, len-1).toString()
var b = buf.slice(i + 1, len - 1).toString()
var keyIndex = b.indexOf('=')
if (keyIndex === -1) return result
result[b.slice(0, keyIndex)] = b.slice(keyIndex+1)
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
buf = buf.slice(len)
}
@@ -148,19 +147,19 @@ exports.decodePax = function(buf) {
return result
}
exports.encode = function(opts) {
exports.encode = function (opts) {
var buf = alloc(512)
var name = opts.name
var prefix = ''
if (opts.typeflag === 5 && name[name.length-1] !== '/') name += '/'
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
if (Buffer.byteLength(name) !== name.length) return null // utf-8
while (Buffer.byteLength(name) > 100) {
var i = name.indexOf('/')
if (i === -1) return null
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
name = name.slice(i+1)
name = name.slice(i + 1)
}
if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
@@ -190,7 +189,7 @@ exports.encode = function(opts) {
return buf
}
exports.decode = function(buf) {
exports.decode = function (buf) {
var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
var name = decodeStr(buf, 0, 100)
@@ -206,18 +205,18 @@ exports.decode = function(buf) {
var devmajor = decodeOct(buf, 329)
var devminor = decodeOct(buf, 337)
if (buf[345]) name = decodeStr(buf, 345, 155)+'/'+name
if (buf[345]) name = decodeStr(buf, 345, 155) + '/' + name
// to support old tar versions that use trailing / to indicate dirs
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
var c = cksum(buf)
//checksum is still initial value if header was null.
if (c === 8*32) return null
// checksum is still initial value if header was null.
if (c === 8 * 32) return null
//valid checksum
if (c !== decodeOct(buf, 148)) throw new Error("Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?")
// valid checksum
if (c !== decodeOct(buf, 148)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
return {
name: name,
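
For context, a minimal round-trip sketch of the header codec changed above; the values are illustrative (not part of this commit) and it assumes the module is required from the repository root:

var headers = require('./headers')

// Encode a 512-byte ustar header block, then decode it back (illustrative values).
var block = headers.encode({
  name: 'hello.txt',
  mode: parseInt('644', 8),
  uid: 501,
  gid: 20,
  size: 12,
  mtime: new Date(1387580181000),
  type: 'file',
  uname: 'maf',
  gname: 'staff',
  devmajor: 0,
  devminor: 0
})

var header = headers.decode(block)
console.log(header.name, header.size) // -> hello.txt 12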

pack.js

@@ -3,7 +3,6 @@ var eos = require('end-of-stream')
var util = require('util')
var Readable = require('readable-stream').Readable
var PassThrough = require('readable-stream').PassThrough
var Writable = require('readable-stream').Writable
var StringDecoder = require('string_decoder').StringDecoder
@@ -15,14 +14,14 @@ var FMODE = parseInt('644', 8)
var END_OF_TAR = new Buffer(1024)
END_OF_TAR.fill(0)
var noop = function() {}
var noop = function () {}
var overflow = function(self, size) {
var overflow = function (self, size) {
size &= 511
if (size) self.push(END_OF_TAR.slice(0, 512 - size))
}
function modeToType(mode) {
function modeToType (mode) {
switch (mode & constants.S_IFMT) {
case constants.S_IFBLK: return 'block-device'
case constants.S_IFCHR: return 'character-device'
@@ -34,7 +33,7 @@ function modeToType(mode) {
return 'file'
}
var Sink = function(to) {
var Sink = function (to) {
Writable.call(this)
this.written = 0
this._to = to
@@ -43,13 +42,13 @@ var Sink = function(to) {
util.inherits(Sink, Writable)
Sink.prototype._write = function(data, enc, cb) {
Sink.prototype._write = function (data, enc, cb) {
this.written += data.length
if (this._to.push(data)) return cb()
this._to._drain = cb
}
Sink.prototype.destroy = function() {
Sink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
@@ -69,30 +68,30 @@ LinkSink.prototype._write = function (data, enc, cb) {
cb()
}
LinkSink.prototype.destroy = function() {
LinkSink.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Void = function() {
var Void = function () {
Writable.call(this)
this._destroyed = false
}
util.inherits(Void, Writable)
Void.prototype._write = function(data, enc, cb) {
Void.prototype._write = function (data, enc, cb) {
cb(new Error('No body allowed for this entry'))
}
Void.prototype.destroy = function() {
Void.prototype.destroy = function () {
if (this._destroyed) return
this._destroyed = true
this.emit('close')
}
var Pack = function(opts) {
var Pack = function (opts) {
if (!(this instanceof Pack)) return new Pack(opts)
Readable.call(this, opts)
@@ -105,7 +104,7 @@ var Pack = function(opts) {
util.inherits(Pack, Readable)
Pack.prototype.entry = function(header, buffer, callback) {
Pack.prototype.entry = function (header, buffer, callback) {
if (this._stream) throw new Error('already piping an entry')
if (this._finalized || this._destroyed) return
@@ -118,11 +117,11 @@ Pack.prototype.entry = function(header, buffer, callback) {
var self = this
if (!header.size) header.size = 0
if (!header.type) header.type = modeToType(header.mode)
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
if (!header.uid) header.uid = 0
if (!header.gid) header.gid = 0
if (!header.size) header.size = 0
if (!header.type) header.type = modeToType(header.mode)
if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
if (!header.uid) header.uid = 0
if (!header.gid) header.gid = 0
if (!header.mtime) header.mtime = new Date()
if (typeof buffer === 'string') buffer = new Buffer(buffer)
@@ -137,7 +136,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
if (header.type === 'symlink' && !header.linkname) {
var linkSink = new LinkSink()
eos(linkSink, function(err) {
eos(linkSink, function (err) {
if (err) { // stream was closed
self.destroy()
return callback(err)
@@ -162,7 +161,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
this._stream = sink
eos(sink, function(err) {
eos(sink, function (err) {
self._stream = null
if (err) { // stream was closed
@@ -183,7 +182,7 @@ Pack.prototype.entry = function(header, buffer, callback) {
return sink
}
Pack.prototype.finalize = function() {
Pack.prototype.finalize = function () {
if (this._stream) {
this._finalizing = true
return
@@ -195,7 +194,7 @@ Pack.prototype.finalize = function() {
this.push(null)
}
Pack.prototype.destroy = function(err) {
Pack.prototype.destroy = function (err) {
if (this._destroyed) return
this._destroyed = true
@@ -204,13 +203,13 @@ Pack.prototype.destroy = function(err) {
if (this._stream && this._stream.destroy) this._stream.destroy()
}
Pack.prototype._encode = function(header) {
Pack.prototype._encode = function (header) {
var buf = headers.encode(header)
if (buf) this.push(buf)
else this._encodePax(header)
}
Pack.prototype._encodePax = function(header) {
Pack.prototype._encodePax = function (header) {
var paxHeader = headers.encodePax({
name: header.name,
linkname: header.linkname
@@ -240,7 +239,7 @@ Pack.prototype._encodePax = function(header) {
this.push(headers.encode(newHeader))
}
Pack.prototype._read = function(n) {
Pack.prototype._read = function (n) {
var drain = this._drain
this._drain = noop
drain()
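
A minimal usage sketch of the entry/finalize flow refactored above, assuming the published tar-stream module; the file names are illustrative and not part of this commit:

var tar = require('tar-stream')
var fs = require('fs')

var pack = tar.pack()

// Stream an entry of known size; the callback fires once the entry has been flushed.
// 'hello.txt' and 'out.tar' are illustrative paths.
var entry = pack.entry({ name: 'hello.txt', size: fs.statSync('hello.txt').size }, function (err) {
  if (err) throw err
  pack.finalize() // no more entries: pushes the trailing 1024 zero bytes and ends the stream
})

fs.createReadStream('hello.txt').pipe(entry)
pack.pipe(fs.createWriteStream('out.tar'))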

package.json

@@ -14,10 +14,11 @@
},
"devDependencies": {
"concat-stream": "^1.4.6",
"standard": "^5.3.1",
"tape": "^3.0.3"
},
"scripts": {
"test": "tape test/*.js"
"test": "standard && tape test/*.js"
},
"keywords": [
"tar",

test/extract.js

@@ -4,7 +4,7 @@ var fixtures = require('./fixtures')
var concat = require('concat-stream')
var fs = require('fs')
var clamp = function(index, len, defaultValue) {
var clamp = function (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index // Coerce to integer.
if (index >= len) return len
@@ -14,16 +14,16 @@ var clamp = function(index, len, defaultValue) {
return 0
}
test('one-file', function(t) {
test('one-file', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
name: 'test.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -36,30 +36,30 @@ test('one-file', function(t) {
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello world\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR))
})
test('chunked-one-file', function(t) {
test('chunked-one-file', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
name: 'test.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -72,36 +72,35 @@ test('chunked-one-file', function(t) {
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello world\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
var b = fs.readFileSync(fixtures.ONE_FILE_TAR)
for (var i = 0; i < b.length; i += 321) {
extract.write(b.slice(i, clamp(i+321, b.length, b.length)))
extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
}
extract.end()
})
test('multi-file', function(t) {
test('multi-file', function (t) {
t.plan(5)
var extract = tar.extract()
var noEntries = false
var onfile1 = function(header, stream, callback) {
var onfile1 = function (header, stream, callback) {
t.deepEqual(header, {
name: 'file-1.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -115,16 +114,16 @@ test('multi-file', function(t) {
})
extract.on('entry', onfile2)
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
t.same(data.toString(), 'i am file-1\n')
callback()
}))
}
var onfile2 = function(header, stream, callback) {
var onfile2 = function (header, stream, callback) {
t.deepEqual(header, {
name: 'file-2.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -137,7 +136,7 @@ test('multi-file', function(t) {
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'i am file-2\n')
callback()
@@ -146,23 +145,23 @@ test('multi-file', function(t) {
extract.once('entry', onfile1)
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR))
})
test('chunked-multi-file', function(t) {
test('chunked-multi-file', function (t) {
t.plan(5)
var extract = tar.extract()
var noEntries = false
var onfile1 = function(header, stream, callback) {
var onfile1 = function (header, stream, callback) {
t.deepEqual(header, {
name: 'file-1.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -176,16 +175,16 @@ test('chunked-multi-file', function(t) {
})
extract.on('entry', onfile2)
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
t.same(data.toString(), 'i am file-1\n')
callback()
}))
}
var onfile2 = function(header, stream, callback) {
var onfile2 = function (header, stream, callback) {
t.deepEqual(header, {
name: 'file-2.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 12,
@@ -198,7 +197,7 @@ test('chunked-multi-file', function(t) {
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'i am file-2\n')
callback()
@@ -207,27 +206,27 @@ test('chunked-multi-file', function(t) {
extract.once('entry', onfile1)
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
var b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
for (var i = 0; i < b.length; i += 321) {
extract.write(b.slice(i, clamp(i+321, b.length, b.length)))
extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
}
extract.end()
})
test('types', function(t) {
test('types', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
var ondir = function(header, stream, callback) {
var ondir = function (header, stream, callback) {
t.deepEqual(header, {
name: 'directory',
mode: 0755,
mode: parseInt('755', 8),
uid: 501,
gid: 20,
size: 0,
@@ -239,17 +238,17 @@ test('types', function(t) {
devmajor: 0,
devminor: 0
})
stream.on('data', function() {
stream.on('data', function () {
t.ok(false)
})
extract.once('entry', onlink)
callback()
}
var onlink = function(header, stream, callback) {
var onlink = function (header, stream, callback) {
t.deepEqual(header, {
name: 'directory-link',
mode: 0755,
mode: parseInt('755', 8),
uid: 501,
gid: 20,
size: 0,
@@ -261,7 +260,7 @@ test('types', function(t) {
devmajor: 0,
devminor: 0
})
stream.on('data', function() {
stream.on('data', function () {
t.ok(false)
})
noEntries = true
@@ -270,23 +269,23 @@ test('types', function(t) {
extract.once('entry', ondir)
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.TYPES_TAR))
})
test('long-name', function(t) {
test('long-name', function (t) {
t.plan(3)
var extract = tar.extract()
var noEntries = false
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 16,
@@ -299,30 +298,30 @@ test('long-name', function(t) {
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hello long name\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR))
})
test('unicode-bsd', function(t) { // can unpack a bsdtar unicoded tarball
test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
t.plan(3)
var extract = tar.extract()
var noEntries = false
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
name: 'høllø.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 4,
@@ -335,30 +334,30 @@ test('unicode-bsd', function(t) { // can unpack a bsdtar unicoded tarball
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'hej\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR))
})
test('unicode', function(t) { // can unpack a bsdtar unicoded tarball
test('unicode', function (t) { // can unpack a bsdtar unicoded tarball
t.plan(3)
var extract = tar.extract()
var noEntries = false
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.deepEqual(header, {
name: 'høstål.txt',
mode: 0644,
mode: parseInt('644', 8),
uid: 501,
gid: 20,
size: 8,
@@ -371,47 +370,47 @@ test('unicode', function(t) { // can unpack a bsdtar unicoded tarball
devminor: 0
})
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
noEntries = true
t.same(data.toString(), 'høllø\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(noEntries)
})
extract.end(fs.readFileSync(fixtures.UNICODE_TAR))
})
test('name-is-100', function(t) {
test('name-is-100', function (t) {
t.plan(3)
var extract = tar.extract()
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.same(header.name.length, 100)
stream.pipe(concat(function(data) {
stream.pipe(concat(function (data) {
t.same(data.toString(), 'hello\n')
callback()
}))
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(true)
})
extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
})
test('invalid-file', function(t) {
test('invalid-file', function (t) {
t.plan(1)
var extract = tar.extract()
extract.on('error', function(err) {
extract.on('error', function (err) {
t.ok(!!err)
extract.destroy()
})
@@ -419,37 +418,36 @@ test('invalid-file', function(t) {
extract.end(fs.readFileSync(fixtures.INVALID_TGZ))
})
test('space prefixed', function(t) {
test('space prefixed', function (t) {
t.plan(5)
var extract = tar.extract()
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.ok(true)
callback()
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(true)
})
extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
})
test('gnu long path', function(t) {
test('gnu long path', function (t) {
t.plan(2)
var extract = tar.extract()
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
t.ok(header.name.length > 100)
callback()
})
extract.on('finish', function() {
extract.on('finish', function () {
t.ok(true)
})
extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
})
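
The invalid-file case above feeds a gzipped fixture straight into the extractor and expects the "needs to be gunzipped" error; for reference, a sketch of the intended combination with Node's core zlib when the input really is gzipped (the file name is illustrative):

var tar = require('tar-stream')
var zlib = require('zlib')
var fs = require('fs')

var extract = tar.extract()

extract.on('entry', function (header, stream, callback) {
  // header carries name, mode, uid, gid, size, mtime, type, etc., as asserted in the tests above
  stream.on('end', callback)
  stream.resume() // drain the entry body
})

extract.on('finish', function () {
  console.log('done')
})

fs.createReadStream('archive.tar.gz') // illustrative gzipped tarball
  .pipe(zlib.createGunzip())
  .pipe(extract)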

test/pack.js

@@ -4,140 +4,139 @@ var fixtures = require('./fixtures')
var concat = require('concat-stream')
var fs = require('fs')
test('one-file', function(t) {
test('one-file', function (t) {
t.plan(2)
var pack = tar.pack()
pack.entry({
name:'test.txt',
mtime:new Date(1387580181000),
mode:0644,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'test.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}, 'hello world\n')
pack.finalize()
pack.pipe(concat(function(data) {
pack.pipe(concat(function (data) {
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.ONE_FILE_TAR))
}))
})
test('multi-file', function(t) {
test('multi-file', function (t) {
t.plan(2)
var pack = tar.pack()
pack.entry({
name:'file-1.txt',
mtime:new Date(1387580181000),
mode:0644,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'file-1.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}, 'i am file-1\n')
pack.entry({
name:'file-2.txt',
mtime:new Date(1387580181000),
mode:0644,
size:12,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'file-2.txt',
mtime: new Date(1387580181000),
mode: parseInt('644', 8),
size: 12,
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}).end('i am file-2\n')
pack.finalize()
pack.pipe(concat(function(data) {
pack.pipe(concat(function (data) {
t.same(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.MULTI_FILE_TAR))
}))
})
test('types', function(t) {
test('types', function (t) {
t.plan(2)
var pack = tar.pack()
pack.entry({
name:'directory',
mtime:new Date(1387580181000),
type:'directory',
mode:0755,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'directory',
mtime: new Date(1387580181000),
type: 'directory',
mode: parseInt('755', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
})
pack.entry({
name:'directory-link',
mtime:new Date(1387580181000),
type:'symlink',
name: 'directory-link',
mtime: new Date(1387580181000),
type: 'symlink',
linkname: 'directory',
mode:0755,
uname:'maf',
gname:'staff',
uid:501,
gid:20
mode: parseInt('755', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
})
pack.finalize()
pack.pipe(concat(function(data) {
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.TYPES_TAR))
}))
})
test('long-name', function(t) {
test('long-name', function (t) {
t.plan(2)
var pack = tar.pack()
pack.entry({
name:'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
mtime:new Date(1387580181000),
type:'file',
mode:0644,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
mtime: new Date(1387580181000),
type: 'file',
mode: parseInt('644', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}, 'hello long name\n')
pack.finalize()
pack.pipe(concat(function(data) {
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.LONG_NAME_TAR))
}))
})
test('unicode', function(t) {
test('unicode', function (t) {
t.plan(2)
var pack = tar.pack()
pack.entry({
name:'høstål.txt',
mtime:new Date(1387580181000),
type:'file',
mode:0644,
uname:'maf',
gname:'staff',
uid:501,
gid:20
name: 'høstål.txt',
mtime: new Date(1387580181000),
type: 'file',
mode: parseInt('644', 8),
uname: 'maf',
gname: 'staff',
uid: 501,
gid: 20
}, 'høllø\n')
pack.finalize()
pack.pipe(concat(function(data) {
pack.pipe(concat(function (data) {
t.equal(data.length & 511, 0)
t.deepEqual(data, fs.readFileSync(fixtures.UNICODE_TAR))
}))