// tar-stream/test/extract.js

var test = require('tape')
var tar = require('../index')
var fixtures = require('./fixtures')
var concat = require('concat-stream')
var fs = require('fs')
var stream = require('stream')
var zlib = require('zlib')
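
// clamp bounds a slice end index to [0, len]: non-numeric values fall back to
// defaultValue and negative values count back from the end of the buffer.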
var clamp = function (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index // Coerce to integer.
  if (index >= len) return len
  if (index >= 0) return index
  index += len
  if (index >= 0) return index
  return 0
}
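
// A single-entry tarball: check the parsed header fields and stream out the file contents.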
test('one-file', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'test.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'hello world\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR))
})
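
// Same single-file tarball, but fed to the extractor in 321-byte chunks so that
// headers and file data are split across write() boundaries.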
test('chunked-one-file', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'test.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'hello world\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  var b = fs.readFileSync(fixtures.ONE_FILE_TAR)
  for (var i = 0; i < b.length; i += 321) {
    extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
  }
  extract.end()
})
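
// Two entries in sequence; the handler for the second entry is attached from
// inside the first one.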
test('multi-file', function (t) {
  t.plan(5)
  var extract = tar.extract()
  var noEntries = false

  var onfile1 = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'file-1.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })
    extract.on('entry', onfile2)

    stream.pipe(concat(function (data) {
      t.same(data.toString(), 'i am file-1\n')
      callback()
    }))
  }

  var onfile2 = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'file-2.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'i am file-2\n')
      callback()
    }))
  }
  extract.once('entry', onfile1)

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR))
})

test('chunked-multi-file', function (t) {
  t.plan(5)
  var extract = tar.extract()
  var noEntries = false

  var onfile1 = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'file-1.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })
    extract.on('entry', onfile2)

    stream.pipe(concat(function (data) {
      t.same(data.toString(), 'i am file-1\n')
      callback()
    }))
  }

  var onfile2 = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'file-2.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'i am file-2\n')
      callback()
    }))
  }
  extract.once('entry', onfile1)

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  var b = fs.readFileSync(fixtures.MULTI_FILE_TAR)
  for (var i = 0; i < b.length; i += 321) {
    extract.write(b.slice(i, clamp(i + 321, b.length, b.length)))
  }
  extract.end()
})
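
// Pax extended headers are exposed on header.pax alongside the regular fields.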
test('pax', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false
  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'pax.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: { path: 'pax.txt', special: 'sauce' }
    })
    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'hello world\n')
      callback()
    }))
  })
  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.PAX_TAR))
})
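
// Directory and symlink entries carry no file data, so their streams should never emit 'data'.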
test('types', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  var ondir = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'directory',
      mode: parseInt('755', 8),
      uid: 501,
      gid: 20,
      size: 0,
      mtime: new Date(1387580181000),
      type: 'directory',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.on('data', function () {
      t.ok(false)
    })
    extract.once('entry', onlink)
    callback()
  }

  var onlink = function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'directory-link',
      mode: parseInt('755', 8),
      uid: 501,
      gid: 20,
      size: 0,
      mtime: new Date(1387580181000),
      type: 'symlink',
      linkname: 'directory',
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.on('data', function () {
      t.ok(false)
    })
    noEntries = true
    callback()
  }
  extract.once('entry', ondir)

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.TYPES_TAR))
})
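
// A name longer than 100 characters spills into the ustar prefix field and should be reassembled.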
test('long-name', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 16,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'hello long name\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR))
})

test('unicode-bsd', function (t) { // can unpack a bsdtar unicoded tarball
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'høllø.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 4,
      mtime: new Date(1387588646000),
      pax: {'SCHILY.dev': '16777217', 'SCHILY.ino': '3599143', 'SCHILY.nlink': '1', atime: '1387589077', ctime: '1387588646', path: 'høllø.txt'},
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'hej\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR))
})

test('unicode', function (t) { // can unpack a pax unicoded tarball
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false

  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'høstål.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 8,
      mtime: new Date(1387580181000),
      pax: {path: 'høstål.txt'},
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })

    stream.pipe(concat(function (data) {
      noEntries = true
      t.same(data.toString(), 'høllø\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(noEntries)
  })
  extract.end(fs.readFileSync(fixtures.UNICODE_TAR))
})
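
// Edge case: a name of exactly 100 characters fills the ustar name field completely.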
test('name-is-100', function (t) {
  t.plan(3)

  var extract = tar.extract()

  extract.on('entry', function (header, stream, callback) {
    t.same(header.name.length, 100)

    stream.pipe(concat(function (data) {
      t.same(data.toString(), 'hello\n')
      callback()
    }))
  })

  extract.on('finish', function () {
    t.ok(true)
  })

  extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
})
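
// Garbage input (a .tgz fed in without gunzipping) should emit an 'error' instead of entries.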
test('invalid-file', function (t) {
  t.plan(1)

  var extract = tar.extract()

  extract.on('error', function (err) {
    t.ok(!!err)
    extract.destroy()
  })

  extract.end(fs.readFileSync(fixtures.INVALID_TGZ))
})
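
// Expects four entries plus the finish assertion (t.plan(5)).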
test('space prefixed', function (t) {
  t.plan(5)

  var extract = tar.extract()

  extract.on('entry', function (header, stream, callback) {
    t.ok(true)
    callback()
  })

  extract.on('finish', function () {
    t.ok(true)
  })

  extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
})
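
// GNU long-path extension: the extracted name should exceed the 100-character ustar limit.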
test('gnu long path', function (t) {
  t.plan(2)
  var extract = tar.extract()

  extract.on('entry', function (header, stream, callback) {
    t.ok(header.name.length > 100)
    callback()
  })

  extract.on('finish', function () {
    t.ok(true)
  })
  extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
})
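
// uid/gid values too large for the octal header fields are encoded in base-256.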
test('base 256 uid and gid', function (t) {
  t.plan(2)
  var extract = tar.extract()
  extract.on('entry', function (header, stream, callback) {
    t.ok(header.uid === 116435139)
    t.ok(header.gid === 1876110778)
    callback()
  })
  extract.end(fs.readFileSync(fixtures.BASE_256_UID_GID))
})
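
// The size field of this fixture is encoded in base-256 rather than octal.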
test('base 256 size', function (t) {
  t.plan(2)
  var extract = tar.extract()
  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      name: 'test.txt',
      mode: parseInt('644', 8),
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0
    })
    callback()
  })
  extract.on('finish', function () {
    t.ok(true)
  })
  extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
})
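
// A file larger than 8 GB: the real size comes from the pax size record, and the
// number of streamed bytes should match it.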
test('huge', function (t) {
  t.plan(3)
  var extract = tar.extract()
  var noEntries = false
  var hugeFileSize = 8804630528 // ~8.2GB
  var dataLength = 0
  var countStream = new stream.Writable()
  countStream._write = function (chunk, encoding, done) {
    dataLength += chunk.length
    done()
  }
  // Make sure we read the correct pax size entry for a file larger than 8GB.
  extract.on('entry', function (header, stream, callback) {
    t.deepEqual(header, {
      devmajor: 0,
      devminor: 0,
      gid: 20,
      gname: 'staff',
      linkname: null,
      mode: 420,
      mtime: new Date(1521214967000),
      name: 'huge.txt',
      pax: {
        'LIBARCHIVE.creationtime': '1521214954',
        'SCHILY.dev': '16777218',
        'SCHILY.ino': '91584182',
        'SCHILY.nlink': '1',
        atime: '1521214969',
        ctime: '1521214967',
        size: hugeFileSize.toString()
      },
      size: hugeFileSize,
      type: 'file',
      uid: 502,
      uname: 'apd4n'
    })
    noEntries = true
    stream.pipe(countStream)
  })
  extract.on('finish', function () {
    t.ok(noEntries)
    t.equal(dataLength, hugeFileSize)
  })
  var gunzip = zlib.createGunzip()
  var reader = fs.createReadStream(fixtures.HUGE)
  reader.pipe(gunzip).pipe(extract)
})