2022-12-06 15:04:04 +00:00
|
|
|
// Test harness and helpers: brittle for assertions, concat-stream to
// collect entry contents, and the tar library under test from the
// package root. Fixture paths live in ./fixtures.
const test = require('brittle')
const concat = require('concat-stream')
const fs = require('fs')
const tar = require('..')
const fixtures = require('./fixtures')
// Unpacks a tarball that holds a single file, checking both the fully
// parsed header and the streamed file body.
test('one-file', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'test.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'hello world\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    // The lone entry must have been consumed before the stream finished.
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.ONE_FILE_TAR))
})
// Same single-file tarball as above, but fed to the extractor in odd-sized
// (321-byte) chunks to exercise buffering across write boundaries.
test('chunked-one-file', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'test.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'hello world\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  const b = fs.readFileSync(fixtures.ONE_FILE_TAR)

  // Drip-feed the tarball 321 bytes at a time.
  for (let i = 0; i < b.length; i += 321) {
    extract.write(b.subarray(i, clamp(i + 321, b.length, b.length)))
  }
  extract.end()
})
// Unpacks a two-file tarball, verifying each header/body pair in order.
test('multi-file', function (t) {
  t.plan(5)

  const extract = tar.extract()
  let noEntries = false

  extract.once('entry', onfile1)

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.MULTI_FILE_TAR))

  // First entry: file-1.txt. Registers the handler for the second entry.
  function onfile1 (header, stream, cb) {
    t.alike(header, {
      name: 'file-1.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    extract.on('entry', onfile2)

    stream.pipe(concat((data) => {
      t.is(data.toString(), 'i am file-1\n')
      cb()
    }))
  }

  // Second entry: file-2.txt. Marks completion for the finish check.
  function onfile2 (header, stream, cb) {
    t.alike(header, {
      name: 'file-2.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'i am file-2\n')
      cb()
    }))
  }
})
// Same two-file tarball, written to the extractor in 321-byte chunks.
test('chunked-multi-file', function (t) {
  t.plan(5)

  const extract = tar.extract()
  let noEntries = false

  extract.once('entry', onfile1)

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  const b = fs.readFileSync(fixtures.MULTI_FILE_TAR)

  // Drip-feed the tarball 321 bytes at a time.
  for (let i = 0; i < b.length; i += 321) {
    extract.write(b.subarray(i, clamp(i + 321, b.length, b.length)))
  }
  extract.end()

  // First entry: file-1.txt. Registers the handler for the second entry.
  function onfile1 (header, stream, cb) {
    t.alike(header, {
      name: 'file-1.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    extract.on('entry', onfile2)

    stream.pipe(concat((data) => {
      t.is(data.toString(), 'i am file-1\n')
      cb()
    }))
  }

  // Second entry: file-2.txt. Marks completion for the finish check.
  function onfile2 (header, stream, cb) {
    t.alike(header, {
      name: 'file-2.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'i am file-2\n')
      cb()
    }))
  }
})
// Unpacks a tarball with pax extended headers; the parsed pax records
// (including non-standard keys) must surface on header.pax.
test('pax', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'pax.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: { path: 'pax.txt', special: 'sauce' }
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'hello world\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.PAX_TAR))
})
// Unpacks a tarball containing non-file entry types (a directory and a
// symlink); neither should emit any body data.
test('types', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.once('entry', ondir)

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.TYPES_TAR))

  // First entry: a directory. Directories carry no body data.
  function ondir (header, stream, cb) {
    t.alike(header, {
      name: 'directory',
      mode: 0o755,
      uid: 501,
      gid: 20,
      size: 0,
      mtime: new Date(1387580181000),
      type: 'directory',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })
    stream.on('data', () => {
      t.ok(false) // a directory entry must never produce data
    })
    extract.once('entry', onlink)
    cb()
  }

  // Second entry: a symlink pointing at the directory above.
  function onlink (header, stream, cb) {
    t.alike(header, {
      name: 'directory-link',
      mode: 0o755,
      uid: 501,
      gid: 20,
      size: 0,
      mtime: new Date(1387580181000),
      type: 'symlink',
      linkname: 'directory',
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })
    stream.on('data', () => {
      t.ok(false) // a symlink entry must never produce data
    })
    noEntries = true
    cb()
  }
})
// Unpacks an entry whose path exceeds 100 characters and therefore uses
// the ustar prefix field.
test('long-name', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'my/file/is/longer/than/100/characters/and/should/use/the/prefix/header/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/foobarbaz/filename.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 16,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'hello long name\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.LONG_NAME_TAR))
})
// Unpacks a bsdtar-generated tarball with a unicode filename; bsdtar
// stores the name (and extra SCHILY.* metadata) via pax records.
test('unicode-bsd', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'høllø.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 4,
      mtime: new Date(1387588646000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: { 'SCHILY.dev': '16777217', 'SCHILY.ino': '3599143', 'SCHILY.nlink': '1', atime: '1387589077', ctime: '1387588646', path: 'høllø.txt' }
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'hej\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.UNICODE_BSD_TAR))
})
// Unpacks a tarball whose unicode filename is carried in a pax path record.
test('unicode', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'høstål.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 8,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: { path: 'høstål.txt' }
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'høllø\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.UNICODE_TAR))
})
// Edge case: a name of exactly 100 characters fills the ustar name field
// completely (no NUL terminator) and must still parse.
test('name-is-100', function (t) {
  t.plan(3)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    t.is(header.name.length, 100)

    stream.pipe(concat((data) => {
      t.is(data.toString(), 'hello\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.pass()
  })

  extract.end(fs.readFileSync(fixtures.NAME_IS_100_TAR))
})
// Feeding non-tar data (a gzip file) must surface an error event.
test('invalid-file', function (t) {
  t.plan(1)

  const extract = tar.extract()

  extract.on('error', (err) => {
    t.ok(!!err)
    extract.destroy()
  })

  extract.end(fs.readFileSync(fixtures.INVALID_TGZ))
})
// Unpacks a tarball whose numeric header fields are space-prefixed;
// every entry plus the finish event must fire (4 entries + finish = 5).
test('space prefixed', function (t) {
  t.plan(5)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    t.pass()
    cb()
  })

  extract.on('finish', () => {
    t.pass()
  })

  extract.end(fs.readFileSync(fixtures.SPACE_TAR_GZ))
})
// Unpacks a tarball that uses the GNU LongPath extension for names
// beyond 100 characters.
test('gnu long path', function (t) {
  t.plan(2)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    t.ok(header.name.length > 100)
    cb()
  })

  extract.on('finish', () => {
    t.pass()
  })

  extract.end(fs.readFileSync(fixtures.GNU_LONG_PATH))
})
// uid/gid values too large for octal are stored in base-256 encoding and
// must decode correctly.
test('base 256 uid and gid', function (t) {
  t.plan(2)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    t.ok(header.uid === 116435139)
    t.ok(header.gid === 1876110778)
    cb()
  })

  extract.end(fs.readFileSync(fixtures.BASE_256_UID_GID))
})
// A size field stored in base-256 encoding must decode to the right value.
test('base 256 size', function (t) {
  t.plan(2)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'test.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })
    cb()
  })

  extract.on('finish', () => {
    t.pass()
  })

  extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
})
// Unpacks a tarball whose filename is encoded in latin-1 by passing an
// explicit filenameEncoding option.
test('latin-1', function (t) {
  t.plan(3)

  // 'binary' is the older Node alias for the 'latin1' encoding.
  const extract = tar.extract({ filenameEncoding: 'binary' })
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'En français, s\'il vous plaît?.txt',
      mode: 0o644,
      uid: 0,
      gid: 0,
      size: 14,
      mtime: new Date(1495941034000),
      type: 'file',
      linkname: null,
      uname: 'root',
      gname: 'root',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'Hello, world!\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.LATIN1_TAR))
})
// A truncated tarball must error out rather than finish cleanly.
test('incomplete', function (t) {
  t.plan(1)

  const extract = tar.extract()

  extract.on('entry', (header, stream, cb) => {
    cb()
  })

  extract.on('error', (err) => {
    t.is(err.message, 'Unexpected end of data')
  })

  extract.on('finish', () => {
    t.fail('should not finish')
  })

  extract.end(fs.readFileSync(fixtures.INCOMPLETE_TAR))
})
// Unpacks a gnu-tar format tarball, checking header fields and contents.
test('gnu', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'test.txt',
      mode: 0o644,
      uid: 12345,
      gid: 67890,
      size: 14,
      mtime: new Date(1559239869000),
      type: 'file',
      linkname: null,
      uname: 'myuser',
      gname: 'mygroup',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'Hello, world!\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.GNU_TAR))
})
// Unpacks a gnu-tar incremental-format tarball. These carry extra ctime
// and atime values in the header, and without gnu-format awareness the
// atime (offset 345) would be mistaken for a directory prefix (which sits
// at the same offset in ustar).
test('gnu-incremental', function (t) {
  t.plan(3)

  const extract = tar.extract()
  let noEntries = false

  extract.on('entry', (header, stream, cb) => {
    t.alike(header, {
      name: 'test.txt',
      mode: 0o644,
      uid: 12345,
      gid: 67890,
      size: 14,
      mtime: new Date(1559239869000),
      type: 'file',
      linkname: null,
      uname: 'myuser',
      gname: 'mygroup',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'Hello, world!\n')
      cb()
    }))
  })

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.GNU_INCREMENTAL_TAR))
})
// v7 tarballs are not supported and must produce a parse error.
test('v7 unsupported', function (t) {
  t.plan(1)

  const extract = tar.extract()

  extract.on('error', (err) => {
    t.ok(!!err)
    extract.destroy()
  })

  extract.end(fs.readFileSync(fixtures.V7_TAR))
})
// An unrecognized tar format errors out unless explicitly allowed.
test('unknown format doesn\'t extract by default', function (t) {
  t.plan(1)

  const extract = tar.extract()

  extract.on('error', (err) => {
    t.ok(!!err)
    extract.destroy()
  })

  extract.end(fs.readFileSync(fixtures.UNKNOWN_FORMAT))
})
// With allowUnknownFormat the same tarball is extracted on a best-effort
// basis; both entries should come through intact.
test('unknown format attempts to extract if allowed', function (t) {
  t.plan(5)

  const extract = tar.extract({ allowUnknownFormat: true })
  let noEntries = false

  extract.once('entry', onfile1)

  extract.on('finish', () => {
    t.ok(noEntries)
  })

  extract.end(fs.readFileSync(fixtures.UNKNOWN_FORMAT))

  // First entry: file-1.txt. Registers the handler for the second entry.
  function onfile1 (header, stream, cb) {
    t.alike(header, {
      name: 'file-1.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    extract.on('entry', onfile2)

    stream.pipe(concat((data) => {
      t.is(data.toString(), 'i am file-1\n')
      cb()
    }))
  }

  // Second entry: file-2.txt. Marks completion for the finish check.
  function onfile2 (header, stream, cb) {
    t.alike(header, {
      name: 'file-2.txt',
      mode: 0o644,
      uid: 501,
      gid: 20,
      size: 12,
      mtime: new Date(1387580181000),
      type: 'file',
      linkname: null,
      uname: 'maf',
      gname: 'staff',
      devmajor: 0,
      devminor: 0,
      pax: null
    })

    stream.pipe(concat((data) => {
      noEntries = true
      t.is(data.toString(), 'i am file-2\n')
      cb()
    }))
  }
})
// The extract stream can be consumed with for-await; each yielded entry
// exposes its header and is itself a readable stream.
test('extract streams are async iterators', async function (t) {
  const extract = tar.extract()
  const b = fs.readFileSync(fixtures.MULTI_FILE_TAR)

  extract.end(b)

  const expected = ['file-1.txt', 'file-2.txt']

  for await (const entry of extract) {
    t.is(entry.header.name, expected.shift())
    entry.resume() // drain the entry body so iteration can continue
    t.comment('wait a bit...')
    await new Promise(resolve => setTimeout(resolve, 100))
  }
})
// Clamp `index` into the range [0, len], resolving negative indices
// relative to `len` (Array#slice semantics). Non-numeric input yields
// `defaultValue`; fractional input is truncated toward zero.
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index // truncate toward zero
  if (index >= len) return len
  if (index >= 0) return index
  index += len
  return index >= 0 ? index : 0
}