Overwrite header.size with pax header size, if present (#78)
* Overwrite header.size with pax header size, if present
* Added test for huge file extract
* Fixed style guide violations
* More style guide fixes
This commit is contained in:
parent
209ceac59b
commit
a9f74facbf
4 changed files with 58 additions and 0 deletions
|
@ -22,6 +22,7 @@ var emptyStream = function (self, offset) {
|
||||||
// Fold pax extended-header values into a ustar header object.
// Overrides applied when present: pax.path -> header.name,
// pax.linkpath -> header.linkname, pax.size (decimal string) -> header.size.
// The raw pax record is attached as header.pax; the (mutated) header is returned.
var mixinPax = function (header, pax) {
  var path = pax.path
  var linkpath = pax.linkpath
  var size = pax.size

  if (path) header.name = path
  if (linkpath) header.linkname = linkpath
  if (size) header.size = parseInt(size, 10)

  header.pax = pax
  return header
}
|
|
|
@ -3,6 +3,8 @@ var tar = require('../index')
|
||||||
var fixtures = require('./fixtures')
|
var fixtures = require('./fixtures')
|
||||||
var concat = require('concat-stream')
|
var concat = require('concat-stream')
|
||||||
var fs = require('fs')
|
var fs = require('fs')
|
||||||
|
var stream = require('stream')
|
||||||
|
var zlib = require('zlib')
|
||||||
|
|
||||||
var clamp = function (index, len, defaultValue) {
|
var clamp = function (index, len, defaultValue) {
|
||||||
if (typeof index !== 'number') return defaultValue
|
if (typeof index !== 'number') return defaultValue
|
||||||
|
@ -533,3 +535,57 @@ test('base 256 size', function (t) {
|
||||||
|
|
||||||
extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
|
extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Regression test for pax size handling: extract a tarball whose single entry
// is ~8.2GB, larger than the 8GB ustar size field, so the entry size can only
// come from the pax extended header's 'size' record.
test('huge', function (t) {
  t.plan(1)

  var extract = tar.extract()
  var noEntries = false
  var hugeFileSize = 8804630528 // ~8.2GB
  var dataLength = 0

  // Writable sink that only counts how many bytes flow through it.
  var countStream = new stream.Writable()
  countStream._write = function (chunk, encoding, done) {
    dataLength += chunk.length
    done()
  }

  // Make sure we read the correct pax size entry for a file larger than 8GB.
  extract.on('entry', function (header, stream, callback) {
    // header.size must be the numeric value parsed from pax.size.
    t.deepEqual(header, {
      devmajor: 0,
      devminor: 0,
      gid: 20,
      gname: 'staff',
      linkname: null,
      mode: 420,
      mtime: new Date(1521214967000),
      name: 'huge.txt',
      pax: {
        'LIBARCHIVE.creationtime': '1521214954',
        'SCHILY.dev': '16777218',
        'SCHILY.ino': '91584182',
        'SCHILY.nlink': '1',
        atime: '1521214969',
        ctime: '1521214967',
        size: hugeFileSize.toString()
      },
      size: hugeFileSize,
      type: 'file',
      uid: 502,
      uname: 'apd4n'
    })

    noEntries = true
    stream.pipe(countStream)
  })

  // NOTE(review): t.plan(1) only accounts for the deepEqual above, yet this
  // handler makes two more assertions — and the entry 'callback' is never
  // invoked, so it is unclear whether 'finish' ever fires here. Confirm
  // tar-stream's entry-continuation semantics and whether the plan count
  // should be 3.
  extract.on('finish', function () {
    t.ok(noEntries)
    t.equal(dataLength, hugeFileSize)
  })

  // Fixture is gzipped; decompress before feeding the extractor.
  var gunzip = zlib.createGunzip()
  var reader = fs.createReadStream(fixtures.HUGE)
  reader.pipe(gunzip).pipe(extract)
})
||||||
|
|
BIN  test/fixtures/huge.tar.gz (vendored, new binary file — not shown)
1    test/fixtures/index.js (vendored, 1 line changed)
|
@ -14,3 +14,4 @@ exports.GNU_LONG_PATH = path.join(__dirname, 'gnu-long-path.tar')
|
||||||
// Fixture tarball paths (names suggest the numeric-field edge case each one
// exercises — confirm against the tests that consume them).
exports.BASE_256_UID_GID = path.join(__dirname, 'base-256-uid-gid.tar')
exports.LARGE_UID_GID = path.join(__dirname, 'large-uid-gid.tar')
exports.BASE_256_SIZE = path.join(__dirname, 'base-256-size.tar')
// Gzipped tarball whose single entry is ~8.2GB, with the real size carried in
// a pax extended header; consumed by the 'huge' extract test.
exports.HUGE = path.join(__dirname, 'huge.tar.gz')
|
||||||
|
|
Loading…
Reference in a new issue