move slow tests out to their own file and don't run them by default

Mathias Buus 2018-03-21 20:32:29 +01:00
parent b3b840b14d
commit d38b9661d2
3 changed files with 62 additions and 57 deletions

package.json

@@ -18,7 +18,8 @@
     "tape": "^3.0.3"
   },
   "scripts": {
-    "test": "standard && tape test/*.js"
+    "test": "standard && tape test/extract.js test/pack.js",
+    "test-all": "standard && tape test/*.js"
   },
   "keywords": [
     "tar",

test/extract.js

@@ -3,8 +3,6 @@ var tar = require('../index')
 var fixtures = require('./fixtures')
 var concat = require('concat-stream')
 var fs = require('fs')
-var stream = require('stream')
-var zlib = require('zlib')
 
 var clamp = function (index, len, defaultValue) {
   if (typeof index !== 'number') return defaultValue
@@ -536,60 +534,6 @@ test('base 256 size', function (t) {
   extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
 })
 
-test('huge', function (t) {
-  t.plan(1)
-
-  var extract = tar.extract()
-  var noEntries = false
-  var hugeFileSize = 8804630528 // ~8.2GB
-  var dataLength = 0
-
-  var countStream = new stream.Writable()
-  countStream._write = function (chunk, encoding, done) {
-    dataLength += chunk.length
-    done()
-  }
-
-  // Make sure we read the correct pax size entry for a file larger than 8GB.
-  extract.on('entry', function (header, stream, callback) {
-    t.deepEqual(header, {
-      devmajor: 0,
-      devminor: 0,
-      gid: 20,
-      gname: 'staff',
-      linkname: null,
-      mode: 420,
-      mtime: new Date(1521214967000),
-      name: 'huge.txt',
-      pax: {
-        'LIBARCHIVE.creationtime': '1521214954',
-        'SCHILY.dev': '16777218',
-        'SCHILY.ino': '91584182',
-        'SCHILY.nlink': '1',
-        atime: '1521214969',
-        ctime: '1521214967',
-        size: hugeFileSize.toString()
-      },
-      size: hugeFileSize,
-      type: 'file',
-      uid: 502,
-      uname: 'apd4n'
-    })
-    noEntries = true
-    stream.pipe(countStream)
-  })
-
-  extract.on('finish', function () {
-    t.ok(noEntries)
-    t.equal(dataLength, hugeFileSize)
-  })
-
-  var gunzip = zlib.createGunzip()
-  var reader = fs.createReadStream(fixtures.HUGE)
-
-  reader.pipe(gunzip).pipe(extract)
-})
-
 test('latin-1', function (t) { // can unpack filenames encoded in latin-1
   t.plan(3)
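
For context (not part of the commit): the test being moved exercises pax size handling because the classic ustar header stores an entry's size as 12 bytes of octal text, which caps out just below 8 GiB. A quick sanity-check sketch of that limit:

    var USTAR_MAX_SIZE = parseInt('77777777777', 8) // 8589934591 bytes, just under 8 GiB
    var hugeFileSize = 8804630528                   // ~8.2 GiB, the size asserted by the test
    console.log(hugeFileSize > USTAR_MAX_SIZE)      // true -> the size only fits in a pax 'size' record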

test/slow.js (new file, 60 additions)

@@ -0,0 +1,60 @@
+var test = require('tape')
+var stream = require('readable-stream')
+var zlib = require('zlib')
+var fs = require('fs')
+var tar = require('../')
+var fixtures = require('./fixtures')
+
+test('huge', function (t) {
+  t.plan(1)
+
+  var extract = tar.extract()
+  var noEntries = false
+  var hugeFileSize = 8804630528 // ~8.2GB
+  var dataLength = 0
+
+  var countStream = new stream.Writable()
+  countStream._write = function (chunk, encoding, done) {
+    dataLength += chunk.length
+    done()
+  }
+
+  // Make sure we read the correct pax size entry for a file larger than 8GB.
+  extract.on('entry', function (header, stream, callback) {
+    t.deepEqual(header, {
+      devmajor: 0,
+      devminor: 0,
+      gid: 20,
+      gname: 'staff',
+      linkname: null,
+      mode: 420,
+      mtime: new Date(1521214967000),
+      name: 'huge.txt',
+      pax: {
+        'LIBARCHIVE.creationtime': '1521214954',
+        'SCHILY.dev': '16777218',
+        'SCHILY.ino': '91584182',
+        'SCHILY.nlink': '1',
+        atime: '1521214969',
+        ctime: '1521214967',
+        size: hugeFileSize.toString()
+      },
+      size: hugeFileSize,
+      type: 'file',
+      uid: 502,
+      uname: 'apd4n'
+    })
+    noEntries = true
+    stream.pipe(countStream)
+  })
+
+  extract.on('finish', function () {
+    t.ok(noEntries)
+    t.equal(dataLength, hugeFileSize)
+  })
+
+  var gunzip = zlib.createGunzip()
+  var reader = fs.createReadStream(fixtures.HUGE)
+
+  reader.pipe(gunzip).pipe(extract)
+})
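
A side note on the code above: rather than buffering ~8.2 GB with concat-stream the way the other extract tests collect entries, this test pipes the entry into a counting Writable and asserts only on the byte total. A minimal standalone sketch of that pattern (makeByteCounter is a hypothetical name, assuming readable-stream as required above):

    var stream = require('readable-stream')

    // Hypothetical helper mirroring the test's countStream: count bytes as they
    // flow through instead of holding gigabytes of data in memory.
    function makeByteCounter (onTotal) {
      var total = 0
      var counter = new stream.Writable()
      counter._write = function (chunk, encoding, done) {
        total += chunk.length // chunk is a Buffer, so length is its byte count
        done()
      }
      counter.on('finish', function () { onTotal(total) })
      return counter
    }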