From d38b9661d28923e7c31b47c1c012ff8ffb46ecb5 Mon Sep 17 00:00:00 2001
From: Mathias Buus
Date: Wed, 21 Mar 2018 20:32:29 +0100
Subject: [PATCH] move slow tests out to their own file and don't run them by
 default

---
 package.json    |  3 ++-
 test/extract.js | 56 ---------------------------------------------
 test/slow.js    | 60 +++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 62 insertions(+), 57 deletions(-)
 create mode 100644 test/slow.js

diff --git a/package.json b/package.json
index 437b092..2a25292 100644
--- a/package.json
+++ b/package.json
@@ -18,7 +18,8 @@
     "tape": "^3.0.3"
   },
   "scripts": {
-    "test": "standard && tape test/*.js"
+    "test": "standard && tape test/extract.js test/pack.js",
+    "test-all": "standard && tape test/*.js"
   },
   "keywords": [
     "tar",
diff --git a/test/extract.js b/test/extract.js
index 83c53f5..0cf3123 100644
--- a/test/extract.js
+++ b/test/extract.js
@@ -3,8 +3,6 @@ var tar = require('../index')
 var fixtures = require('./fixtures')
 var concat = require('concat-stream')
 var fs = require('fs')
-var stream = require('stream')
-var zlib = require('zlib')
 
 var clamp = function (index, len, defaultValue) {
   if (typeof index !== 'number') return defaultValue
@@ -536,60 +534,6 @@ test('base 256 size', function (t) {
   extract.end(fs.readFileSync(fixtures.BASE_256_SIZE))
 })
 
-test('huge', function (t) {
-  t.plan(1)
-
-  var extract = tar.extract()
-  var noEntries = false
-  var hugeFileSize = 8804630528 // ~8.2GB
-  var dataLength = 0
-
-  var countStream = new stream.Writable()
-  countStream._write = function (chunk, encoding, done) {
-    dataLength += chunk.length
-    done()
-  }
-
-  // Make sure we read the correct pax size entry for a file larger than 8GB.
-  extract.on('entry', function (header, stream, callback) {
-    t.deepEqual(header, {
-      devmajor: 0,
-      devminor: 0,
-      gid: 20,
-      gname: 'staff',
-      linkname: null,
-      mode: 420,
-      mtime: new Date(1521214967000),
-      name: 'huge.txt',
-      pax: {
-        'LIBARCHIVE.creationtime': '1521214954',
-        'SCHILY.dev': '16777218',
-        'SCHILY.ino': '91584182',
-        'SCHILY.nlink': '1',
-        atime: '1521214969',
-        ctime: '1521214967',
-        size: hugeFileSize.toString()
-      },
-      size: hugeFileSize,
-      type: 'file',
-      uid: 502,
-      uname: 'apd4n'
-    })
-
-    noEntries = true
-    stream.pipe(countStream)
-  })
-
-  extract.on('finish', function () {
-    t.ok(noEntries)
-    t.equal(dataLength, hugeFileSize)
-  })
-
-  var gunzip = zlib.createGunzip()
-  var reader = fs.createReadStream(fixtures.HUGE)
-  reader.pipe(gunzip).pipe(extract)
-})
-
 test('latin-1', function (t) {
   // can unpack filenames encoded in latin-1
   t.plan(3)
diff --git a/test/slow.js b/test/slow.js
new file mode 100644
index 0000000..c8a1bc0
--- /dev/null
+++ b/test/slow.js
@@ -0,0 +1,60 @@
+var test = require('tape')
+var stream = require('readable-stream')
+var zlib = require('zlib')
+var fs = require('fs')
+var tar = require('../')
+var fixtures = require('./fixtures')
+
+test('huge', function (t) {
+  t.plan(1)
+
+  var extract = tar.extract()
+  var noEntries = false
+  var hugeFileSize = 8804630528 // ~8.2GB
+  var dataLength = 0
+
+  var countStream = new stream.Writable()
+  countStream._write = function (chunk, encoding, done) {
+    dataLength += chunk.length
+    done()
+  }
+
+  // Make sure we read the correct pax size entry for a file larger than 8GB.
+  extract.on('entry', function (header, stream, callback) {
+    t.deepEqual(header, {
+      devmajor: 0,
+      devminor: 0,
+      gid: 20,
+      gname: 'staff',
+      linkname: null,
+      mode: 420,
+      mtime: new Date(1521214967000),
+      name: 'huge.txt',
+      pax: {
+        'LIBARCHIVE.creationtime': '1521214954',
+        'SCHILY.dev': '16777218',
+        'SCHILY.ino': '91584182',
+        'SCHILY.nlink': '1',
+        atime: '1521214969',
+        ctime: '1521214967',
+        size: hugeFileSize.toString()
+      },
+      size: hugeFileSize,
+      type: 'file',
+      uid: 502,
+      uname: 'apd4n'
+    })
+
+    noEntries = true
+    stream.pipe(countStream)
+  })
+
+  extract.on('finish', function () {
+    t.ok(noEntries)
+    t.equal(dataLength, hugeFileSize)
+  })
+
+  var gunzip = zlib.createGunzip()
+  var reader = fs.createReadStream(fixtures.HUGE)
+  reader.pipe(gunzip).pipe(extract)
+})
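
Note on what the slow test covers (not part of the commit): the classic ustar
size field holds 11 octal digits, so it tops out at 0o77777777777 = 8589934591
bytes, just under 8GiB; larger entries must carry their real size in a pax
extended header, which tar-stream parses into header.pax and reflects in
header.size, as the deepEqual above asserts. A minimal consumer-side sketch of
that behaviour, assuming tar-stream's documented extract API ('big.tar' is a
hypothetical input file):

    var tar = require('tar-stream')
    var fs = require('fs')

    var extract = tar.extract()

    extract.on('entry', function (header, stream, next) {
      // For entries past the ustar limit, header.size is derived from the
      // pax 'size' record, which is also visible raw as header.pax.size.
      console.log(header.name, header.size, header.pax && header.pax.size)
      stream.resume()        // drain the entry body
      stream.on('end', next) // signal that the next entry can be parsed
    })

    fs.createReadStream('big.tar').pipe(extract)

With the package.json change above, this ~8.2GB fixture only streams through
the extractor via 'npm run test-all'; the default 'npm test' runs just the
fast extract and pack suites.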