diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml
index 214b6c2..fd30399 100644
--- a/.github/workflows/nodejs.yml
+++ b/.github/workflows/nodejs.yml
@@ -13,4 +13,4 @@ jobs:
     uses: node-modules/github-actions/.github/workflows/node-test.yml@master
     with:
       os: 'ubuntu-latest, macos-latest, windows-latest'
-      version: '14, 16, 18, 20, 22, 24'
+      version: '18, 20, 22, 24'
diff --git a/README.md b/README.md
index e416e0c..87d64fc 100644
--- a/README.md
+++ b/README.md
@@ -75,7 +75,7 @@ fs.createReadStream('file/path/to/compress')
   .on('error', handleError);

 // You should take care of stream errors in caution, use pump to handle error in one place
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const sourceStream = fs.createReadStream('file/path/to/compress');
 const gzipStream = new compressing.gzip.FileStream();
 const destStream = fs.createWriteStream('path/to/destination.gz');
@@ -193,7 +193,7 @@ function onEntry(header, stream, next) => {
   if (header.type === 'file') {
     stream.pipe(fs.createWriteStream(path.join(destDir, header.name)));
   } else { // directory
-    mkdirp(path.join(destDir, header.name), err => {
+    fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
       if (err) return handleError(err);
       stream.resume();
     });
diff --git a/lib/tgz/file_stream.js b/lib/tgz/file_stream.js
index dbb9d5d..f2c478f 100644
--- a/lib/tgz/file_stream.js
+++ b/lib/tgz/file_stream.js
@@ -4,7 +4,7 @@ const tar = require('../tar');
 const gzip = require('../gzip');
 const utils = require('../utils');
 const stream = require('stream');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const ready = require('get-ready');

 class TgzFileStream extends stream.Transform {
diff --git a/lib/utils.js b/lib/utils.js
index 3fe8df5..b31e37a 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -2,8 +2,7 @@

 const fs = require('fs');
 const path = require('path');
-const mkdirp = require('mkdirp');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');

 // file/fileBuffer/stream
 exports.sourceType = source => {
@@ -90,7 +89,7 @@ exports.makeUncompressFn = StreamClass => {
     }

     return new Promise((resolve, reject) => {
-      mkdirp(destDir, err => {
+      fs.mkdir(destDir, { recursive: true }, err => {
        if (err) return reject(err);

        let entryCount = 0;
@@ -113,7 +112,7 @@ exports.makeUncompressFn = StreamClass => {

          if (header.type === 'file') {
            const dir = path.dirname(destFilePath);
-           mkdirp(dir, err => {
+           fs.mkdir(dir, { recursive: true }, err => {
              if (err) return reject(err);

              entryCount++;
@@ -128,7 +127,7 @@ exports.makeUncompressFn = StreamClass => {

            const target = path.resolve(dir, header.linkname);
            entryCount++;
-           mkdirp(dir, err => {
+           fs.mkdir(dir, { recursive: true }, err => {
              if (err) return reject(err);

              const relativeTarget = path.relative(dir, target);
@@ -139,7 +138,7 @@ exports.makeUncompressFn = StreamClass => {
              });
            });
          } else { // directory
-           mkdirp(destFilePath, err => {
+           fs.mkdir(destFilePath, { recursive: true }, err => {
              if (err) return reject(err);
              stream.resume();
            });
diff --git a/package.json b/package.json
index 8e5a657..6e1fcdc 100644
--- a/package.json
+++ b/package.json
@@ -39,14 +39,12 @@
   },
   "homepage": "https://github.com/node-modules/compressing#readme",
   "dependencies": {
+    "@eggjs/yauzl": "^2.11.0",
     "flushwritable": "^1.0.0",
     "get-ready": "^1.0.0",
     "iconv-lite": "^0.5.0",
-    "mkdirp": "^0.5.1",
-    "pump": "^3.0.0",
     "streamifier": "^0.1.1",
     "tar-stream": "^1.5.2",
-    "@eggjs/yauzl": "^2.11.0",
     "yazl": "^2.4.2"
   },
   "devDependencies": {
@@ -62,6 +60,6 @@
     "uuid": "^3.0.1"
   },
   "engines": {
-    "node": ">= 4.0.0"
+    "node": ">= 18.0.0"
   }
 }
diff --git a/test/gzip/file_stream.test.js b/test/gzip/file_stream.test.js
index 6a00e80..a7a18f3 100644
--- a/test/gzip/file_stream.test.js
+++ b/test/gzip/file_stream.test.js
@@ -2,7 +2,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');

diff --git a/test/gzip/uncompress_stream.test.js b/test/gzip/uncompress_stream.test.js
index 653de02..f7b3e96 100644
--- a/test/gzip/uncompress_stream.test.js
+++ b/test/gzip/uncompress_stream.test.js
@@ -4,7 +4,7 @@ const os = require('os');
 const uuid = require('uuid');
 const path = require('path');
 const assert = require('assert');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const streamifier = require('streamifier');
 const compressing = require('../..');

diff --git a/test/tar/file_stream.test.js b/test/tar/file_stream.test.js
index d156e54..18c4719 100644
--- a/test/tar/file_stream.test.js
+++ b/test/tar/file_stream.test.js
@@ -6,7 +6,7 @@ const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
 const assert = require('assert');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');

 describe('test/tar/file_stream.test.js', () => {
diff --git a/test/tar/index.test.js b/test/tar/index.test.js
index 642becc..b03eef6 100644
--- a/test/tar/index.test.js
+++ b/test/tar/index.test.js
@@ -8,7 +8,6 @@ const uuid = require('uuid');
 const compressing = require('../..');
 const assert = require('assert');
 const dircompare = require('dir-compare');
-const mkdirp = require('mz-modules/mkdirp');

 describe('test/tar/index.test.js', () => {
   afterEach(mm.restore);
@@ -108,7 +107,7 @@ describe('test/tar/index.test.js', () => {
     assert(fs.existsSync(destFile));

     const destDir = path.join(os.tmpdir(), uuid.v4());
-    await mkdirp(destDir);
+    await fs.promises.mkdir(destDir, { recursive: true });
     await compressing.tar.uncompress(destFile, destDir);
     const stat = fs.statSync(path.join(destDir, 'bin'));
     assert(stat.mode === originStat.mode);
diff --git a/test/tar/stream.test.js b/test/tar/stream.test.js
index 2f84b6b..9a50431 100644
--- a/test/tar/stream.test.js
+++ b/test/tar/stream.test.js
@@ -5,7 +5,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');
 const TarStream = compressing.tar.Stream;
diff --git a/test/tar/uncompress_stream.test.js b/test/tar/uncompress_stream.test.js
index ce53b4b..f3d97c3 100644
--- a/test/tar/uncompress_stream.test.js
+++ b/test/tar/uncompress_stream.test.js
@@ -4,8 +4,7 @@ const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
 const assert = require('assert');
-const mkdirp = require('mkdirp');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const dircompare = require('dir-compare');
 const streamifier = require('streamifier');
 const { pipelinePromise } = require('../util');
@@ -22,7 +21,7 @@ describe('test/tar/uncompress_stream.test.js', () => {
     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tar.UncompressStream();
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     pump(sourceStream, uncompressStream, err => {
       assert(!err);
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -39,7 +38,7 @@ describe('test/tar/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -52,7 +51,7 @@ describe('test/tar/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tar.UncompressStream({ source: sourceFile });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -69,7 +68,7 @@ describe('test/tar/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -83,7 +82,7 @@ describe('test/tar/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tar.UncompressStream({ source: sourceBuffer });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -103,7 +102,7 @@ describe('test/tar/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -117,7 +116,7 @@ describe('test/tar/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tar.UncompressStream({ source: sourceStream });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -134,7 +133,7 @@ describe('test/tar/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
diff --git a/test/tgz/file_stream.test.js b/test/tgz/file_stream.test.js
index 97df8af..dd7d2d4 100644
--- a/test/tgz/file_stream.test.js
+++ b/test/tgz/file_stream.test.js
@@ -4,7 +4,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');

diff --git a/test/tgz/index.test.js b/test/tgz/index.test.js
index 3389aed..f612366 100644
--- a/test/tgz/index.test.js
+++ b/test/tgz/index.test.js
@@ -4,7 +4,6 @@ const path = require('path');
 const uuid = require('uuid');
 const assert = require('assert');
 const dircompare = require('dir-compare');
-const mkdirp = require('mz-modules/mkdirp');
 const compressing = require('../..');

 const isWindows = os.platform() === 'win32';
@@ -68,7 +67,7 @@ describe('test/tgz/index.test.js', () => {
     assert(fs.existsSync(destFile));

     const destDir = path.join(os.tmpdir(), uuid.v4());
-    await mkdirp(destDir);
+    await fs.promises.mkdir(destDir, { recursive: true });
     await compressing.tgz.uncompress(destFile, destDir);
     const stat = fs.statSync(path.join(destDir, 'bin'));
     assert(stat.mode);
diff --git a/test/tgz/stream.test.js b/test/tgz/stream.test.js
index feb16d3..9d703be 100644
--- a/test/tgz/stream.test.js
+++ b/test/tgz/stream.test.js
@@ -5,7 +5,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');
 const TgzStream = compressing.tgz.Stream;
diff --git a/test/tgz/uncompress_stream.test.js b/test/tgz/uncompress_stream.test.js
index a1aee48..50e1b26 100644
--- a/test/tgz/uncompress_stream.test.js
+++ b/test/tgz/uncompress_stream.test.js
@@ -4,8 +4,7 @@ const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
 const assert = require('assert');
-const mkdirp = require('mkdirp');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const dircompare = require('dir-compare');
 const { pipelinePromise } = require('../util');
 const compressing = require('../..');
@@ -21,7 +20,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {
     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tgz.UncompressStream();
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     pump(sourceStream, uncompressStream, err => {
       console.error(err);
       assert(!err);
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -39,7 +38,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -52,7 +51,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tgz.UncompressStream({ source: sourceFile });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -70,7 +69,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -84,7 +83,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tgz.UncompressStream({ source: sourceBuffer });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -101,7 +100,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -115,7 +114,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.tgz.UncompressStream({ source: sourceStream });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -135,7 +134,7 @@ describe('test/tgz/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
diff --git a/test/util.js b/test/util.js
index cdb84c6..64bbd35 100644
--- a/test/util.js
+++ b/test/util.js
@@ -1,14 +1,5 @@
 const stream = require('stream');
-const pump = require('pump');

-// impl promise pipeline on Node.js 14
-const pipelinePromise = stream.promises?.pipeline ?? function pipeline(...args) {
-  return new Promise((resolve, reject) => {
-    pump(...args, err => {
-      if (err) return reject(err);
-      resolve();
-    });
-  });
-};
+const pipelinePromise = stream.promises.pipeline;

-exports.pipelinePromise = pipelinePromise;
+module.exports = { pipelinePromise };
diff --git a/test/zip/file_stream.test.js b/test/zip/file_stream.test.js
index e783792..7f195d7 100644
--- a/test/zip/file_stream.test.js
+++ b/test/zip/file_stream.test.js
@@ -4,7 +4,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');

diff --git a/test/zip/index.test.js b/test/zip/index.test.js
index 7c67eb4..bbcb831 100644
--- a/test/zip/index.test.js
+++ b/test/zip/index.test.js
@@ -2,7 +2,6 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const mkdirp = require('mkdirp');
 const assert = require('assert');
 const dircompare = require('dir-compare');
 const compressing = require('../..');
@@ -25,7 +24,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressFile(file, stream)', async () => {
     const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     const fileStream = fs.createWriteStream(destFile);
@@ -36,7 +35,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressFile(file, stream) should handle error if file not exist', async () => {
     const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log', 'not_exist');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     const fileStream = fs.createWriteStream(destFile);
@@ -52,7 +51,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressFile(file, destStream) should error if destStream emit error', async () => {
     const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     const fileStream = fs.createWriteStream(destFile);
     setImmediate(() => fileStream.emit('error', new Error('xx')));
@@ -69,7 +68,7 @@ describe('test/zip/index.test.js', () => {
     const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
     const sourceBuffer = fs.readFileSync(sourceFile);
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     const fileStream = fs.createWriteStream(destFile);
@@ -81,7 +80,7 @@ describe('test/zip/index.test.js', () => {
     const sourceFile = path.join(__dirname, '..', 'fixtures', 'xx.log');
     const sourceStream = fs.createReadStream(sourceFile);
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     const fileStream = fs.createWriteStream(destFile);
@@ -94,7 +93,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressDir(dir, destFile)', async () => {
     const sourceDir = path.join(__dirname, '..', 'fixtures');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     await compressing.zip.compressDir(sourceDir, destFile);
@@ -104,7 +103,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressDir(dir, destStream)', async () => {
     const sourceDir = path.join(__dirname, '..', 'fixtures');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     const destStream = fs.createWriteStream(destFile);
     console.log('dest', destFile);
@@ -115,7 +114,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressDir(dir, destStream, { ignoreBase: true })', async () => {
     const sourceDir = path.join(__dirname, '..', 'fixtures');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     const destStream = fs.createWriteStream(destFile);
     console.log('dest', destFile);
@@ -126,7 +125,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressDir(dir, destStream) should return promise', async () => {
     const sourceDir = path.join(__dirname, '..', 'fixtures');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     console.log('dest', destFile);
     await compressing.zip.compressDir(sourceDir, destFile);
@@ -136,7 +135,7 @@ describe('test/zip/index.test.js', () => {
   it('zip.compressDir(dir, destStream) should reject when destStream emit error', async () => {
     const sourceDir = path.join(__dirname, '..', 'fixtures');
     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     const destFile = path.join(destDir, uuid.v4() + '.zip');
     const destStream = fs.createWriteStream(destFile);
     setImmediate(() => {
@@ -286,7 +285,7 @@ describe('test/zip/index.test.js', () => {
     assert(fs.existsSync(destFile));

     destDir = path.join(os.tmpdir(), uuid.v4());
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });
     await compressing.zip.uncompress(destFile, destDir);
     const stat = fs.statSync(path.join(destDir, 'bin'));
     assert(stat.mode === originStat.mode);
diff --git a/test/zip/stream.test.js b/test/zip/stream.test.js
index 2d74ad5..b5370f4 100644
--- a/test/zip/stream.test.js
+++ b/test/zip/stream.test.js
@@ -5,7 +5,7 @@ const fs = require('fs');
 const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const compressing = require('../..');
 const assert = require('assert');
 const ZipStream = compressing.zip.Stream;
diff --git a/test/zip/uncompress_stream.test.js b/test/zip/uncompress_stream.test.js
index 6db1231..4242e8c 100644
--- a/test/zip/uncompress_stream.test.js
+++ b/test/zip/uncompress_stream.test.js
@@ -4,8 +4,7 @@ const os = require('os');
 const path = require('path');
 const uuid = require('uuid');
 const assert = require('assert');
-const mkdirp = require('mkdirp');
-const pump = require('pump');
+const { pipeline: pump } = require('stream');
 const dircompare = require('dir-compare');
 const { pipelinePromise } = require('../util');
 const compressing = require('../..');
@@ -21,7 +20,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream();
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     pump(sourceStream, uncompressStream, err => {
       assert(!err);
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -38,7 +37,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -51,7 +50,7 @@ describe('test/zip/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream({ source: sourceFile });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -70,7 +69,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -84,7 +83,7 @@ describe('test/zip/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream({ source: sourceBuffer });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -103,7 +102,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -117,7 +116,7 @@ describe('test/zip/uncompress_stream.test.js', () => {

     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream({ source: sourceStream });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     uncompressStream.on('finish', () => {
       const res = dircompare.compareSync(originalDir, path.join(destDir, 'xxx'));
@@ -136,7 +135,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
           .then(next)
           .catch(done);
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -181,7 +180,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream({ strip: 1 });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     pump(sourceStream, uncompressStream, err => {
       assert(!err);
       const res = dircompare.compareSync(originalDir, destDir);
@@ -198,7 +197,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
       if (header.type === 'file') {
         stream.pipe(fs.createWriteStream(path.join(destDir, header.name)));
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
@@ -211,7 +210,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
     const destDir = path.join(os.tmpdir(), uuid.v4());
     const uncompressStream = new compressing.zip.UncompressStream({ strip: 2 });
-    mkdirp.sync(destDir);
+    fs.mkdirSync(destDir, { recursive: true });

     pump(sourceStream, uncompressStream, err => {
       assert(!err);
       const res = dircompare.compareSync(path.join(__dirname, '../fixtures/xxx-strip2'), destDir);
@@ -228,7 +227,7 @@ describe('test/zip/uncompress_stream.test.js', () => {
       if (header.type === 'file') {
         stream.pipe(fs.createWriteStream(path.join(destDir, header.name)));
       } else { // directory
-        mkdirp(path.join(destDir, header.name), err => {
+        fs.mkdir(path.join(destDir, header.name), { recursive: true }, err => {
           if (err) return done(err);
           stream.resume();
         });
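Migration note: every hunk above applies one of two substitutions, both available in the Node.js >= 18 range declared in the new engines field. The pump package is replaced by the built-in stream.pipeline (aliased as pump, since both take streams followed by a callback and destroy all streams on error), and mkdirp / mz-modules/mkdirp are replaced by fs.mkdir, fs.mkdirSync or fs.promises.mkdir with { recursive: true }. The sketch below illustrates the before/after pattern only; the paths and the gzip stream are placeholders, not code from this repository.

const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');

// was: mkdirp.sync(destDir) — with { recursive: true } this also succeeds if the directory exists
const destDir = '/tmp/compressing-demo'; // placeholder path
fs.mkdirSync(destDir, { recursive: true });

// was: pump(source, gzip, dest, callback) — pipeline keeps the same callback-last signature
pipeline(
  fs.createReadStream('input.txt'), // placeholder source
  zlib.createGzip(),
  fs.createWriteStream(`${destDir}/input.txt.gz`),
  err => {
    if (err) console.error('compression failed:', err);
  }
);

Because the call signatures match, existing call sites can keep invoking pump(...) unchanged after the const { pipeline: pump } = require('stream') alias.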