From 8c1d8c362577f2be2d7ce4bc35d00ad78b42ae8a Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 20:14:58 -0700 Subject: [PATCH 01/16] chore: upgrade mocha to fix some devDeps issues with npm audit --- package-lock.json | 249 +++++++++++++++++++++++++++++----------------- package.json | 2 +- 2 files changed, 156 insertions(+), 95 deletions(-) diff --git a/package-lock.json b/package-lock.json index db88179a..c806a771 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4896,12 +4896,6 @@ "integrity": "sha512-ccVHpE72+tcIKaGMql33x5MAjKQIZrk+3x2GbJ7TeraUCZWHoT+KSZpoC+JQFsUBlSTXUrBaGiF0j6zVTepPLg==", "dev": true }, - "detect-file": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz", - "integrity": "sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=", - "dev": true - }, "diff": { "version": "3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", @@ -5148,6 +5142,12 @@ "is-electron-renderer": "^2.0.0" } }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, "end-of-stream": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", @@ -5623,15 +5623,6 @@ } } }, - "expand-tilde": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", - "integrity": "sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=", - "dev": true, - "requires": { - "homedir-polyfill": "^1.0.1" - } - }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -5892,18 +5883,6 @@ } } }, - "findup-sync": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-2.0.0.tgz", - "integrity": "sha1-kyaxSIwi0aYIhlCoaQGy2akKLLw=", - "dev": true, - "requires": { - "detect-file": "^1.0.0", - 
"is-glob": "^3.1.0", - "micromatch": "^3.0.4", - "resolve-dir": "^1.0.1" - } - }, "flat": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", @@ -6197,30 +6176,6 @@ "integrity": "sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs=", "dev": true }, - "global-modules": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", - "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", - "dev": true, - "requires": { - "global-prefix": "^1.0.1", - "is-windows": "^1.0.1", - "resolve-dir": "^1.0.0" - } - }, - "global-prefix": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", - "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", - "dev": true, - "requires": { - "expand-tilde": "^2.0.2", - "homedir-polyfill": "^1.0.1", - "ini": "^1.3.4", - "is-windows": "^1.0.1", - "which": "^1.2.14" - } - }, "globals": { "version": "11.11.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.11.0.tgz", @@ -6359,15 +6314,6 @@ "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", "dev": true }, - "homedir-polyfill": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.1.tgz", - "integrity": "sha1-TCu8inWJmP7r9e1oWA921GdotLw=", - "dev": true, - "requires": { - "parse-passwd": "^1.0.0" - } - }, "hook-std": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/hook-std/-/hook-std-1.2.0.tgz", @@ -7339,9 +7285,9 @@ } }, "mocha": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-6.0.0.tgz", - "integrity": "sha512-A7g9k3yr8oJaXn2IItFnfgjyxFc/LTe6Wwv7FczP+e8G74o9xYNSbMYmCf1ouldRojLrFcOb+z75P6Ak0GX6ug==", + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-6.1.4.tgz", + "integrity": "sha512-PN8CIy4RXsIoxoFJzS4QNnCH4psUCPWc4/rPrst/ecSJJbLBkubMiyGCP2Kj/9YnWbotFqAoeXyXMucj7gwCFg==", "dev": true, 
"requires": { "ansi-colors": "3.2.3", @@ -7349,26 +7295,38 @@ "debug": "3.2.6", "diff": "3.5.0", "escape-string-regexp": "1.0.5", - "findup-sync": "2.0.0", + "find-up": "3.0.0", "glob": "7.1.3", "growl": "1.10.5", "he": "1.2.0", - "js-yaml": "3.12.0", + "js-yaml": "3.13.1", "log-symbols": "2.2.0", "minimatch": "3.0.4", "mkdirp": "0.5.1", "ms": "2.1.1", - "node-environment-flags": "1.0.4", + "node-environment-flags": "1.0.5", "object.assign": "4.1.0", "strip-json-comments": "2.0.1", "supports-color": "6.0.0", "which": "1.3.1", "wide-align": "1.1.3", - "yargs": "12.0.5", - "yargs-parser": "11.1.1", + "yargs": "13.2.2", + "yargs-parser": "13.0.0", "yargs-unparser": "1.5.0" }, "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, "debug": { "version": "3.2.6", "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", @@ -7378,28 +7336,109 @@ "ms": "^2.1.1" } }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, "he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", "integrity": 
"sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", "dev": true }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, "js-yaml": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", - "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", "dev": true, "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" } }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, "ms": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", "dev": true }, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + 
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, "supports-color": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", @@ -7408,6 +7447,35 @@ "requires": { "has-flag": "^3.0.0" } + }, + "yargs": { + "version": "13.2.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.2.2.tgz", + "integrity": "sha512-WyEoxgyTD3w5XRpAQNYUB9ycVH/PQrToaTXdYXRdOXvEy1l19br+VJsc0vcO8PTGg5ro/l/GY7F/JMEBmI0BxA==", + "dev": true, + "requires": { + "cliui": "^4.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "os-locale": "^3.1.0", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.0.0" + } + }, + "yargs-parser": { + "version": "13.0.0", + "resolved": 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.0.0.tgz", + "integrity": "sha512-w2LXjoL8oRdRQN+hOyppuXs+V/fVAYtpcrRxZuF7Kt/Oc+Jr2uAcVntaUTNT6w5ihoWfFDpNY8CPx1QskxZ/pw==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } } } }, @@ -7493,12 +7561,21 @@ } }, "node-environment-flags": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.4.tgz", - "integrity": "sha512-M9rwCnWVLW7PX+NUWe3ejEdiLYinRpsEre9hMkU/6NS4h+EEulYaDH1gCEZ2gyXsmw+RXYDaV2JkkTNcsPDJ0Q==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.5.tgz", + "integrity": "sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ==", "dev": true, "requires": { - "object.getownpropertydescriptors": "^2.0.3" + "object.getownpropertydescriptors": "^2.0.3", + "semver": "^5.7.0" + }, + "dependencies": { + "semver": { + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", + "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==", + "dev": true + } } }, "node-fetch": { @@ -11091,9 +11168,9 @@ }, "dependencies": { "object-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.0.tgz", - "integrity": "sha512-6OO5X1+2tYkNyNEx6TsCxEqFfRWaqx6EtMiSbGrw8Ob8v9Ne+Hl8rBAgLBZn5wjEz3s/s6U1WXFUFOcxxAwUpg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "dev": true } } @@ -11285,12 +11362,6 @@ "error-ex": "^1.2.0" } }, - "parse-passwd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", - "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=", - "dev": true - }, 
"pascalcase": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", @@ -11762,16 +11833,6 @@ "path-parse": "^1.0.6" } }, - "resolve-dir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", - "integrity": "sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=", - "dev": true, - "requires": { - "expand-tilde": "^2.0.0", - "global-modules": "^1.0.0" - } - }, "resolve-from": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz", diff --git a/package.json b/package.json index b01e54ec..c0c3906f 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "electron": "^4.0.5", "electron-mocha": "^6.0.4", "lodash": "^4.17.11", - "mocha": "^6.0.0", + "mocha": "^6.1.3", "mz": "^2.7.0", "rimraf": "^2.6.3", "semantic-release": "^15.13.3", From 7d089c58a918dc373ed869ec1ada7e32e58aa39f Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 20:17:26 -0700 Subject: [PATCH 02/16] chore: upgrade commander and electron deps --- package-lock.json | 26 +++++++++++++------------- package.json | 4 ++-- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/package-lock.json b/package-lock.json index c806a771..4bece8ec 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4143,9 +4143,9 @@ } }, "commander": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", - "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==" + "version": "2.20.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.0.tgz", + "integrity": "sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==" }, "compare-func": { "version": "1.3.2", @@ -4998,9 +4998,9 @@ } }, "electron": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/electron/-/electron-4.0.5.tgz", - "integrity": 
"sha512-UWFH6SrzNtzfvusGUFYxXDrgsUEbtBXkH/66hpDWxjA2Ckt7ozcYIujZpshbr7LPy8kV3ZRxIvoyCMdaS5DkVQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/electron/-/electron-4.1.5.tgz", + "integrity": "sha512-0VZzUd4vZaUeSLdxJI/XMrMnPN7AROjPFZOiNgZZkYRUUEjGHfaSAbCJyxuXtii52KGhzGL0JgW0q5QmQ3ykKQ==", "dev": true, "requires": { "@types/node": "^10.12.18", @@ -7212,18 +7212,18 @@ "dev": true }, "mime-db": { - "version": "1.38.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.38.0.tgz", - "integrity": "sha512-bqVioMFFzc2awcdJZIzR3HjZFX20QhilVS7hytkKrv7xFAn8bM1gzc/FOX2awLISvWe0PV8ptFKcon+wZ5qYkg==", + "version": "1.40.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", + "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==", "dev": true }, "mime-types": { - "version": "2.1.22", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.22.tgz", - "integrity": "sha512-aGl6TZGnhm/li6F7yx82bJiBZwgiEa4Hf6CNr8YO+r5UHr53tSTYZb102zyU50DOWWKeOv0uQLRL0/9EiKWCog==", + "version": "2.1.24", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", + "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", "dev": true, "requires": { - "mime-db": "~1.38.0" + "mime-db": "1.40.0" } }, "mimic-fn": { diff --git a/package.json b/package.json index c0c3906f..636a05b3 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ }, "dependencies": { "chromium-pickle-js": "^0.2.0", - "commander": "^2.19.0", + "commander": "^2.20.0", "cuint": "^0.2.2", "glob": "^7.1.3", "minimatch": "^3.0.4", @@ -39,7 +39,7 @@ }, "devDependencies": { "@continuous-auth/semantic-release-npm": "^1.0.3", - "electron": "^4.0.5", + "electron": "^4.1.4", "electron-mocha": "^6.0.4", "lodash": "^4.17.11", "mocha": "^6.1.3", From 4b126fd0ce8d3aa65c070e0a24817a33c74792b1 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 
2019 20:21:27 -0700 Subject: [PATCH 03/16] chore: upgrade electron-mocha, which requires Node >= 8 --- package-lock.json | 185 +++++++++++++++++++++++++++++++--------------- package.json | 4 +- 2 files changed, 127 insertions(+), 62 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4bece8ec..d2f952a1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5049,31 +5049,51 @@ } }, "electron-mocha": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/electron-mocha/-/electron-mocha-6.0.4.tgz", - "integrity": "sha512-WVJAldQRv7hC7Db65qKU1TQ3w9rwqPXzz3UzpQQDRPqIxd7FERxnwRXqb+bQKBOdO5ThnHxMtnHWVJgG+FZivw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/electron-mocha/-/electron-mocha-8.0.1.tgz", + "integrity": "sha512-FG7viwoP8zmpdNkX1dyTa95+Wn1JEDu/GA/QhBnpLJD/Kh9/b3al6tSgiviPK+qPW4J27UtDWJbJcKDKMOWDiw==", "dev": true, "requires": { - "commander": "^2.15.1", + "ansi-colors": "^3.2.4", "electron-window": "^0.8.0", - "fs-extra": "^6.0.1", - "mocha": "^5.2.0", - "which": "^1.3.1" + "fs-extra": "^7.0.1", + "log-symbols": "^2.2.0", + "mocha": "^6.1.1", + "which": "^1.3.1", + "yargs": "^13.2.2" }, "dependencies": { - "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "ansi-colors": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", + "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==", + "dev": true + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": 
"https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, "requires": { - "ms": "2.0.0" + "locate-path": "^3.0.0" } }, "fs-extra": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-6.0.1.tgz", - "integrity": "sha512-GnyIkKhhzXZUWFCaJzvyDLEEgDkPfb4/TPvJCJVuS8MWZgoSsErf++QpiAlDnKFcqhRlm+tIOcencCjyJE6ZCA==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", "dev": true, "requires": { "graceful-fs": "^4.1.2", @@ -5081,54 +5101,105 @@ "universalify": "^0.1.0" } }, - "glob": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, 
"requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" } }, - "jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", "dev": true, "requires": { - "graceful-fs": "^4.1.6" + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" } }, - "mocha": { + "strip-ansi": { "version": "5.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", - "integrity": 
"sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", - "dev": true, - "requires": { - "browser-stdout": "1.3.1", - "commander": "2.15.1", - "debug": "3.1.0", - "diff": "3.5.0", - "escape-string-regexp": "1.0.5", - "glob": "7.1.2", - "growl": "1.10.5", - "he": "1.1.1", - "minimatch": "3.0.4", - "mkdirp": "0.5.1", - "supports-color": "5.4.0" - }, - "dependencies": { - "commander": { - "version": "2.15.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", - "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", - "dev": true - } + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "yargs": { + "version": "13.2.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.2.2.tgz", + "integrity": "sha512-WyEoxgyTD3w5XRpAQNYUB9ycVH/PQrToaTXdYXRdOXvEy1l19br+VJsc0vcO8PTGg5ro/l/GY7F/JMEBmI0BxA==", + "dev": true, + "requires": { + "cliui": "^4.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "os-locale": "^3.1.0", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.0.0" + } + }, + "yargs-parser": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.0.0.tgz", + "integrity": "sha512-w2LXjoL8oRdRQN+hOyppuXs+V/fVAYtpcrRxZuF7Kt/Oc+Jr2uAcVntaUTNT6w5ihoWfFDpNY8CPx1QskxZ/pw==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" } } } @@ -6308,12 +6379,6 @@ } } }, - "he": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", - "dev": true - 
}, "hook-std": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/hook-std/-/hook-std-1.2.0.tgz", diff --git a/package.json b/package.json index 636a05b3..1000eb3f 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ "asar": "./bin/asar.js" }, "engines": { - "node": ">=6.0" + "node": ">=8.0" }, "license": "MIT", "homepage": "https://github.com/electron/asar", @@ -40,7 +40,7 @@ "devDependencies": { "@continuous-auth/semantic-release-npm": "^1.0.3", "electron": "^4.1.4", - "electron-mocha": "^6.0.4", + "electron-mocha": "^8.0.1", "lodash": "^4.17.11", "mocha": "^6.1.3", "mz": "^2.7.0", From ffd2f5198f06f164f4182f1752af1837071124ca Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 20:24:29 -0700 Subject: [PATCH 04/16] docs: update code examples for Node 8 --- README.md | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index d14dec52..5c061c11 100644 --- a/README.md +++ b/README.md @@ -85,15 +85,14 @@ $ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}" ### Example -```js -var asar = require('asar'); +```javascript +const asar = require('asar'); -var src = 'some/path/'; -var dest = 'name.asar'; +const src = 'some/path/'; +const dest = 'name.asar'; -asar.createPackage(src, dest).then(function() { - console.log('done.'); -}) +await asar.createPackage(src, dest); +console.log('done.'); ``` Please note that there is currently **no** error handling provided! @@ -103,19 +102,18 @@ You can pass in a `transform` option, that is a function, which either returns nothing, or a `stream.Transform`. The latter will be used on files that will be in the `.asar` file to transform them (e.g. compress). 
-```js -var asar = require('asar'); +```javascript +const asar = require('asar'); -var src = 'some/path/'; -var dest = 'name.asar'; +const src = 'some/path/'; +const dest = 'name.asar'; -function transform(filename) { +function transform (filename) { return new CustomTransformStream() } -asar.createPackageWithOptions(src, dest, { transform: transform }).then(function() { - console.log('done.'); -}) +await asar.createPackageWithOptions(src, dest, { transform: transform }); +console.log('done.'); ``` ## Using with grunt From 54d4d810e746a38f8e8e10a744b5f57579ae7809 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 20:29:48 -0700 Subject: [PATCH 05/16] refactor: replace pify with util.promisify --- lib/asar.js | 4 ++-- lib/crawlfs.js | 6 +++--- lib/disk.js | 6 +++--- lib/filesystem.js | 4 ++-- package-lock.json | 5 ----- package.json | 1 - test/api-spec.js | 6 +++--- test/cli-spec.js | 6 +++--- test/util/compareFiles.js | 4 ++-- 9 files changed, 18 insertions(+), 24 deletions(-) diff --git a/lib/asar.js b/lib/asar.js index 85a0dfae..5090f3ec 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -1,11 +1,11 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') const fs = process.versions.electron ? require('original-fs') : require('fs') const path = require('path') const minimatch = require('minimatch') -const mkdirp = pify(require('mkdirp')) +const mkdirp = promisify(require('mkdirp')) const Filesystem = require('./filesystem') const disk = require('./disk') diff --git a/lib/crawlfs.js b/lib/crawlfs.js index 52033f6f..3b94a440 100644 --- a/lib/crawlfs.js +++ b/lib/crawlfs.js @@ -1,9 +1,9 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') -const fs = pify(process.versions.electron ? require('original-fs') : require('fs')) -const glob = pify(require('glob')) +const fs = promisify(process.versions.electron ? 
require('original-fs') : require('fs')) +const glob = promisify(require('glob')) function determineFileType (filename) { return fs.lstat(filename) diff --git a/lib/disk.js b/lib/disk.js index a40c54e1..4763a2a1 100644 --- a/lib/disk.js +++ b/lib/disk.js @@ -1,10 +1,10 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') -const fs = pify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) const path = require('path') -const mkdirp = pify(require('mkdirp')) +const mkdirp = promisify(require('mkdirp')) const pickle = require('chromium-pickle-js') const Filesystem = require('./filesystem') diff --git a/lib/filesystem.js b/lib/filesystem.js index e3d16d72..7799fbe7 100644 --- a/lib/filesystem.js +++ b/lib/filesystem.js @@ -1,8 +1,8 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') -const fs = pify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = promisify(process.versions.electron ? 
require('original-fs') : require('fs')) const path = require('path') const tmp = require('tmp-promise') const UINT64 = require('cuint').UINT64 diff --git a/package-lock.json b/package-lock.json index d2f952a1..4522cbe0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11499,11 +11499,6 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", "dev": true }, - "pify": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", - "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" - }, "pinkie": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", diff --git a/package.json b/package.json index 1000eb3f..e25ead58 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,6 @@ "glob": "^7.1.3", "minimatch": "^3.0.4", "mkdirp": "^0.5.1", - "pify": "^4.0.1", "tmp-promise": "^1.0.5" }, "devDependencies": { diff --git a/test/api-spec.js b/test/api-spec.js index a3fb3650..a88fa029 100644 --- a/test/api-spec.js +++ b/test/api-spec.js @@ -1,12 +1,12 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') const assert = require('assert') -const fs = pify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) const os = require('os') const path = require('path') -const rimraf = pify(require('rimraf')) +const rimraf = promisify(require('rimraf')) const asar = require('..') const compDirs = require('./util/compareDirectories') diff --git a/test/cli-spec.js b/test/cli-spec.js index 25272fdd..4d45f9cc 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -1,13 +1,13 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') const assert = require('assert') const { exec } = require('mz/child_process') -const fs = pify(process.versions.electron ? 
require('original-fs') : require('fs')) +const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) const os = require('os') const path = require('path') -const rimraf = pify(require('rimraf')) +const rimraf = promisify(require('rimraf')) const compDirs = require('./util/compareDirectories') const compFileLists = require('./util/compareFileLists') diff --git a/test/util/compareFiles.js b/test/util/compareFiles.js index bad7c282..ad5b02d6 100644 --- a/test/util/compareFiles.js +++ b/test/util/compareFiles.js @@ -1,9 +1,9 @@ 'use strict' -const pify = require('pify') +const { promisify } = require('util') const assert = require('assert') -const fs = pify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) module.exports = function (actualFilePath, expectedFilePath) { return Promise.all([fs.readFile(actualFilePath, 'utf8'), fs.readFile(expectedFilePath, 'utf8')]) From a76c7a725ed06091e6af22a5d39e03ae7654025c Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 21:50:40 -0700 Subject: [PATCH 06/16] refactor: finish promisify conversion & use async/await everywhere --- lib/asar.js | 35 +++--- lib/crawlfs.js | 47 ++++---- lib/disk.js | 29 +++-- lib/filesystem.js | 38 +++---- lib/wrapped-fs.js | 22 ++++ test/api-spec.js | 82 +++++++------ test/cli-spec.js | 196 ++++++++++++++------------------ test/util/compareDirectories.js | 89 +++++++-------- test/util/compareFiles.js | 10 +- 9 files changed, 262 insertions(+), 286 deletions(-) create mode 100644 lib/wrapped-fs.js diff --git a/lib/asar.js b/lib/asar.js index 5090f3ec..3742639d 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -1,11 +1,8 @@ 'use strict' -const { promisify } = require('util') - -const fs = process.versions.electron ? 
require('original-fs') : require('fs') +const fs = require('./wrapped-fs') const path = require('path') const minimatch = require('minimatch') -const mkdirp = promisify(require('mkdirp')) const Filesystem = require('./filesystem') const disk = require('./disk') @@ -29,11 +26,11 @@ function isUnpackedDir (dirPath, pattern, unpackDirs) { } } -module.exports.createPackage = function (src, dest) { +module.exports.createPackage = async function (src, dest) { return module.exports.createPackageWithOptions(src, dest, {}) } -module.exports.createPackageWithOptions = function (src, dest, options) { +module.exports.createPackageWithOptions = async function (src, dest, options) { const globOptions = options.globOptions ? options.globOptions : {} globOptions.dot = options.dot === undefined ? true : options.dot @@ -42,8 +39,8 @@ module.exports.createPackageWithOptions = function (src, dest, options) { pattern = src + options.pattern } - return crawlFilesystem(pattern, globOptions) - .then(([filenames, metadata]) => module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)) + const [filenames, metadata] = await crawlFilesystem(pattern, globOptions) + return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options) } /* @@ -54,7 +51,7 @@ filenames: Array of filenames relative to src. metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional) options: The options. 
*/ -module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options) { +module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) { if (typeof metadata === 'undefined' || metadata === null) { metadata = {} } if (typeof options === 'undefined' || options === null) { options = {} } @@ -106,11 +103,11 @@ module.exports.createPackageFromFiles = function (src, dest, filenames, metadata filenamesSorted = filenames } - const handleFile = function (filename) { + const handleFile = async function (filename) { let file = metadata[filename] let type if (!file) { - const stat = fs.lstatSync(filename) + const stat = await fs.lstat(filename) if (stat.isDirectory()) { type = 'directory' } if (stat.isFile()) { type = 'file' } if (stat.isSymbolicLink()) { type = 'link' } @@ -146,18 +143,18 @@ module.exports.createPackageFromFiles = function (src, dest, filenames, metadata return Promise.resolve() } - const insertsDone = function () { - return mkdirp(path.dirname(dest)) - .then(() => disk.writeFilesystem(dest, filesystem, files, metadata)) + const insertsDone = async function () { + await fs.mkdirp(path.dirname(dest)) + return disk.writeFilesystem(dest, filesystem, files, metadata) } const names = filenamesSorted.slice() - const next = function (name) { + const next = async function (name) { if (!name) { return insertsDone() } - return handleFile(name) - .then(() => next(names.shift())) + await handleFile(name) + return next(names.shift()) } return next(names.shift()) @@ -185,7 +182,7 @@ module.exports.extractAll = function (archive, dest) { const followLinks = process.platform === 'win32' // create destination directory - mkdirp.sync(dest) + fs.mkdirpSync(dest) return filenames.map((filename) => { filename = filename.substr(1) // get rid of leading slash @@ -193,7 +190,7 @@ module.exports.extractAll = function (archive, dest) { const file = filesystem.getFile(filename, followLinks) if (file.files) { // it's a 
directory, create it and continue with the next entry - mkdirp.sync(destFilename) + fs.mkdirpSync(destFilename) } else if (file.link) { // it's a symlink, create a symlink const linkSrcPath = path.dirname(path.join(dest, file.link)) diff --git a/lib/crawlfs.js b/lib/crawlfs.js index 3b94a440..cff46533 100644 --- a/lib/crawlfs.js +++ b/lib/crawlfs.js @@ -2,36 +2,31 @@ const { promisify } = require('util') -const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = require('./wrapped-fs') const glob = promisify(require('glob')) -function determineFileType (filename) { - return fs.lstat(filename) - .then(stat => { - if (stat.isFile()) { - return [filename, { type: 'file', stat: stat }] - } else if (stat.isDirectory()) { - return [filename, { type: 'directory', stat: stat }] - } else if (stat.isSymbolicLink()) { - return [filename, { type: 'link', stat: stat }] - } +async function determineFileType (filename) { + const stat = await fs.lstat(filename) + if (stat.isFile()) { + return [filename, { type: 'file', stat: stat }] + } else if (stat.isDirectory()) { + return [filename, { type: 'directory', stat: stat }] + } else if (stat.isSymbolicLink()) { + return [filename, { type: 'link', stat: stat }] + } - return [filename, undefined] - }) + return [filename, undefined] } -module.exports = function (dir, options) { +module.exports = async function (dir, options) { const metadata = {} - return glob(dir, options) - .then(filenames => Promise.all(filenames.map(filename => determineFileType(filename)))) - .then(results => { - const filenames = [] - for (const [filename, type] of results) { - filenames.push(filename) - if (type) { - metadata[filename] = type - } - } - return [filenames, metadata] - }) + const crawled = await glob(dir, options) + const results = await Promise.all(crawled.map(filename => determineFileType(filename))) + const filenames = results.map(([filename, type]) => { + if (type) { + metadata[filename] = type + } + 
return filename + }) + return [filenames, metadata] } diff --git a/lib/disk.js b/lib/disk.js index 4763a2a1..72a3949e 100644 --- a/lib/disk.js +++ b/lib/disk.js @@ -1,24 +1,21 @@ 'use strict' -const { promisify } = require('util') - -const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = require('./wrapped-fs') const path = require('path') -const mkdirp = promisify(require('mkdirp')) const pickle = require('chromium-pickle-js') const Filesystem = require('./filesystem') let filesystemCache = {} -function copyFile (dest, src, filename) { +async function copyFile (dest, src, filename) { const srcFile = path.join(src, filename) const targetFile = path.join(dest, filename) - return Promise.all([fs.readFile(srcFile), fs.stat(srcFile), mkdirp(path.dirname(targetFile))]) - .then(([content, stats, _]) => fs.writeFile(targetFile, content, { mode: stats.mode })) + const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))]) + return fs.writeFile(targetFile, content, { mode: stats.mode }) } -function streamTransformedFile (originalFilename, outStream, transformed) { +async function streamTransformedFile (originalFilename, outStream, transformed) { return new Promise((resolve, reject) => { const stream = fs.createReadStream(transformed ? 
transformed.path : originalFilename) stream.pipe(outStream, { end: false }) @@ -27,20 +24,19 @@ function streamTransformedFile (originalFilename, outStream, transformed) { }) } -const writeFileListToStream = function (dest, filesystem, out, list, metadata) { - let promise = Promise.resolve() +const writeFileListToStream = async function (dest, filesystem, out, list, metadata) { for (const file of list) { if (file.unpack) { // the file should not be packed into archive const filename = path.relative(filesystem.src, file.filename) - promise = promise.then(() => copyFile(`${dest}.unpacked`, filesystem.src, filename)) + await copyFile(`${dest}.unpacked`, filesystem.src, filename) } else { - promise = promise.then(() => streamTransformedFile(file.filename, out, metadata[file.filename].transformed)) + await streamTransformedFile(file.filename, out, metadata[file.filename].transformed) } } - return promise.then(() => out.end()) + return out.end() } -module.exports.writeFilesystem = function (dest, filesystem, files, metadata) { +module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) { const headerPickle = pickle.createEmpty() headerPickle.writeString(JSON.stringify(filesystem.header)) const headerBuf = headerPickle.toBuffer() @@ -50,11 +46,12 @@ module.exports.writeFilesystem = function (dest, filesystem, files, metadata) { const sizeBuf = sizePickle.toBuffer() const out = fs.createWriteStream(dest) - return new Promise((resolve, reject) => { + await new Promise((resolve, reject) => { out.on('error', reject) out.write(sizeBuf) return out.write(headerBuf, () => resolve()) - }).then(() => writeFileListToStream(dest, filesystem, out, files, metadata)) + }) + return writeFileListToStream(dest, filesystem, out, files, metadata) } module.exports.readArchiveHeaderSync = function (archive) { diff --git a/lib/filesystem.js b/lib/filesystem.js index 7799fbe7..559f3887 100644 --- a/lib/filesystem.js +++ b/lib/filesystem.js @@ -1,8 +1,6 @@ 'use strict' 
-const { promisify } = require('util') - -const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = require('./wrapped-fs') const path = require('path') const tmp = require('tmp-promise') const UINT64 = require('cuint').UINT64 @@ -50,7 +48,7 @@ class Filesystem { return node.files } - insertFile (p, shouldUnpack, file, options) { + async insertFile (p, shouldUnpack, file, options) { const dirNode = this.searchNodeFromPath(path.dirname(p)) const node = this.searchNodeFromPath(p) if (shouldUnpack || dirNode.unpacked) { @@ -84,25 +82,21 @@ class Filesystem { const transformed = options.transform && options.transform(p) if (transformed) { - return tmp.file() - .then(tmpfile => { - return new Promise((resolve, reject) => { - const out = fs.createWriteStream(tmpfile.path) - const stream = fs.createReadStream(p) - - stream.pipe(transformed).pipe(out) - return out.on('close', () => { - return fs.lstat(tmpfile.path) - .then(stat => { - file.transformed = { - path: tmpfile.path, - stat - } - return handler(resolve, reject) - }) - }) - }) + const tmpfile = await tmp.file() + return new Promise((resolve, reject) => { + const out = fs.createWriteStream(tmpfile.path) + const stream = fs.createReadStream(p) + + stream.pipe(transformed).pipe(out) + return out.on('close', async () => { + const stat = await fs.lstat(tmpfile.path) + file.transformed = { + path: tmpfile.path, + stat + } + return handler(resolve, reject) }) + }) } else { return handler() } diff --git a/lib/wrapped-fs.js b/lib/wrapped-fs.js new file mode 100644 index 00000000..4dad09c6 --- /dev/null +++ b/lib/wrapped-fs.js @@ -0,0 +1,22 @@ +'use strict' + +const { promisify } = require('util') + +const fs = process.versions.electron ? 
require('original-fs') : require('fs') +const mkdirp = require('mkdirp') + +const methods = [ + 'lstat', + 'readFile', + 'stat', + 'writeFile' +] + +for (const method of methods) { + fs[method] = promisify(fs[method]) +} +// To make it more like fs-extra +fs.mkdirp = promisify(mkdirp) +fs.mkdirpSync = mkdirp.sync + +module.exports = fs diff --git a/test/api-spec.js b/test/api-spec.js index a88fa029..73cd7e08 100644 --- a/test/api-spec.js +++ b/test/api-spec.js @@ -1,12 +1,10 @@ 'use strict' -const { promisify } = require('util') - const assert = require('assert') -const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = require('../lib/wrapped-fs') const os = require('os') const path = require('path') -const rimraf = promisify(require('rimraf')) +const rimraf = require('rimraf') const asar = require('..') const compDirs = require('./util/compareDirectories') @@ -14,82 +12,82 @@ const compFileLists = require('./util/compareFileLists') const compFiles = require('./util/compareFiles') const transform = require('./util/transformStream') -function assertPackageListEquals (actualList, expectedFilename) { - return fs.readFile(expectedFilename, 'utf8') - .then(expected => compFileLists(actualList.join('\n'), expected)) +async function assertPackageListEquals (actualList, expectedFilename) { + const expected = await fs.readFile(expectedFilename, 'utf8') + return compFileLists(actualList.join('\n'), expected) } describe('api', function () { beforeEach(() => { rimraf.sync(path.join(__dirname, '..', 'tmp'), fs) }) - it('should create archive from directory', () => { - return asar.createPackage('test/input/packthis/', 'tmp/packthis-api.asar') - .then(() => compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar')) + it('should create archive from directory', async () => { + await asar.createPackage('test/input/packthis/', 'tmp/packthis-api.asar') + return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar') }) if 
(os.platform() === 'win32') { - it('should create archive with windows-style path separators', () => { - asar.createPackage('test\\input\\packthis\\', 'tmp\\packthis-api.asar') - .then(() => compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar')) + it('should create archive with windows-style path separators', async () => { + await asar.createPackage('test\\input\\packthis\\', 'tmp\\packthis-api.asar') + return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar') }) } - it('should create archive from directory (without hidden files)', () => { - return asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-without-hidden-api.asar', { dot: false }) - .then(() => compFiles('tmp/packthis-without-hidden-api.asar', 'test/expected/packthis-without-hidden.asar')) + it('should create archive from directory (without hidden files)', async () => { + await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-without-hidden-api.asar', { dot: false }) + return compFiles('tmp/packthis-without-hidden-api.asar', 'test/expected/packthis-without-hidden.asar') }) - it('should create archive from directory (with transformed files)', () => { - return asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-transformed.asar', { transform }) - .then(() => compFiles('tmp/packthis-api-transformed.asar', 'test/expected/packthis-transformed.asar')) + it('should create archive from directory (with transformed files)', async () => { + await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-transformed.asar', { transform }) + return compFiles('tmp/packthis-api-transformed.asar', 'test/expected/packthis-transformed.asar') }) - it('should create archive from directory (with nothing packed)', () => { - return asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-unpacked.asar', { unpackDir: '**' }) - .then(() => compFiles('tmp/packthis-api-unpacked.asar', 
'test/expected/packthis-all-unpacked.asar')) - .then(() => compDirs('tmp/packthis-api-unpacked.asar.unpacked', 'test/expected/extractthis')) + it('should create archive from directory (with nothing packed)', async () => { + await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-unpacked.asar', { unpackDir: '**' }) + await compFiles('tmp/packthis-api-unpacked.asar', 'test/expected/packthis-all-unpacked.asar') + return compDirs('tmp/packthis-api-unpacked.asar.unpacked', 'test/expected/extractthis') }) - it('should list files/dirs in archive', function () { + it('should list files/dirs in archive', async () => { return assertPackageListEquals(asar.listPackage('test/input/extractthis.asar'), 'test/expected/extractthis-filelist.txt') }) - it('should list files/dirs in archive with option', function () { + it('should list files/dirs in archive with option', async () => { return assertPackageListEquals(asar.listPackage('test/input/extractthis-unpack-dir.asar', { isPack: true }), 'test/expected/extractthis-filelist-with-option.txt') }) - it('should extract a text file from archive', function () { + it('should extract a text file from archive', async () => { const actual = asar.extractFile('test/input/extractthis.asar', 'dir1/file1.txt').toString('utf8') - let expected = fs.readFileSync('test/expected/extractthis/dir1/file1.txt', 'utf8') + let expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt', 'utf8') return compFileLists(actual, expected) }) - it('should extract a binary file from archive', function () { + it('should extract a binary file from archive', async () => { const actual = asar.extractFile('test/input/extractthis.asar', 'dir2/file2.png') - const expected = fs.readFileSync('test/expected/extractthis/dir2/file2.png') + const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png') return assert.strictEqual(actual.toString(), expected.toString()) }) - it('should extract a binary file from archive with 
unpacked files', function () { + it('should extract a binary file from archive with unpacked files', async () => { const actual = asar.extractFile('test/input/extractthis-unpack.asar', 'dir2/file2.png') - const expected = fs.readFileSync('test/expected/extractthis/dir2/file2.png') + const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png') return assert.strictEqual(actual.toString(), expected.toString()) }) - it('should extract an archive', () => { + it('should extract an archive', async () => { asar.extractAll('test/input/extractthis.asar', 'tmp/extractthis-api/') return compDirs('tmp/extractthis-api/', 'test/expected/extractthis') }) - it('should extract an archive with unpacked files', () => { + it('should extract an archive with unpacked files', async () => { asar.extractAll('test/input/extractthis-unpack.asar', 'tmp/extractthis-unpack-api/') return compDirs('tmp/extractthis-unpack-api/', 'test/expected/extractthis') }) - it('should extract a binary file from archive with unpacked files', function () { + it('should extract a binary file from archive with unpacked files', async () => { const actual = asar.extractFile('test/input/extractthis-unpack-dir.asar', 'dir1/file1.txt') - const expected = fs.readFileSync('test/expected/extractthis/dir1/file1.txt') - return assert.strictEqual(actual.toString(), expected.toString()) + const expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt') + assert.strictEqual(actual.toString(), expected.toString()) }) - it('should extract an archive with unpacked dirs', () => { + it('should extract an archive with unpacked dirs', async () => { asar.extractAll('test/input/extractthis-unpack-dir.asar', 'tmp/extractthis-unpack-dir-api/') return compDirs('tmp/extractthis-unpack-dir-api/', 'test/expected/extractthis') }) - it('should handle multibyte characters in paths', () => { - return asar.createPackage('test/input/packthis-unicode-path/', 'tmp/packthis-unicode-path.asar') - .then(() => 
compFiles('tmp/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path.asar')) + it('should handle multibyte characters in paths', async () => { + await asar.createPackage('test/input/packthis-unicode-path/', 'tmp/packthis-unicode-path.asar') + return compFiles('tmp/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path.asar') }) - it('should extract a text file from archive with multibyte characters in path', function () { + it('should extract a text file from archive with multibyte characters in path', async () => { const actual = asar.extractFile('test/expected/packthis-unicode-path.asar', 'dir1/女の子.txt').toString('utf8') - let expected = fs.readFileSync('test/input/packthis-unicode-path/dir1/女の子.txt', 'utf8') + let expected = await fs.readFile('test/input/packthis-unicode-path/dir1/女の子.txt', 'utf8') return compFileLists(actual, expected) }) }) diff --git a/test/cli-spec.js b/test/cli-spec.js index 4d45f9cc..7752a537 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -1,157 +1,137 @@ 'use strict' -const { promisify } = require('util') - const assert = require('assert') const { exec } = require('mz/child_process') -const fs = promisify(process.versions.electron ? 
require('original-fs') : require('fs')) +const fs = require('../lib/wrapped-fs') const os = require('os') const path = require('path') -const rimraf = promisify(require('rimraf')) +const rimraf = require('rimraf') const compDirs = require('./util/compareDirectories') const compFileLists = require('./util/compareFileLists') const compFiles = require('./util/compareFiles') -function execAsar (args) { +async function execAsar (args) { return exec(`node bin/asar ${args}`) } -function assertAsarOutputMatches (args, expectedFilename) { - return Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) - .then(([stdout, expectedContents]) => compFileLists(stdout.join(''), `${expectedContents}\n`)) +async function assertAsarOutputMatches (args, expectedFilename) { + const [stdout, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) + return compFileLists(stdout.join(''), `${expectedContents}\n`) } describe('command line interface', function () { beforeEach(() => { rimraf.sync(path.join(__dirname, '..', 'tmp'), fs) }) - it('should create archive from directory', () => { - return execAsar('p test/input/packthis/ tmp/packthis-cli.asar') - .then(() => compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar')) + it('should create archive from directory', async () => { + await execAsar('p test/input/packthis/ tmp/packthis-cli.asar') + await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar') }) if (os.platform() === 'win32') { - it('should create archive from directory with windows-style path separators', () => { - return execAsar('p test\\input\\packthis\\ tmp\\packthis-cli.asar') - .then(() => compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar')) + it('should create archive from directory with windows-style path separators', async () => { + await execAsar('p test\\input\\packthis\\ tmp\\packthis-cli.asar') + await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar') }) } - 
it('should create archive from directory without hidden files', () => { - return execAsar('p test/input/packthis/ tmp/packthis-without-hidden-cli.asar --exclude-hidden') - .then(() => compFiles('tmp/packthis-without-hidden-cli.asar', 'test/expected/packthis-without-hidden.asar')) - }) - it('should create archive from directory with unpacked files', () => { - return execAsar('p test/input/packthis/ tmp/packthis-unpack-cli.asar --unpack *.png --exclude-hidden') - .then(() => { - assert.ok(fs.existsSync('tmp/packthis-unpack-cli.asar.unpacked/dir2/file2.png')) - return compFiles('tmp/packthis-unpack-cli.asar', 'test/expected/packthis-unpack.asar') - }) - }) - it('should list files/dirs in archive', () => { + it('should create archive from directory without hidden files', async () => { + await execAsar('p test/input/packthis/ tmp/packthis-without-hidden-cli.asar --exclude-hidden') + await compFiles('tmp/packthis-without-hidden-cli.asar', 'test/expected/packthis-without-hidden.asar') + }) + it('should create archive from directory with unpacked files', async () => { + await execAsar('p test/input/packthis/ tmp/packthis-unpack-cli.asar --unpack *.png --exclude-hidden') + assert.ok(fs.existsSync('tmp/packthis-unpack-cli.asar.unpacked/dir2/file2.png')) + await compFiles('tmp/packthis-unpack-cli.asar', 'test/expected/packthis-unpack.asar') + }) + it('should list files/dirs in archive', async () => { return assertAsarOutputMatches('l test/input/extractthis.asar', 'test/expected/extractthis-filelist.txt') }) - it('should list files/dirs in archive with unpacked files', () => { + it('should list files/dirs in archive with unpacked files', async () => { return assertAsarOutputMatches('l test/input/extractthis-unpack.asar', 'test/expected/extractthis-filelist.txt') }) - it('should list files/dirs with multibyte characters in path', () => { + it('should list files/dirs with multibyte characters in path', async () => { return assertAsarOutputMatches('l 
test/expected/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path-filelist.txt') }) // we need a way to set a path to extract to first, otherwise we pollute our project dir // or we fake it by setting our cwd, but I don't like that /* - it('should extract a text file from archive', () => { - return execAsar('ef test/input/extractthis.asar dir1/file1.txt') - .then(() => { - const actual = fs.readFileSync('tmp/file1.txt', 'utf8') - let expected = fs.readFileSync('test/expected/extractthis/dir1/file1.txt', 'utf8') - // on windows replace crlf with lf - if (os.platform() === 'win32') { - expected = expected.replace(/\r\n/g, '\n') - } - return assert.strictEqual(actual, expected) - }) + it('should extract a text file from archive', async () => { + await execAsar('ef test/input/extractthis.asar dir1/file1.txt') + const actual = await fs.readFile('tmp/file1.txt', 'utf8') + let expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt', 'utf8') + // on windows replace crlf with lf + if (os.platform() === 'win32') { + expected = expected.replace(/\r\n/g, '\n') + } + assert.strictEqual(actual, expected) }) - it('should extract a binary file from archive', () => { - return execAsar('ef test/input/extractthis.asar dir2/file2.png') - .then(() => { - const actual = fs.readFileSync('tmp/file2.png', 'utf8') - const expected = fs.readFileSync('test/expected/extractthis/dir2/file2.png', 'utf8') - assert.strictEqual(actual, expected) - }) + it('should extract a binary file from archive', async () => { + await execAsar('ef test/input/extractthis.asar dir2/file2.png') + const actual = await fs.readFile('tmp/file2.png', 'utf8') + const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png', 'utf8') + assert.strictEqual(actual, expected) }) */ - it('should extract an archive', () => { - return execAsar('e test/input/extractthis.asar tmp/extractthis-cli/') - .then(() => compDirs('tmp/extractthis-cli/', 'test/expected/extractthis')) - }) - 
it('should extract an archive with unpacked files', () => { - return execAsar('e test/input/extractthis-unpack.asar tmp/extractthis-unpack-cli/') - .then(() => compDirs('tmp/extractthis-unpack-cli/', 'test/expected/extractthis')) - }) - it('should create archive from directory with unpacked dirs', () => { - return execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-cli.asar --unpack-dir dir2 --exclude-hidden') - .then(() => { - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file3.txt')) - return compFiles('tmp/packthis-unpack-dir-cli.asar', 'test/expected/packthis-unpack-dir.asar') - }) - }) - it('should create archive from directory with unpacked dirs specified by glob pattern', () => { + it('should extract an archive', async () => { + await execAsar('e test/input/extractthis.asar tmp/extractthis-cli/') + return compDirs('tmp/extractthis-cli/', 'test/expected/extractthis') + }) + it('should extract an archive with unpacked files', async () => { + await execAsar('e test/input/extractthis-unpack.asar tmp/extractthis-unpack-cli/') + return compDirs('tmp/extractthis-unpack-cli/', 'test/expected/extractthis') + }) + it('should create archive from directory with unpacked dirs', async () => { + await execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-cli.asar --unpack-dir dir2 --exclude-hidden') + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file2.png')) + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file3.txt')) + return compFiles('tmp/packthis-unpack-dir-cli.asar', 'test/expected/packthis-unpack-dir.asar') + }) + it('should create archive from directory with unpacked dirs specified by glob pattern', async () => { const tmpFile = 'tmp/packthis-unpack-dir-glob-cli.asar' const tmpUnpacked = 'tmp/packthis-unpack-dir-glob-cli.asar.unpacked' - return execAsar(`p test/input/packthis-glob/ ${tmpFile} 
--unpack-dir "{x1,x2}" --exclude-hidden`) - .then(() => { - assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) - return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-glob.asar') - }) - }) - it('should create archive from directory with unpacked dirs specified by globstar pattern', () => { + await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "{x1,x2}" --exclude-hidden`) + assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) + assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) + return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-glob.asar') + }) + it('should create archive from directory with unpacked dirs specified by globstar pattern', async () => { const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar' const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked' - return execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "**/{x1,x2}" --exclude-hidden`) - .then(() => { - assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/y3/x1/file4.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/y3/z1/x2/file5.txt')) - return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-globstar.asar') - }) - }) - it('should create archive from directory with unpacked dirs specified by foo/{bar,baz} style pattern', () => { + await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "**/{x1,x2}" --exclude-hidden`) + assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) + assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) + assert.ok(fs.existsSync(tmpUnpacked + '/y3/x1/file4.txt')) + assert.ok(fs.existsSync(tmpUnpacked + '/y3/z1/x2/file5.txt')) + return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-globstar.asar') + }) + it('should create archive from directory with unpacked dirs specified by foo/{bar,baz} style pattern', async () 
=> { const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar' const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked' - return execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "y3/{x1,z1}" --exclude-hidden`) - .then(() => { - assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/x1/file4.txt'))) - assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/z1/x2/file5.txt'))) - }) + await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "y3/{x1,z1}" --exclude-hidden`) + assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/x1/file4.txt'))) + assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/z1/x2/file5.txt'))) }) - it('should list files/dirs in archive with unpacked dirs', () => { + it('should list files/dirs in archive with unpacked dirs', async () => { return assertAsarOutputMatches('l test/expected/packthis-unpack-dir.asar', 'test/expected/extractthis-filelist.txt') }) - it('should list files/dirs in archive with unpacked dirs & is-pack option', () => { + it('should list files/dirs in archive with unpacked dirs & is-pack option', async () => { return assertAsarOutputMatches('l test/expected/packthis-unpack-dir.asar --is-pack', 'test/expected/extractthis-filelist-with-option.txt') }) - it('should extract an archive with unpacked dirs', () => { - return execAsar('e test/input/extractthis-unpack-dir.asar tmp/extractthis-unpack-dir/') - .then(() => compDirs('tmp/extractthis-unpack-dir/', 'test/expected/extractthis')) - }) - it('should create archive from directory with unpacked dirs and files', () => { - return execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-file-cli.asar --unpack *.png --unpack-dir dir2 --exclude-hidden') - .then(() => { - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file3.txt')) - return compFiles('tmp/packthis-unpack-dir-file-cli.asar', 'test/expected/packthis-unpack-dir.asar') - }) - 
}) - it('should create archive from directory with unpacked subdirs and files', () => { - return execAsar('p test/input/packthis-subdir/ tmp/packthis-unpack-subdir-cli.asar --unpack *.txt --unpack-dir dir2/subdir --exclude-hidden') - .then(() => { - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/file0.txt')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir1/file1.txt')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file3.txt')) - }) + it('should extract an archive with unpacked dirs', async () => { + await execAsar('e test/input/extractthis-unpack-dir.asar tmp/extractthis-unpack-dir/') + return compDirs('tmp/extractthis-unpack-dir/', 'test/expected/extractthis') + }) + it('should create archive from directory with unpacked dirs and files', async () => { + await execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-file-cli.asar --unpack *.png --unpack-dir dir2 --exclude-hidden') + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file2.png')) + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file3.txt')) + return compFiles('tmp/packthis-unpack-dir-file-cli.asar', 'test/expected/packthis-unpack-dir.asar') + }) + it('should create archive from directory with unpacked subdirs and files', async () => { + await execAsar('p test/input/packthis-subdir/ tmp/packthis-unpack-subdir-cli.asar --unpack *.txt --unpack-dir dir2/subdir --exclude-hidden') + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/file0.txt')) + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir1/file1.txt')) + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file2.png')) + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file3.txt')) }) }) diff --git 
a/test/util/compareDirectories.js b/test/util/compareDirectories.js index d29fa258..19a98edd 100644 --- a/test/util/compareDirectories.js +++ b/test/util/compareDirectories.js @@ -1,54 +1,49 @@ 'use strict' const _ = require('lodash') -const fs = process.versions.electron ? require('original-fs') : require('fs') +const fs = require('../../lib/wrapped-fs') const path = require('path') const crawlFilesystem = require('../../lib/crawlfs') -module.exports = function (dirA, dirB) { - return Promise.all([crawlFilesystem(dirA, null), crawlFilesystem(dirB, null)]) - .then(([[pathsA, metadataA], [pathsB, metadataB]]) => { - const relativeA = _.map(pathsA, pathAItem => path.relative(dirA, pathAItem)) - const relativeB = _.map(pathsB, pathBItem => path.relative(dirB, pathBItem)) - const onlyInA = _.difference(relativeA, relativeB) - const onlyInB = _.difference(relativeB, relativeA) - const inBoth = _.intersection(pathsA, pathsB) - const differentFiles = [] - const errorMsgBuilder = [] - for (const filename of inBoth) { - const typeA = metadataA[filename].type - const typeB = metadataB[filename].type - // skip if both are directories - if (typeA === 'directory' && typeB === 'directory') { - continue - } - // something is wrong if the types don't match up - if (typeA !== typeB) { - differentFiles.push(filename) - continue - } - const fileContentA = fs.readFileSync(path.join(dirA, filename), 'utf8') - const fileContentB = fs.readFileSync(path.join(dirB, filename), 'utf8') - if (fileContentA !== fileContentB) { - differentFiles.push(filename) - } - } - if (onlyInA.length) { - errorMsgBuilder.push(`\tEntries only in '${dirA}':`) - for (const file of onlyInA) { errorMsgBuilder.push(`\t ${file}`) } - } - if (onlyInB.length) { - errorMsgBuilder.push(`\tEntries only in '${dirB}':`) - for (const file of onlyInB) { errorMsgBuilder.push(`\t ${file}`) } - } - if (differentFiles.length) { - errorMsgBuilder.push('\tDifferent file content:') - for (const file of differentFiles) { 
errorMsgBuilder.push(`\t ${file}`) } - } - if (errorMsgBuilder.length) { - throw new Error('\n' + errorMsgBuilder.join('\n')) - } - - return Promise.resolve() - }) +module.exports = async function (dirA, dirB) { + const [[pathsA, metadataA], [pathsB, metadataB]] = await Promise.all([crawlFilesystem(dirA, null), crawlFilesystem(dirB, null)]) + const relativeA = _.map(pathsA, pathAItem => path.relative(dirA, pathAItem)) + const relativeB = _.map(pathsB, pathBItem => path.relative(dirB, pathBItem)) + const onlyInA = _.difference(relativeA, relativeB) + const onlyInB = _.difference(relativeB, relativeA) + const inBoth = _.intersection(pathsA, pathsB) + const differentFiles = [] + const errorMsgBuilder = [] + for (const filename of inBoth) { + const typeA = metadataA[filename].type + const typeB = metadataB[filename].type + // skip if both are directories + if (typeA === 'directory' && typeB === 'directory') { + continue + } + // something is wrong if the types don't match up + if (typeA !== typeB) { + differentFiles.push(filename) + continue + } + const [fileContentA, fileContentB] = await Promise.all([dirA, dirB].map(dir => fs.readFile(path.join(dir, filename), 'utf8'))) + if (fileContentA !== fileContentB) { + differentFiles.push(filename) + } + } + if (onlyInA.length) { + errorMsgBuilder.push(`\tEntries only in '${dirA}':`) + for (const file of onlyInA) { errorMsgBuilder.push(`\t ${file}`) } + } + if (onlyInB.length) { + errorMsgBuilder.push(`\tEntries only in '${dirB}':`) + for (const file of onlyInB) { errorMsgBuilder.push(`\t ${file}`) } + } + if (differentFiles.length) { + errorMsgBuilder.push('\tDifferent file content:') + for (const file of differentFiles) { errorMsgBuilder.push(`\t ${file}`) } + } + if (errorMsgBuilder.length) { + throw new Error('\n' + errorMsgBuilder.join('\n')) + } } diff --git a/test/util/compareFiles.js b/test/util/compareFiles.js index ad5b02d6..38620796 100644 --- a/test/util/compareFiles.js +++ b/test/util/compareFiles.js @@ -1,11 +1,9 @@ 
'use strict' -const { promisify } = require('util') - const assert = require('assert') -const fs = promisify(process.versions.electron ? require('original-fs') : require('fs')) +const fs = require('../../lib/wrapped-fs') -module.exports = function (actualFilePath, expectedFilePath) { - return Promise.all([fs.readFile(actualFilePath, 'utf8'), fs.readFile(expectedFilePath, 'utf8')]) - .then(([actual, expected]) => assert.strictEqual(actual, expected)) +module.exports = async function (actualFilePath, expectedFilePath) { + const [actual, expected] = await Promise.all([fs.readFile(actualFilePath, 'utf8'), fs.readFile(expectedFilePath, 'utf8')]) + assert.strictEqual(actual, expected) } From 275693b914b03b5875fc8fe7e4f7ccf6ae67ecf6 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Wed, 17 Apr 2019 21:53:24 -0700 Subject: [PATCH 07/16] chore: remove Node 6 from the Node versions tested in CI --- appveyor.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 49df57a8..5c440d8d 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -8,7 +8,6 @@ skip_tags: true environment: matrix: - - nodejs_version: "6" - nodejs_version: "8" - nodejs_version: "10" From 9aeef0e30c1a0265be59cd915570b9be03790cad Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Sat, 20 Apr 2019 08:09:58 -0700 Subject: [PATCH 08/16] refactor: use async readFile method --- lib/asar.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/asar.js b/lib/asar.js index 3742639d..25b38a1e 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -65,7 +65,7 @@ module.exports.createPackageFromFiles = async function (src, dest, filenames, me let filenamesSorted = [] if (options.ordering) { - const orderingFiles = fs.readFileSync(options.ordering).toString().split('\n').map(function (line) { + const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => { if (line.includes(':')) { line = line.split(':').pop() } line = line.trim() if (line.startsWith('/')) { line = 
line.slice(1) } From 2bbec92ac941dd57f6c323fff1f846a6cb734e74 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Sat, 20 Apr 2019 08:11:23 -0700 Subject: [PATCH 09/16] refactor: simplify pattern construction --- lib/asar.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/asar.js b/lib/asar.js index 25b38a1e..3840af8a 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -34,10 +34,7 @@ module.exports.createPackageWithOptions = async function (src, dest, options) { const globOptions = options.globOptions ? options.globOptions : {} globOptions.dot = options.dot === undefined ? true : options.dot - let pattern = src + '/**/*' - if (options.pattern) { - pattern = src + options.pattern - } + const pattern = src + (options.pattern ? options.pattern : '/**/*') const [filenames, metadata] = await crawlFilesystem(pattern, globOptions) return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options) From 25df645baa6019e6dc3440fda96bd1021edc50f4 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Sat, 20 Apr 2019 08:15:08 -0700 Subject: [PATCH 10/16] docs: reformat createPackageFromFiles doc comment --- lib/asar.js | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lib/asar.js b/lib/asar.js index 3840af8a..307c26ff 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -40,13 +40,14 @@ module.exports.createPackageWithOptions = async function (src, dest, options) { return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options) } -/* -createPackageFromFiles - Create an asar-archive from a list of filenames -src: Base path. All files are relative to this. -dest: Archive filename (& path). -filenames: Array of filenames relative to src. -metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional) -options: The options. +/** + * Create an ASAR archive from a list of filenames. + * + * @param {string} src: Base path. All files are relative to this. 
+ * @param {string} dest: Archive filename (& path). + * @param {array} filenames: List of filenames relative to src. + * @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional) + * @param {object} options: Options passed to `createPackageWithOptions`. */ module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) { if (typeof metadata === 'undefined' || metadata === null) { metadata = {} } From e4f93531df099dd26cc70b72959d2e1a889b3335 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Sat, 20 Apr 2019 08:55:36 -0700 Subject: [PATCH 11/16] refactor: DRY up determineFileType --- lib/asar.js | 12 +++--------- lib/crawlfs.js | 11 +++++------ 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/lib/asar.js b/lib/asar.js index 307c26ff..4f3cda41 100644 --- a/lib/asar.js +++ b/lib/asar.js @@ -102,16 +102,10 @@ module.exports.createPackageFromFiles = async function (src, dest, filenames, me } const handleFile = async function (filename) { - let file = metadata[filename] - let type - if (!file) { - const stat = await fs.lstat(filename) - if (stat.isDirectory()) { type = 'directory' } - if (stat.isFile()) { type = 'file' } - if (stat.isSymbolicLink()) { type = 'link' } - file = { stat, type } - metadata[filename] = file + if (!metadata[filename]) { + metadata[filename] = await crawlFilesystem.determineFileType(filename) } + const file = metadata[filename] let shouldUnpack switch (file.type) { diff --git a/lib/crawlfs.js b/lib/crawlfs.js index cff46533..cc4a7966 100644 --- a/lib/crawlfs.js +++ b/lib/crawlfs.js @@ -8,20 +8,18 @@ const glob = promisify(require('glob')) async function determineFileType (filename) { const stat = await fs.lstat(filename) if (stat.isFile()) { - return [filename, { type: 'file', stat: stat }] + return { type: 'file', stat } } else if (stat.isDirectory()) { - return [filename, { type: 'directory', stat: stat }] + return { type: 
'directory', stat } } else if (stat.isSymbolicLink()) { - return [filename, { type: 'link', stat: stat }] + return { type: 'link', stat } } - - return [filename, undefined] } module.exports = async function (dir, options) { const metadata = {} const crawled = await glob(dir, options) - const results = await Promise.all(crawled.map(filename => determineFileType(filename))) + const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)])) const filenames = results.map(([filename, type]) => { if (type) { metadata[filename] = type @@ -30,3 +28,4 @@ module.exports = async function (dir, options) { }) return [filenames, metadata] } +module.exports.determineFileType = determineFileType From 70bf7902a5f1e8db0afcc7e8179682610893a8e2 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Sat, 20 Apr 2019 08:57:22 -0700 Subject: [PATCH 12/16] refactor: better use of async/await syntax --- lib/filesystem.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/filesystem.js b/lib/filesystem.js index 559f3887..e71a310f 100644 --- a/lib/filesystem.js +++ b/lib/filesystem.js @@ -89,10 +89,9 @@ class Filesystem { stream.pipe(transformed).pipe(out) return out.on('close', async () => { - const stat = await fs.lstat(tmpfile.path) file.transformed = { path: tmpfile.path, - stat + stat: await fs.lstat(tmpfile.path) } return handler(resolve, reject) }) From 6b23b479386ecd5f2ca781252129564054212824 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Thu, 25 Apr 2019 17:28:39 -0700 Subject: [PATCH 13/16] chore: upgrade electron & mocha --- package-lock.json | 36 +++++++++++++++++------------------- package.json | 4 ++-- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4522cbe0..dc77a42c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4998,9 +4998,9 @@ } }, "electron": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/electron/-/electron-4.1.5.tgz", - 
"integrity": "sha512-0VZzUd4vZaUeSLdxJI/XMrMnPN7AROjPFZOiNgZZkYRUUEjGHfaSAbCJyxuXtii52KGhzGL0JgW0q5QmQ3ykKQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/electron/-/electron-5.0.0.tgz", + "integrity": "sha512-++emIe4vLihiYiAVL+E8DT5vSNVFEIuQCRxA+VfpDRVBcog85UB28vi4ogRmMOK3UffzKdWV6e1jqp3T0KpBoA==", "dev": true, "requires": { "@types/node": "^10.12.18", @@ -6267,6 +6267,14 @@ "ignore": "^4.0.3", "pify": "^4.0.1", "slash": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + } } }, "graceful-fs": { @@ -7034,14 +7042,6 @@ "pify": "^2.0.0", "pinkie-promise": "^2.0.0", "strip-bom": "^2.0.0" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - } } }, "locate-path": { @@ -11477,14 +11477,6 @@ "graceful-fs": "^4.1.2", "pify": "^2.0.0", "pinkie-promise": "^2.0.0" - }, - "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", - "dev": true - } } }, "pend": { @@ -11499,6 +11491,12 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", "dev": true }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + }, "pinkie": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", diff --git a/package.json b/package.json index e25ead58..8964ad0f 100644 --- a/package.json +++ b/package.json @@ -38,10 +38,10 @@ }, "devDependencies": { "@continuous-auth/semantic-release-npm": "^1.0.3", - "electron": "^4.1.4", + "electron": "^5.0.0", "electron-mocha": "^8.0.1", 
"lodash": "^4.17.11", - "mocha": "^6.1.3", + "mocha": "^6.1.4", "mz": "^2.7.0", "rimraf": "^2.6.3", "semantic-release": "^15.13.3", From 0f3956a39ece1c302b12c42e4350857b47a67fe8 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Fri, 26 Apr 2019 18:11:26 -0700 Subject: [PATCH 14/16] chore: remove mz devDependency --- package-lock.json | 35 ----------------------------------- package.json | 1 - test/cli-spec.js | 7 +++++-- 3 files changed, 5 insertions(+), 38 deletions(-) diff --git a/package-lock.json b/package-lock.json index dc77a42c..b5af0149 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3544,12 +3544,6 @@ "integrity": "sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk=", "dev": true }, - "any-promise": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", - "integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8=", - "dev": true - }, "argparse": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", @@ -7562,17 +7556,6 @@ "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", "dev": true }, - "mz": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", - "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", - "dev": true, - "requires": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" - } - }, "nanomatch": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", @@ -12785,24 +12768,6 @@ "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", "dev": true }, - "thenify": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.0.tgz", - "integrity": "sha1-5p44obq+lpsBCCB5eLn2K4hgSDk=", - "dev": true, - "requires": { - "any-promise": "^1.0.0" - } - }, - "thenify-all": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", - "integrity": 
"sha1-GhkY1ALY/D+Y+/I02wvMjMEOlyY=", - "dev": true, - "requires": { - "thenify": ">= 3.1.0 < 4" - } - }, "throttleit": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-0.0.2.tgz", diff --git a/package.json b/package.json index 8964ad0f..b51e2911 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,6 @@ "electron-mocha": "^8.0.1", "lodash": "^4.17.11", "mocha": "^6.1.4", - "mz": "^2.7.0", "rimraf": "^2.6.3", "semantic-release": "^15.13.3", "standard": "^12.0.1", diff --git a/test/cli-spec.js b/test/cli-spec.js index 7752a537..c269b33d 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -1,18 +1,21 @@ 'use strict' const assert = require('assert') -const { exec } = require('mz/child_process') +const childProcess = require('child_process') const fs = require('../lib/wrapped-fs') const os = require('os') const path = require('path') +const { promisify } = require('util') const rimraf = require('rimraf') const compDirs = require('./util/compareDirectories') const compFileLists = require('./util/compareFileLists') const compFiles = require('./util/compareFiles') +childProcess.exec = promisify(childProcess.exec) + async function execAsar (args) { - return exec(`node bin/asar ${args}`) + return childProcess.exec(`node bin/asar ${args}`) } async function assertAsarOutputMatches (args, expectedFilename) { From dbc8e8f02371cc5ceedddeb479bbece5d321911e Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Fri, 26 Apr 2019 18:19:51 -0700 Subject: [PATCH 15/16] Fix return value of execAsar --- test/cli-spec.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/cli-spec.js b/test/cli-spec.js index c269b33d..3e623987 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -19,8 +19,8 @@ async function execAsar (args) { } async function assertAsarOutputMatches (args, expectedFilename) { - const [stdout, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) - return 
compFileLists(stdout.join(''), `${expectedContents}\n`) + const [{ stdout, stderr }, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) + return compFileLists(stdout, `${expectedContents}\n`) } describe('command line interface', function () { From 5929390c081361f6e934aacfea59c44ae2be08e7 Mon Sep 17 00:00:00 2001 From: Mark Lee Date: Fri, 26 Apr 2019 18:25:55 -0700 Subject: [PATCH 16/16] Remove unused variable --- test/cli-spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/cli-spec.js b/test/cli-spec.js index 3e623987..2110a6d8 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -19,7 +19,7 @@ async function execAsar (args) { } async function assertAsarOutputMatches (args, expectedFilename) { - const [{ stdout, stderr }, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) + const [{ stdout }, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) return compFileLists(stdout, `${expectedContents}\n`) }