From 8dce62c7fe56206dd8f6c111c7aaa05f9f98d615 Mon Sep 17 00:00:00 2001
From: Richard Lau
Date: Tue, 22 Nov 2022 10:15:39 -0500
Subject: [PATCH 001/191] deps: V8: cherry-pick 5fe919f78321

Original commit message:

    PPC: clear VXCVI before doing a conversion

    This bit may not get cleared automatically and could show results
    from older executed instructions.

    Change-Id: I5976f9a6c5bf87b1a63ef0f35493b222729e20f6
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3812037
    Reviewed-by: Junliang Yan
    Commit-Queue: Milad Farazmand
    Cr-Commit-Position: refs/heads/main@{#82237}

Refs: https://github.com/v8/v8/commit/5fe919f783214c978cb174425554ede8fc1eac5f
PR-URL: https://github.com/nodejs/node/pull/45587
Reviewed-By: Michael Dawson
Reviewed-By: Jiawen Geng
---
 common.gypi                                           | 2 +-
 deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/common.gypi b/common.gypi
index f3e47909677639..5372c8d42de531 100644
--- a/common.gypi
+++ b/common.gypi
@@ -36,7 +36,7 @@
 
   # Reset this number to 0 on major V8 upgrades.
  # Increment by one for each non-official patch applied to deps/v8.
-  'v8_embedder_string': '-node.21',
+  'v8_embedder_string': '-node.22',
 
   ##### V8 defaults for Node.js #####

diff --git a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
index 796ddaa4d17385..31ce3e5a8f407d 100644
--- a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
+++ b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
@@ -1455,6 +1455,7 @@ bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
       fcmpu(src.fp(), kScratchDoubleReg);
       bunordered(trap);
 
+      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
       fctiwz(kScratchDoubleReg, src.fp());
       MovDoubleLowToInt(dst.gp(), kScratchDoubleReg);
       mcrfs(cr7, VXCVI);
@@ -1463,6 +1464,7 @@ bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
     }
     case kExprI32UConvertF64:
     case kExprI32UConvertF32: {
+      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
       ConvertDoubleToUnsignedInt64(src.fp(), r0, kScratchDoubleReg,
                                    kRoundToZero);
       mcrfs(cr7, VXCVI);  // extract FPSCR field containing VXCVI into cr7
@@ -1478,6 +1480,7 @@ bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
       fcmpu(src.fp(), kScratchDoubleReg);
       bunordered(trap);
 
+      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
       fctidz(kScratchDoubleReg, src.fp());
       MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
       mcrfs(cr7, VXCVI);
@@ -1490,6 +1493,7 @@ bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
       fcmpu(src.fp(), kScratchDoubleReg);
       bunordered(trap);
 
+      mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
       fctiduz(kScratchDoubleReg, src.fp());
       MovDoubleToInt64(dst.gp(), kScratchDoubleReg);
       mcrfs(cr7, VXCVI);

From 4844935ff3e98af6b31e4a773beab06fa94d915d Mon Sep 17 00:00:00 2001
From: npm CLI robot
Date: Tue, 6 Dec 2022 22:18:33 -0500
Subject: [PATCH 002/191] deps: upgrade npm to 9.1.3

PR-URL: https://github.com/nodejs/node/pull/45693
Backport-PR-URL: https://github.com/nodejs/node/pull/46230
Reviewed-By: Rich Trott
Reviewed-By: Luigi Pinca
Reviewed-By: Ruy Adorno
---
 deps/npm/README.md | 6 +-
 deps/npm/bin/npx-cli.js | 2 +
 deps/npm/docs/README.md | 5 +
 deps/npm/docs/content/commands/npm-access.md | 32 +-
 deps/npm/docs/content/commands/npm-adduser.md | 29 +-
 deps/npm/docs/content/commands/npm-audit.md | 16 +-
 deps/npm/docs/content/commands/npm-bin.md | 41 -
 deps/npm/docs/content/commands/npm-bugs.md | 5 +-
 deps/npm/docs/content/commands/npm-cache.md | 3 +-
 deps/npm/docs/content/commands/npm-ci.md | 44 +-
 deps/npm/docs/content/commands/npm-config.md | 17 +-
 deps/npm/docs/content/commands/npm-dedupe.md | 44 +-
 .../npm/docs/content/commands/npm-dist-tag.md | 4 +-
 deps/npm/docs/content/commands/npm-docs.md | 8 +-
 deps/npm/docs/content/commands/npm-edit.md | 4 +-
 deps/npm/docs/content/commands/npm-exec.md | 18 +-
 .../docs/content/commands/npm-find-dupes.md | 44 +-
 deps/npm/docs/content/commands/npm-fund.md | 10 +-
 .../content/commands/npm-install-ci-test.md | 44 +-
 .../docs/content/commands/npm-install-test.md | 44 +-
 deps/npm/docs/content/commands/npm-install.md | 46 +-
 deps/npm/docs/content/commands/npm-link.md | 44 +-
 deps/npm/docs/content/commands/npm-login.md | 87 +
 deps/npm/docs/content/commands/npm-ls.md | 10 +-
 deps/npm/docs/content/commands/npm-pkg.md | 3 +-
 deps/npm/docs/content/commands/npm-prune.md | 8 +-
 deps/npm/docs/content/commands/npm-publish.md | 20 +-
 deps/npm/docs/content/commands/npm-rebuild.md | 8 +-
 deps/npm/docs/content/commands/npm-repo.md | 7 +-
 .../docs/content/commands/npm-run-script.md | 9 +-
 .../docs/content/commands/npm-set-script.md | 91 -
 .../docs/content/commands/npm-uninstall.md | 8 +-
 deps/npm/docs/content/commands/npm-update.md | 50 +-
 deps/npm/docs/content/commands/npm-version.md | 14 +-
 deps/npm/docs/content/commands/npm-view.md | 6 +
 deps/npm/docs/content/commands/npm.md | 2 +-
 .../docs/content/configuring-npm/folders.md | 50 +-
 .../npm/docs/content/configuring-npm/npmrc.md | 9 +
 .../content/configuring-npm/package-json.md | 42 +-
 deps/npm/docs/content/using-npm/config.md | 235 +-
 .../content/using-npm/dependency-selectors.md | 43 +-
 deps/npm/docs/content/using-npm/logging.md | 21 +-
 .../docs/content/using-npm/package-spec.md | 6 +-
 deps/npm/docs/content/using-npm/registry.md | 13 +-
 deps/npm/docs/content/using-npm/scope.md | 2 +-
 deps/npm/docs/content/using-npm/scripts.md | 6 +-
 deps/npm/docs/content/using-npm/workspaces.md | 5 +
 deps/npm/docs/output/commands/npm-access.html | 36 +-
 .../npm/docs/output/commands/npm-adduser.html | 29 +-
 deps/npm/docs/output/commands/npm-audit.html | 45 +-
 deps/npm/docs/output/commands/npm-bugs.html | 10 +-
 deps/npm/docs/output/commands/npm-cache.html | 7 +-
 deps/npm/docs/output/commands/npm-ci.html | 56 +-
 .../docs/output/commands/npm-completion.html | 7 +-
 deps/npm/docs/output/commands/npm-config.html | 30 +-
 deps/npm/docs/output/commands/npm-dedupe.html | 52 +-
 .../docs/output/commands/npm-deprecate.html | 9 +-
 deps/npm/docs/output/commands/npm-diff.html | 19 +-
 .../docs/output/commands/npm-dist-tag.html | 13 +-
 deps/npm/docs/output/commands/npm-docs.html | 13 +-
 deps/npm/docs/output/commands/npm-doctor.html | 5 +-
 deps/npm/docs/output/commands/npm-edit.html | 9 +-
 deps/npm/docs/output/commands/npm-exec.html | 25 +-
 .../npm/docs/output/commands/npm-explain.html | 9 +-
 .../npm/docs/output/commands/npm-explore.html | 7 +-
 .../docs/output/commands/npm-find-dupes.html | 50 +-
 deps/npm/docs/output/commands/npm-fund.html | 19 +-
 .../docs/output/commands/npm-help-search.html | 5 +-
 deps/npm/docs/output/commands/npm-help.html | 5 +-
 deps/npm/docs/output/commands/npm-hook.html | 19 +-
 deps/npm/docs/output/commands/npm-init.html | 17 +-
 .../output/commands/npm-install-ci-test.html | 50 +-
 .../output/commands/npm-install-test.html | 50 +-
 .../npm/docs/output/commands/npm-install.html | 90 +-
 deps/npm/docs/output/commands/npm-link.html | 58 +-
 .../commands/{npm-bin.html => npm-login.html} | 74 +-
 deps/npm/docs/output/commands/npm-logout.html | 5 +-
 deps/npm/docs/output/commands/npm-ls.html | 15 +-
 deps/npm/docs/output/commands/npm-org.html | 17 +-
 .../docs/output/commands/npm-outdated.html | 9 +-
 deps/npm/docs/output/commands/npm-owner.html | 5 +-
 deps/npm/docs/output/commands/npm-pack.html | 5 +-
 deps/npm/docs/output/commands/npm-ping.html | 9 +-
 deps/npm/docs/output/commands/npm-pkg.html | 36 +-
 deps/npm/docs/output/commands/npm-prefix.html | 9 +-
 .../npm/docs/output/commands/npm-profile.html | 7 +-
 deps/npm/docs/output/commands/npm-prune.html | 13 +-
 .../npm/docs/output/commands/npm-publish.html | 23 +-
 deps/npm/docs/output/commands/npm-query.html | 11 +-
 .../npm/docs/output/commands/npm-rebuild.html | 13 +-
 deps/npm/docs/output/commands/npm-repo.html | 12 +-
 .../npm/docs/output/commands/npm-restart.html | 5 +-
 deps/npm/docs/output/commands/npm-root.html | 7 +-
 .../docs/output/commands/npm-run-script.html | 20 +-
 deps/npm/docs/output/commands/npm-search.html | 5 +-
 .../docs/output/commands/npm-set-script.html | 232 --
 .../docs/output/commands/npm-shrinkwrap.html | 5 +-
 deps/npm/docs/output/commands/npm-star.html | 5 +-
 deps/npm/docs/output/commands/npm-stars.html | 5 +-
 deps/npm/docs/output/commands/npm-start.html | 9 +-
 deps/npm/docs/output/commands/npm-stop.html | 9 +-
 deps/npm/docs/output/commands/npm-team.html | 17 +-
 deps/npm/docs/output/commands/npm-test.html | 9 +-
 deps/npm/docs/output/commands/npm-token.html | 9 +-
 .../docs/output/commands/npm-uninstall.html | 17 +-
 .../docs/output/commands/npm-unpublish.html | 7 +-
 deps/npm/docs/output/commands/npm-unstar.html | 5 +-
 deps/npm/docs/output/commands/npm-update.html | 70 +-
 .../npm/docs/output/commands/npm-version.html | 25 +-
 deps/npm/docs/output/commands/npm-view.html | 28 +-
 deps/npm/docs/output/commands/npm-whoami.html | 5 +-
 deps/npm/docs/output/commands/npm.html | 7 +-
 deps/npm/docs/output/commands/npx.html | 5 +-
 .../docs/output/configuring-npm/folders.html | 56 +-
 .../docs/output/configuring-npm/install.html | 3 +-
 .../configuring-npm/npm-shrinkwrap-json.html | 3 +-
 .../docs/output/configuring-npm/npmrc.html | 19 +-
 .../output/configuring-npm/package-json.html | 140 +-
 .../configuring-npm/package-lock-json.html | 3 +-
 deps/npm/docs/output/using-npm/config.html | 220 +-
 .../using-npm/dependency-selectors.html | 59 +-
 .../npm/docs/output/using-npm/developers.html | 15 +-
 deps/npm/docs/output/using-npm/logging.html | 22 +-
 deps/npm/docs/output/using-npm/orgs.html | 21 +-
 .../docs/output/using-npm/package-spec.html | 9 +-
 deps/npm/docs/output/using-npm/registry.html | 18 +-
 deps/npm/docs/output/using-npm/removal.html | 13 +-
 deps/npm/docs/output/using-npm/scope.html | 17 +-
 deps/npm/docs/output/using-npm/scripts.html | 23 +-
 .../npm/docs/output/using-npm/workspaces.html | 8 +-
 deps/npm/lib/arborist-cmd.js | 14 +
 deps/npm/lib/auth/legacy.js | 100 -
 deps/npm/lib/auth/oauth.js | 8 -
 deps/npm/lib/auth/saml.js | 8 -
 deps/npm/lib/auth/sso.js | 81 -
 deps/npm/lib/base-command.js | 8 +
 deps/npm/lib/cli.js | 119 +-
 deps/npm/lib/commands/access.js | 284 +-
 deps/npm/lib/commands/adduser.js | 65 +-
 deps/npm/lib/commands/audit.js | 2 +-
 deps/npm/lib/commands/bin.js | 23 -
 deps/npm/lib/commands/birthday.js | 17 -
 deps/npm/lib/commands/cache.js | 3 +-
 deps/npm/lib/commands/ci.js | 1 -
 deps/npm/lib/commands/completion.js | 2 +-
 deps/npm/lib/commands/config.js | 84 +-
 deps/npm/lib/commands/dedupe.js | 3 +-
 deps/npm/lib/commands/deprecate.js | 8 +-
 deps/npm/lib/commands/diff.js | 4 +-
 deps/npm/lib/commands/explore.js | 1 -
 deps/npm/lib/commands/find-dupes.js | 3 +-
 deps/npm/lib/commands/hook.js | 3 -
 deps/npm/lib/commands/init.js | 13 +-
 deps/npm/lib/commands/install.js | 4 +-
 deps/npm/lib/commands/link.js | 5 +-
 deps/npm/lib/commands/login.js | 53 +
 deps/npm/lib/commands/ls.js | 2 +-
 deps/npm/lib/commands/publish.js | 8 +-
 deps/npm/lib/commands/query.js | 5 +-
 deps/npm/lib/commands/rebuild.js | 2 +-
 deps/npm/lib/commands/run-script.js | 1 -
 deps/npm/lib/commands/set-script.js | 96 -
 deps/npm/lib/commands/unpublish.js | 4 +-
 deps/npm/lib/commands/update.js | 3 +-
 deps/npm/lib/commands/view.js | 5 +-
 deps/npm/lib/npm.js | 79 +-
 deps/npm/lib/package-url-cmd.js | 2 +
 deps/npm/lib/utils/auth.js | 78 +
 deps/npm/lib/utils/cmd-list.js | 7 +-
 .../lib/utils/completion/installed-shallow.js | 8 +-
 deps/npm/lib/utils/config/definitions.js | 225 +-
 deps/npm/lib/utils/display.js | 50 +-
 deps/npm/lib/utils/error-message.js | 10 +-
 deps/npm/lib/utils/exit-handler.js | 39 +-
 deps/npm/lib/utils/explain-eresolve.js | 35 +-
 deps/npm/lib/utils/get-identity.js | 4 +-
 deps/npm/lib/utils/log-file.js | 72 +-
 deps/npm/lib/utils/npm-usage.js | 56 +-
 deps/npm/lib/utils/open-url-prompt.js | 12 +-
 deps/npm/lib/utils/open-url.js | 18 +-
 deps/npm/lib/utils/otplease.js | 6 +-
 deps/npm/lib/utils/tar.js | 4 +-
 deps/npm/lib/utils/timers.js | 46 +-
 deps/npm/lib/utils/update-notifier.js | 4 +-
 deps/npm/man/man1/npm-access.1 | 165 +-
 deps/npm/man/man1/npm-adduser.1 | 101 +-
 deps/npm/man/man1/npm-audit.1 | 534 ++--
 deps/npm/man/man1/npm-bin.1 | 52 -
 deps/npm/man/man1/npm-bugs.1 | 141 +-
 deps/npm/man/man1/npm-cache.1 | 123 +-
 deps/npm/man/man1/npm-ci.1 | 409 ++-
 deps/npm/man/man1/npm-completion.1 | 39 +-
 deps/npm/man/man1/npm-config.1 | 206 +-
 deps/npm/man/man1/npm-dedupe.1 | 337 +--
 deps/npm/man/man1/npm-deprecate.1 | 77 +-
 deps/npm/man/man1/npm-diff.1 | 386 ++-
 deps/npm/man/man1/npm-dist-tag.1 | 179 +-
 deps/npm/man/man1/npm-docs.1 | 140 +-
 deps/npm/man/man1/npm-doctor.1 | 146 +-
 deps/npm/man/man1/npm-edit.1 | 53 +-
 deps/npm/man/man1/npm-exec.1 | 436 ++-
 deps/npm/man/man1/npm-explain.1 | 126 +-
 deps/npm/man/man1/npm-explore.1 | 53 +-
 deps/npm/man/man1/npm-find-dupes.1 | 268 +-
 deps/npm/man/man1/npm-fund.1 | 177 +-
 deps/npm/man/man1/npm-help-search.1 | 42 +-
 deps/npm/man/man1/npm-help.1 | 54 +-
 deps/npm/man/man1/npm-hook.1 | 91 +-
 deps/npm/man/man1/npm-init.1 | 374 +--
 deps/npm/man/man1/npm-install-ci-test.1 | 353 +--
 deps/npm/man/man1/npm-install-test.1 | 354 +--
 deps/npm/man/man1/npm-install.1 | 968 +++---
 deps/npm/man/man1/npm-link.1 | 435 ++-
 deps/npm/man/man1/npm-login.1 | 95 +
 deps/npm/man/man1/npm-logout.1 | 70 +-
 deps/npm/man/man1/npm-ls.1 | 357 +--
 deps/npm/man/man1/npm-org.1 | 101 +-
 deps/npm/man/man1/npm-outdated.1 | 245 +-
 deps/npm/man/man1/npm-owner.1 | 142 +-
 deps/npm/man/man1/npm-pack.1 | 161 +-
 deps/npm/man/man1/npm-ping.1 | 44 +-
 deps/npm/man/man1/npm-pkg.1 | 320 +--
 deps/npm/man/man1/npm-prefix.1 | 67 +-
 deps/npm/man/man1/npm-profile.1 | 178 +-
 deps/npm/man/man1/npm-prune.1 | 218 +-
 deps/npm/man/man1/npm-publish.1 | 288 +-
 deps/npm/man/man1/npm-query.1 | 201 +-
 deps/npm/man/man1/npm-rebuild.1 | 192 +-
 deps/npm/man/man1/npm-repo.1 | 129 +-
 deps/npm/man/man1/npm-restart.1 | 96 +-
 deps/npm/man/man1/npm-root.1 | 59 +-
 deps/npm/man/man1/npm-run-script.1 | 268 +-
 deps/npm/man/man1/npm-search.1 | 189 +-
 deps/npm/man/man1/npm-set-script.1 | 116 -
 deps/npm/man/man1/npm-shrinkwrap.1 | 44 +-
 deps/npm/man/man1/npm-star.1 | 84 +-
 deps/npm/man/man1/npm-stars.1 | 47 +-
 deps/npm/man/man1/npm-start.1 | 81 +-
 deps/npm/man/man1/npm-stop.1 | 74 +-
 deps/npm/man/man1/npm-team.1 | 166 +-
 deps/npm/man/man1/npm-test.1 | 71 +-
 deps/npm/man/man1/npm-token.1 | 169 +-
 deps/npm/man/man1/npm-uninstall.1 | 164 +-
 deps/npm/man/man1/npm-unpublish.1 | 190 +-
 deps/npm/man/man1/npm-unstar.1 | 78 +-
 deps/npm/man/man1/npm-update.1 | 469 ++-
 deps/npm/man/man1/npm-version.1 | 274 +-
 deps/npm/man/man1/npm-view.1 | 196 +-
 deps/npm/man/man1/npm-whoami.1 | 44 +-
 deps/npm/man/man1/npm.1 | 215 +-
 deps/npm/man/man1/npx.1 | 206 +-
 deps/npm/man/man5/folders.5 | 306 +-
 deps/npm/man/man5/install.5 | 119 +-
 deps/npm/man/man5/npm-global.5 | 306 +-
 deps/npm/man/man5/npm-json.5 | 1082 +++---
 deps/npm/man/man5/npm-shrinkwrap-json.5 | 42 +-
 deps/npm/man/man5/npmrc.5 | 147 +-
 deps/npm/man/man5/package-json.5 | 1082 +++---
 deps/npm/man/man5/package-lock-json.5 | 337 +--
 deps/npm/man/man7/config.7 | 2549 ++++++++---------
 deps/npm/man/man7/dependency-selectors.7 | 388 +--
 deps/npm/man/man7/developers.7 | 335 +--
 deps/npm/man/man7/logging.7 | 150 +-
 deps/npm/man/man7/orgs.7 | 107 +-
 deps/npm/man/man7/package-spec.7 | 191 +-
 deps/npm/man/man7/registry.7 | 91 +-
 deps/npm/man/man7/removal.7 | 51 +-
 deps/npm/man/man7/scope.7 | 142 +-
 deps/npm/man/man7/scripts.7 | 586 ++--
 deps/npm/man/man7/workspaces.7 | 214 +-
 .../@npmcli/arborist/bin/lib/logging.js | 3 +-
 .../@npmcli/arborist/lib/add-rm-pkg-deps.js | 4 +-
 .../arborist/lib/arborist/build-ideal-tree.js | 45 +-
 .../@npmcli/arborist/lib/arborist/index.js | 1 +
 .../arborist/lib/arborist/load-actual.js | 436 ++-
 .../@npmcli/arborist/lib/arborist/rebuild.js | 1 -
 .../@npmcli/arborist/lib/arborist/reify.js | 50 +-
 .../arborist/lib/consistent-resolve.js | 28 +-
 .../@npmcli/arborist/lib/dep-valid.js | 4 +-
 .../node_modules/@npmcli/arborist/lib/edge.js | 2 +-
 .../node_modules/@npmcli/arborist/lib/link.js | 22 -
 .../node_modules/@npmcli/arborist/lib/node.js | 7 +-
 .../@npmcli/arborist/lib/override-set.js | 11 +-
 .../@npmcli/arborist/lib/place-dep.js | 228 +-
 .../arborist/lib/query-selector-all.js | 329 ++-
 .../@npmcli/arborist/lib/realpath.js | 5 +-
 .../@npmcli/arborist/lib/shrinkwrap.js | 75 +-
 .../@npmcli/arborist/lib/yarn-lock.js | 54 +-
 .../@npmcli/arborist/package.json | 62 +-
 .../node_modules/@npmcli/ci-detect/LICENSE | 15 -
 .../@npmcli/ci-detect/lib/index.js | 51 -
 .../@npmcli/ci-detect/package.json | 41 -
 .../npm/node_modules/@npmcli/config/README.md | 260 ++
 .../node_modules/@npmcli/config/lib/errors.js | 22 +
 .../node_modules/@npmcli/config/lib/index.js | 230 +-
 .../node_modules/@npmcli/config/package.json | 35 +-
 .../@npmcli/disparity-colors/package.json | 17 +-
 .../@npmcli/fs/lib/{ => cp}/errors.js | 0
 .../node_modules/@npmcli/fs/lib/cp/index.js | 2 +-
 .../@npmcli/fs/lib/cp/polyfill.js | 4 +-
 deps/npm/node_modules/@npmcli/fs/lib/index.js | 21 +-
 .../node_modules/@npmcli/fs/lib/move-file.js | 78 +
 .../@npmcli/fs/lib/readdir-scoped.js | 20 +
 .../@npmcli/fs/lib/with-temp-dir.js | 10 +-
 deps/npm/node_modules/@npmcli/fs/package.json | 20 +-
 .../npm/node_modules/@npmcli/git/package.json | 29 +-
 .../{ => lib}/index.js | 85 +-
 .../node_modules/npm-bundled/index.js | 251 --
 .../node_modules/npm-bundled/package.json | 30 -
 .../installed-package-contents/package.json | 49 +-
 .../@npmcli/map-workspaces/package.json | 19 +-
 .../@npmcli/metavuln-calculator/package.json | 23 +-
 .../@npmcli/node-gyp/package.json | 17 +-
 .../@npmcli/package-json/package.json | 19 +-
 .../lib/escape.js | 0
 .../@npmcli/promise-spawn/lib/index.js | 170 +-
 .../@npmcli/promise-spawn/package.json | 22 +-
 .../node_modules/@npmcli/query/lib/index.js | 46 +-
 .../node_modules/@npmcli/query/package.json | 21 +-
 .../@npmcli/run-script/lib/make-spawn-args.js | 51 +-
 .../@npmcli/run-script/lib/run-script-pkg.js | 2 +-
 .../@npmcli/run-script/package.json | 27 +-
 deps/npm/node_modules/abbrev/lib/index.js | 50 +
 deps/npm/node_modules/abbrev/package.json | 44 +-
 .../npm/node_modules/abort-controller/LICENSE | 21 +
 .../node_modules/abort-controller/browser.js | 13 +
 .../node_modules/abort-controller/browser.mjs | 11 +
 .../dist/abort-controller.d.ts | 43 +
 .../abort-controller/dist/abort-controller.js | 127 +
 .../dist/abort-controller.js.map | 1 +
 .../dist/abort-controller.mjs | 118 +
 .../dist/abort-controller.mjs.map | 1 +
 .../dist/abort-controller.umd.js | 5 +
 .../dist/abort-controller.umd.js.map | 1 +
 .../abort-controller/package.json | 97 +
 .../node_modules/abort-controller/polyfill.js | 21 +
 .../abort-controller/polyfill.mjs | 19 +
 .../node_modules/agentkeepalive/History.md | 248 --
 .../node_modules/buffer/AUTHORS.md | 73 +
 .../node_modules/buffer}/LICENSE | 18 +-
 .../node_modules/buffer/index.d.ts | 194 ++
 .../node_modules/buffer/index.js | 2106 ++++++++++++++
 .../node_modules/buffer/package.json | 93 +
 .../node_modules/readable-stream/LICENSE | 47 +
 .../readable-stream/lib/_stream_duplex.js | 3 +
 .../lib/_stream_passthrough.js | 3 +
 .../readable-stream/lib/_stream_readable.js | 3 +
 .../readable-stream/lib/_stream_transform.js | 3 +
 .../readable-stream/lib/_stream_writable.js | 3 +
 .../lib/internal/streams/add-abort-signal.js | 52 +
 .../lib/internal/streams/buffer_list.js | 180 ++
 .../lib/internal/streams/compose.js | 161 ++
 .../lib/internal/streams/destroy.js | 337 +++
 .../lib/internal/streams/duplex.js | 157 +
 .../lib/internal/streams/duplexify.js | 425 +++
 .../lib/internal/streams/end-of-stream.js | 262 ++
 .../lib/internal/streams/from.js | 115 +
 .../lib/internal/streams/lazy_transform.js | 61 +
 .../lib/internal/streams/legacy.js | 100 +
 .../lib/internal/streams/operators.js | 534 ++++
 .../lib/internal/streams/passthrough.js | 42 +
 .../lib/internal/streams/pipeline.js | 434 +++
 .../lib/internal/streams/readable.js | 1339 +++++++++
 .../lib/internal/streams/state.js | 33 +
 .../lib/internal/streams/transform.js | 196 ++
 .../lib/internal/streams/utils.js | 328 +++
 .../lib/internal/streams/writable.js | 893 ++++++
 .../lib/internal/validators.js | 417 +++
 .../readable-stream/lib/ours/browser.js | 36 +
 .../readable-stream/lib/ours/errors.js | 391 +++
 .../readable-stream/lib/ours/index.js | 67 +
 .../readable-stream/lib/ours/primordials.js | 130 +
 .../readable-stream/lib/ours/util.js | 150 +
 .../readable-stream/lib/stream.js | 162 ++
 .../readable-stream/lib/stream/promises.js | 43 +
 .../node_modules/readable-stream/package.json | 84 +
 .../are-we-there-yet/package.json | 20 +-
 deps/npm/node_modules/asap/CHANGES.md | 70 -
 deps/npm/node_modules/asap/asap.js | 65 -
 deps/npm/node_modules/asap/browser-asap.js | 66 -
 deps/npm/node_modules/asap/browser-raw.js | 223 --
 deps/npm/node_modules/asap/package.json | 58 -
 deps/npm/node_modules/asap/raw.js | 101 -
 .../{asap/LICENSE.md => base64-js/LICENSE} | 18 +-
 .../node_modules/base64-js/base64js.min.js | 1 +
 deps/npm/node_modules/base64-js/index.d.ts | 3 +
 deps/npm/node_modules/base64-js/index.js | 150 +
 deps/npm/node_modules/base64-js/package.json | 47 +
 .../node_modules/bin-links/lib/check-bin.js | 4 +-
 .../npm/node_modules/bin-links/lib/fix-bin.js | 20 +-
 .../node_modules/bin-links/lib/link-gently.js | 24 +-
 .../node_modules/bin-links/lib/shim-bin.js | 4 +-
 .../npm-normalize-package-bin/LICENSE | 15 -
 deps/npm/node_modules/bin-links/package.json | 30 +-
 .../node_modules/cacache/lib/content/read.js | 77 +-
 .../node_modules/cacache/lib/content/rm.js | 6 +-
 .../node_modules/cacache/lib/content/write.js | 14 +-
 .../node_modules/cacache/lib/entry-index.js | 118 +-
 deps/npm/node_modules/cacache/lib/get.js | 55 -
 deps/npm/node_modules/cacache/lib/index.js | 3 -
 deps/npm/node_modules/cacache/lib/rm.js | 10 +-
 .../npm/node_modules/cacache/lib/util/glob.js | 7 +
 .../cacache/lib/util/move-file.js | 4 +-
 deps/npm/node_modules/cacache/lib/util/tmp.js | 13 +-
 deps/npm/node_modules/cacache/lib/verify.js | 49 +-
 deps/npm/node_modules/cacache/package.json | 32 +-
 deps/npm/node_modules/ci-info/LICENSE | 21 +
 deps/npm/node_modules/ci-info/index.d.ts | 70 +
 deps/npm/node_modules/ci-info/index.js | 89 +
 deps/npm/node_modules/ci-info/package.json | 37 +
 deps/npm/node_modules/ci-info/vendors.json | 248 ++
 .../cli-columns/{LICENSE => license} | 0
 deps/npm/node_modules/cli-table3/package.json | 2 +-
 deps/npm/node_modules/cli-table3/src/cell.js | 7 +-
 deps/npm/node_modules/cmd-shim/lib/index.js | 18 +-
 deps/npm/node_modules/cmd-shim/package.json | 22 +-
 deps/npm/node_modules/debuglog/debuglog.js | 22 -
 deps/npm/node_modules/debuglog/package.json | 21 -
 deps/npm/node_modules/dezalgo/dezalgo.js | 22 -
 deps/npm/node_modules/dezalgo/package.json | 46 -
 .../node_modules/event-target-shim/LICENSE | 21 +
 .../dist/event-target-shim.js | 871 ++++++
 .../dist/event-target-shim.js.map | 1 +
 .../dist/event-target-shim.mjs | 862 ++++++
 .../dist/event-target-shim.mjs.map | 1 +
 .../dist/event-target-shim.umd.js | 6 +
 .../dist/event-target-shim.umd.js.map | 1 +
 .../node_modules/event-target-shim/index.d.ts | 399 +++
 .../event-target-shim/package.json | 82 +
 deps/npm/node_modules/events/.airtap.yml | 15 +
 deps/npm/node_modules/events/History.md | 118 +
 deps/npm/node_modules/events/LICENSE | 22 +
 deps/npm/node_modules/events/events.js | 497 ++++
 deps/npm/node_modules/events/package.json | 37 +
 deps/npm/node_modules/events/security.md | 10 +
 .../events/tests/add-listeners.js | 111 +
 .../events/tests/check-listener-leaks.js | 101 +
 deps/npm/node_modules/events/tests/common.js | 104 +
 deps/npm/node_modules/events/tests/errors.js | 13 +
 .../node_modules/events/tests/events-list.js | 28 +
 .../node_modules/events/tests/events-once.js | 234 ++
 deps/npm/node_modules/events/tests/index.js | 64 +
 .../events/tests/legacy-compat.js | 16 +
 .../events/tests/listener-count.js | 37 +
 .../events/tests/listeners-side-effects.js | 56 +
 .../node_modules/events/tests/listeners.js | 168 ++
 .../events/tests/max-listeners.js | 47 +
 .../node_modules/events/tests/method-names.js | 35 +
 .../events/tests/modify-in-emit.js | 90 +
 .../npm/node_modules/events/tests/num-args.js | 60 +
 deps/npm/node_modules/events/tests/once.js | 83 +
 deps/npm/node_modules/events/tests/prepend.js | 31 +
 .../events/tests/remove-all-listeners.js | 133 +
 .../events/tests/remove-listeners.js | 212 ++
 .../tests/set-max-listeners-side-effects.js | 31 +
 .../events/tests/special-event-names.js | 45 +
 .../npm/node_modules/events/tests/subclass.js | 66 +
 deps/npm/node_modules/events/tests/symbols.js | 25 +
 .../node_modules/fastest-levenshtein/bench.js | 96 +
 .../fastest-levenshtein/esm/mod.d.ts | 4 +
 .../fastest-levenshtein/esm/mod.d.ts.map | 1 +
 .../fastest-levenshtein/esm/mod.js | 138 +
 .../fastest-levenshtein/index.d.ts | 2 -
 .../node_modules/fastest-levenshtein/index.js | 147 -
 .../node_modules/fastest-levenshtein/mod.d.ts | 3 +
 .../node_modules/fastest-levenshtein/mod.js | 142 +
 .../fastest-levenshtein/package.json | 43 +-
 .../node_modules/fastest-levenshtein/test.js | 113 +-
 .../node_modules/fastest-levenshtein/test.ts | 67 +
 deps/npm/node_modules/gauge/package.json | 19 +-
 .../hosted-git-info/lib/from-url.js | 122 +
 .../hosted-git-info/lib/git-host-info.js | 192 --
 .../hosted-git-info/lib/git-host.js | 114 -
 .../node_modules/hosted-git-info/lib/hosts.js | 228 ++
 .../node_modules/hosted-git-info/lib/index.js | 299 +-
 .../node_modules/hosted-git-info/package.json | 16 +-
 deps/npm/node_modules/humanize-ms/History.md | 25 -
 deps/npm/node_modules/ieee754/LICENSE | 11 +
 deps/npm/node_modules/ieee754/index.d.ts | 10 +
 deps/npm/node_modules/ieee754/index.js | 85 +
 deps/npm/node_modules/ieee754/package.json | 52 +
 .../npm/node_modules/ignore-walk/package.json | 18 +-
 .../init-package-json/package.json | 27 +-
 .../{ => lib}/index.js | 18 +-
 .../package.json | 45 +-
 deps/npm/node_modules/libnpmaccess/README.md | 235 +-
 .../node_modules/libnpmaccess/lib/index.js | 254 +-
 .../node_modules/libnpmaccess/package.json | 16 +-
 .../node_modules/libnpmdiff/lib/tarball.js | 6 +-
 deps/npm/node_modules/libnpmdiff/package.json | 19 +-
 .../libnpmexec/lib/file-exists.js | 4 +-
 deps/npm/node_modules/libnpmexec/lib/index.js | 78 +-
 .../node_modules/libnpmexec/lib/run-script.js | 11 +-
 deps/npm/node_modules/libnpmexec/package.json | 33 +-
 deps/npm/node_modules/libnpmfund/package.json | 12 +-
 deps/npm/node_modules/libnpmhook/package.json | 12 +-
 deps/npm/node_modules/libnpmorg/package.json | 12 +-
 deps/npm/node_modules/libnpmpack/lib/index.js | 12 +-
 deps/npm/node_modules/libnpmpack/package.json | 18 +-
 deps/npm/node_modules/libnpmpublish/README.md | 4 +-
 .../node_modules/libnpmpublish/lib/publish.js | 5 +-
 .../node_modules/libnpmpublish/package.json | 20 +-
 .../node_modules/libnpmsearch/package.json | 12 +-
 deps/npm/node_modules/libnpmteam/package.json | 12 +-
 .../node_modules/libnpmversion/package.json | 18 +-
 .../make-fetch-happen/package.json | 25 +-
 .../node_modules/minipass-fetch/lib/body.js | 2 +-
 .../node_modules/minipass-fetch/package.json | 20 +-
 .../node_modules/mkdirp-infer-owner/index.js | 26 -
 .../mkdirp-infer-owner/package.json | 36 -
 deps/npm/node_modules/mkdirp/CHANGELOG.md | 15 -
 deps/npm/node_modules/node-gyp/CHANGELOG.md | 32 +
 deps/npm/node_modules/node-gyp/addon.gypi | 43 +-
 .../gyp/.github/workflows/Python_tests.yml | 18 +-
 .../gyp/.github/workflows/node-gyp.yml | 21 +-
 .../gyp/.github/workflows/nodejs-windows.yml | 13 +-
 .../gyp/.github/workflows/release-please.yml | 4 +-
 .../node_modules/node-gyp/gyp/CHANGELOG.md | 56 +
 .../node-gyp/gyp/pylib/gyp/__init__.py | 24 +
 .../node-gyp/gyp/pylib/gyp/common.py | 13 +-
 .../node-gyp/gyp/pylib/gyp/flock_tool.py | 2 +-
 .../node-gyp/gyp/pylib/gyp/generator/make.py | 261 +-
 .../node-gyp/gyp/pylib/gyp/generator/msvs.py | 11 +-
 .../node-gyp/gyp/pylib/gyp/generator/ninja.py | 8 +-
 .../node-gyp/gyp/pylib/gyp/input.py | 21 +-
 .../node-gyp/gyp/pylib/gyp/xcodeproj_file.py | 2 +-
 .../node_modules/node-gyp/gyp/pyproject.toml | 41 +
 .../node-gyp/gyp/requirements_dev.txt | 2 -
 deps/npm/node_modules/node-gyp/gyp/setup.py | 42 -
 .../npm/node_modules/node-gyp/gyp/test_gyp.py | 1 +
 .../node-gyp/gyp/tools/pretty_gyp.py | 2 +-
 deps/npm/node_modules/node-gyp/lib/build.js | 2 +
 .../node_modules/node-gyp/lib/configure.js | 49 +-
 .../node-gyp/lib/find-visualstudio.js | 5 +
 .../node_modules/@npmcli/fs/LICENSE.md | 20 +
 .../@npmcli/fs/lib/common/get-options.js | 20 +
 .../@npmcli/fs/lib/common/node.js | 9 +
 .../@npmcli/fs/lib/common/owner-sync.js | 0
 .../@npmcli/fs/lib/common/owner.js | 0
 .../node_modules}/@npmcli/fs/lib/copy-file.js | 0
 .../node_modules/@npmcli/fs/lib/cp/LICENSE | 15 +
 .../node_modules/@npmcli/fs/lib/cp/index.js | 22 +
 .../@npmcli/fs/lib/cp/polyfill.js | 428 +++
 .../node_modules/@npmcli/fs/lib/errors.js | 129 +
 .../node_modules}/@npmcli/fs/lib/fs.js | 0
 .../node_modules/@npmcli/fs/lib/index.js | 12 +
 .../node_modules}/@npmcli/fs/lib/mkdir.js | 0
 .../node_modules}/@npmcli/fs/lib/mkdtemp.js | 0
 .../node_modules}/@npmcli/fs/lib/rm/index.js | 0
 .../@npmcli/fs/lib/rm/polyfill.js | 0
 .../@npmcli/fs/lib/with-owner-sync.js | 0
 .../@npmcli/fs/lib/with-owner.js | 0
 .../@npmcli/fs/lib/with-temp-dir.js | 41 +
 .../@npmcli/fs/lib/write-file.js | 0
 .../node_modules/@npmcli/fs}/package.json | 53 +-
 .../@npmcli/move-file/LICENSE.md | 0
 .../@npmcli/move-file/lib/index.js | 0
 .../@npmcli/move-file/package.json | 0
 .../node-gyp/node_modules/abbrev/LICENSE | 46 +
 .../node_modules}/abbrev/abbrev.js | 0
 .../node-gyp/node_modules/abbrev/package.json | 21 +
 .../node_modules/are-we-there-yet/LICENSE.md | 18 +
 .../are-we-there-yet/lib/index.js | 4 +
 .../are-we-there-yet/lib/tracker-base.js | 11 +
 .../are-we-there-yet/lib/tracker-group.js | 116 +
 .../are-we-there-yet/lib/tracker-stream.js | 36 +
 .../are-we-there-yet/lib/tracker.js | 32 +
 .../are-we-there-yet/package.json | 56 +
 .../node-gyp/node_modules/cacache/LICENSE.md | 16 +
 .../node_modules/cacache/lib/content/path.js | 29 +
 .../node_modules/cacache/lib/content/read.js | 241 ++
 .../node_modules/cacache/lib/content/rm.js | 20 +
 .../node_modules/cacache/lib/content/write.js | 189 ++
 .../node_modules/cacache/lib/entry-index.js | 404 +++
 .../node-gyp/node_modules/cacache/lib/get.js | 225 ++
 .../node_modules/cacache/lib/index.js | 45 +
 .../node_modules/cacache/lib/memoization.js | 72 +
 .../node-gyp/node_modules/cacache/lib/put.js | 80 +
 .../node-gyp/node_modules/cacache/lib/rm.js | 31 +
 .../cacache/lib/util/fix-owner.js | 0
 .../cacache/lib/util/hash-to-segments.js | 7 +
 .../cacache/lib/util/move-file.js | 56 +
 .../node_modules/cacache/lib/util/tmp.js | 33 +
 .../node_modules/cacache/lib/verify.js | 257 ++
 .../node_modules/brace-expansion/LICENSE | 21 +
 .../node_modules/brace-expansion/index.js | 202 ++
 .../node_modules/brace-expansion/package.json | 46 +
 .../cacache/node_modules/glob}/LICENSE | 2 +-
 .../cacache/node_modules/glob/common.js | 240 ++
 .../cacache/node_modules/glob/glob.js | 790 +++++
 .../cacache/node_modules/glob/package.json | 55 +
 .../cacache/node_modules/glob/sync.js | 486 ++++
 .../cacache/node_modules/minimatch}/LICENSE | 2 +-
 .../node_modules/minimatch/lib/path.js | 4 +
 .../node_modules/minimatch/minimatch.js | 906 ++++++
 .../node_modules/minimatch/package.json | 32 +
 .../node_modules/cacache/package.json | 84 +
 .../node-gyp/node_modules/gauge/LICENSE.md | 20 +
 .../node_modules/gauge/lib/base-theme.js | 18 +
 .../node-gyp/node_modules/gauge/lib/error.js | 24 +
 .../node_modules/gauge/lib/has-color.js | 4 +
 .../node-gyp/node_modules/gauge/lib/index.js | 289 ++
 .../node_modules/gauge/lib/plumbing.js | 50 +
 .../node_modules/gauge/lib/process.js | 3 +
 .../node_modules/gauge/lib/progress-bar.js | 41 +
 .../node_modules/gauge/lib/render-template.js | 222 ++
 .../node_modules/gauge/lib/set-immediate.js | 7 +
 .../node_modules/gauge/lib/set-interval.js | 3 +
 .../node-gyp/node_modules/gauge/lib/spin.js | 5 +
 .../node_modules/gauge/lib/template-item.js | 87 +
 .../node_modules/gauge/lib/theme-set.js | 122 +
 .../node-gyp/node_modules/gauge/lib/themes.js | 56 +
 .../node_modules/gauge/lib/wide-truncate.js | 31 +
 .../node-gyp/node_modules/gauge/package.json | 66 +
 .../node_modules/make-fetch-happen/LICENSE | 16 +
 .../make-fetch-happen/lib/agent.js | 214 ++
 .../make-fetch-happen/lib/cache/entry.js | 444 +++
 .../make-fetch-happen/lib/cache/errors.js | 11 +
 .../make-fetch-happen/lib/cache/index.js | 49 +
 .../make-fetch-happen/lib/cache/key.js | 17 +
 .../make-fetch-happen/lib/cache/policy.js | 161 ++
 .../node_modules/make-fetch-happen/lib/dns.js | 49 +
 .../make-fetch-happen/lib/fetch.js | 118 +
 .../make-fetch-happen/lib/index.js | 41 +
 .../make-fetch-happen/lib/options.js | 52 +
 .../make-fetch-happen/lib/pipeline.js | 41 +
 .../make-fetch-happen/lib/remote.js | 121 +
 .../make-fetch-happen/package.json | 79 +
 .../node_modules/minipass-fetch/LICENSE} | 35 +-
 .../minipass-fetch/lib/abort-error.js | 17 +
 .../node_modules/minipass-fetch/lib/blob.js | 97 +
 .../node_modules/minipass-fetch/lib/body.js | 350 +++
 .../minipass-fetch/lib/fetch-error.js | 32 +
 .../minipass-fetch/lib/headers.js | 267 ++
 .../node_modules/minipass-fetch/lib/index.js | 365 +++
 .../minipass-fetch/lib/request.js | 281 ++
 .../minipass-fetch/lib/response.js | 90 +
 .../node_modules/minipass-fetch/package.json | 67 +
 .../node-gyp/node_modules/nopt/CHANGELOG.md | 58 -
 .../node-gyp/node_modules/nopt/bin/nopt.js | 80 +-
 .../node-gyp/node_modules/nopt/lib/nopt.js | 370 ++-
 .../node-gyp/node_modules/nopt/package.json | 35 +-
 .../node-gyp/node_modules/npmlog/LICENSE.md | 20 +
 .../node-gyp/node_modules/npmlog/lib/log.js | 404 +++
 .../node_modules/npmlog}/package.json | 54 +-
 .../node-gyp/node_modules/ssri/LICENSE.md | 16 +
 .../node-gyp/node_modules/ssri/lib/index.js | 524 ++++
 .../node-gyp/node_modules/ssri/package.json | 63 +
 .../node_modules/unique-filename/LICENSE | 5 +
 .../node_modules/unique-filename/lib/index.js | 7 +
 .../unique-filename}/package.json | 33 +-
 .../node_modules/unique-slug}/LICENSE | 2 +-
 .../node_modules/unique-slug/lib/index.js | 11 +
 .../node_modules/unique-slug}/package.json | 29 +-
 .../node_modules/which}/LICENSE | 0
 .../node-gyp/node_modules/which/README.md | 54 +
 .../node_modules}/which/bin/node-which | 0
 .../node-gyp/node_modules/which/package.json | 43 +
 .../node_modules}/which/which.js | 0
 deps/npm/node_modules/node-gyp/package.json | 4 +-
 .../node-gyp/test/test-find-node-directory.js | 2 +-
 deps/npm/node_modules/node-gyp/update-gyp.py | 20 +-
 deps/npm/node_modules/nopt/package.json | 21 +-
 .../normalize-package-data/package.json | 19 +-
 .../npm-audit-report/package.json | 20 +-
 .../npm-normalize-package-bin/LICENSE | 15 -
 .../npm-normalize-package-bin/lib/index.js | 64 -
 .../npm/node_modules/npm-bundled/package.json | 22 +-
 .../npm-install-checks/package.json | 18 +-
 .../npm-normalize-package-bin/index.js | 60 -
 .../npm-normalize-package-bin/lib/index.js | 0
 .../npm-normalize-package-bin/package.json | 42 +-
 .../npm-normalize-package-bin/test/array.js | 37 -
 .../npm-normalize-package-bin/test/nobin.js | 35 -
 .../npm-normalize-package-bin/test/object.js | 141 -
 .../npm-normalize-package-bin/test/string.js | 37 -
 .../node_modules/npm-package-arg/lib/npa.js | 13 +-
 .../node_modules/npm-package-arg/package.json | 25 +-
 deps/npm/node_modules/npm-packlist/README.md | 146 -
 .../node_modules/npm-packlist/bin/index.js | 39 -
 .../node_modules/npm-packlist/lib/index.js | 770 +++--
 .../npm-normalize-package-bin/lib/index.js | 64 -
 .../node_modules/npm-packlist/package.json | 31 +-
 .../npm-normalize-package-bin/LICENSE | 15 -
 .../npm-normalize-package-bin/lib/index.js | 64 -
 .../npm-pick-manifest/package.json | 25 +-
 .../npm/node_modules/npm-profile/package.json | 23 +-
 .../npm-registry-fetch/package.json | 31 +-
 deps/npm/node_modules/npmlog/lib/log.js | 6 +-
 deps/npm/node_modules/npmlog/package.json | 23 +-
 deps/npm/node_modules/opener/README.md | 54 -
 .../npm/node_modules/opener/bin/opener-bin.js | 10 -
 deps/npm/node_modules/opener/lib/opener.js | 66 -
 deps/npm/node_modules/opener/package.json | 20 -
 deps/npm/node_modules/pacote/README.md | 3 +-
 deps/npm/node_modules/pacote/lib/dir.js | 16 +-
 deps/npm/node_modules/pacote/lib/fetcher.js | 62 +-
 deps/npm/node_modules/pacote/lib/git.js | 6 +
 deps/npm/node_modules/pacote/lib/registry.js | 8 -
 deps/npm/node_modules/pacote/lib/util/npm.js | 2 +-
 deps/npm/node_modules/pacote/package.json | 52 +-
 .../parse-conflict-json/package.json | 20 +-
 deps/npm/node_modules/proc-log/package.json | 17 +-
 deps/npm/node_modules/process/LICENSE | 22 +
 deps/npm/node_modules/process/browser.js | 184 ++
 deps/npm/node_modules/process/index.js | 2 +
 deps/npm/node_modules/process/package.json | 27 +
 deps/npm/node_modules/process/test.js | 199 ++
 .../node_modules/read-cmd-shim/lib/index.js | 6 +-
 .../node_modules/read-cmd-shim/package.json | 19 +-
 .../read-package-json-fast/{ => lib}/index.js | 51 +-
 .../read-package-json-fast/package.json | 43 +-
 .../npm-normalize-package-bin/LICENSE | 15 -
 .../npm-normalize-package-bin/lib/index.js | 64 -
 .../npm-normalize-package-bin/package.json | 41 -
 .../read-package-json/package.json | 25 +-
 .../readdir-scoped-modules/LICENSE | 15 -
 .../readdir-scoped-modules/package.json | 34 -
 .../readdir-scoped-modules/readdir.js | 121 -
 deps/npm/node_modules/rimraf/CHANGELOG.md | 65 -
 deps/npm/node_modules/semver/classes/range.js | 3 +
 deps/npm/node_modules/semver/index.js | 122 +-
 deps/npm/node_modules/semver/package.json | 31 +-
 deps/npm/node_modules/ssri/lib/index.js | 3 +
 deps/npm/node_modules/ssri/package.json | 17 +-
 deps/npm/node_modules/tar/lib/create.js | 25 +-
 deps/npm/node_modules/tar/lib/extract.js | 26 +-
 deps/npm/node_modules/tar/lib/header.js | 84 +-
 .../npm/node_modules/tar/lib/large-numbers.js | 39 +-
 deps/npm/node_modules/tar/lib/list.js | 33 +-
 deps/npm/node_modules/tar/lib/mkdir.js | 64 +-
 deps/npm/node_modules/tar/lib/mode-fix.js | 12 +-
 .../node_modules/tar/lib/normalize-unicode.js | 5 +-
 deps/npm/node_modules/tar/lib/pack.js | 85 +-
 deps/npm/node_modules/tar/lib/parse.js | 120 +-
 .../node_modules/tar/lib/path-reservations.js | 44 +-
 deps/npm/node_modules/tar/lib/pax.js | 23 +-
 deps/npm/node_modules/tar/lib/read-entry.js | 21 +-
 deps/npm/node_modules/tar/lib/replace.js | 77 +-
 .../tar/lib/strip-absolute-path.js | 2 +-
 deps/npm/node_modules/tar/lib/unpack.js | 117 +-
 deps/npm/node_modules/tar/lib/update.js | 12 +-
 deps/npm/node_modules/tar/lib/warn-mixin.js | 11 +-
 deps/npm/node_modules/tar/lib/write-entry.js | 67 +-
 deps/npm/node_modules/tar/package.json | 68 +-
 deps/npm/node_modules/treeverse/package.json | 18 +-
 .../node_modules/unique-filename/package.json | 21 +-
 .../npm/node_modules/unique-slug/package.json | 17 +-
 .../validate-npm-package-name/package.json | 17 +-
 deps/npm/node_modules/which/CHANGELOG.md | 166 --
 deps/npm/node_modules/which/README.md | 22 +-
 deps/npm/node_modules/which/bin/which.js | 52 +
 deps/npm/node_modules/which/lib/index.js | 110 +
 deps/npm/node_modules/which/package.json | 45 +-
 .../write-file-atomic/package.json | 17 +-
 deps/npm/package.json | 135 +-
 .../test/lib/commands/adduser.js.test.cjs | 17 -
 .../test/lib/commands/completion.js.test.cjs | 19 +-
 .../test/lib/commands/config.js.test.cjs | 21 +-
 .../test/lib/commands/dist-tag.js.test.cjs | 2 +-
 .../test/lib/commands/init.js.test.cjs | 7 +-
 .../test/lib/commands/link.js.test.cjs | 5 +
 .../test/lib/commands/pack.js.test.cjs | 30 +
 .../test/lib/commands/publish.js.test.cjs | 100 +-
 .../test/lib/commands/query.js.test.cjs | 33 +-
 .../test/lib/commands/shrinkwrap.js.test.cjs | 12 +-
 .../test/lib/commands/version.js.test.cjs | 20 +-
 .../tap-snapshots/test/lib/docs.js.test.cjs | 441 ++-
 .../tap-snapshots/test/lib/npm.js.test.cjs | 1039 -------
 .../test/lib/utils/error-message.js.test.cjs | 22 +-
 .../test/lib/utils/exit-handler.js.test.cjs | 58 +-
 .../lib/utils/explain-eresolve.js.test.cjs | 136 +-
 .../test/lib/utils/log-file.js.test.cjs | 2 +-
 .../test/lib/utils/tar.js.test.cjs | 46 +-
 deps/npm/test/bin/windows-shims.js | 2 -
 deps/npm/test/fixtures/mock-logs.js | 58 +-
 deps/npm/test/fixtures/mock-npm.js | 14 +-
 deps/npm/test/fixtures/mock-registry.js | 285 --
 deps/npm/test/fixtures/sandbox.js | 23 +-
 deps/npm/test/index.js | 12 +-
 deps/npm/test/lib/arborist-cmd.js | 16 +-
 deps/npm/test/lib/auth/legacy.js | 429 ---
 deps/npm/test/lib/auth/oauth.js | 28 -
 deps/npm/test/lib/auth/saml.js | 28 -
 deps/npm/test/lib/auth/sso.js | 236 --
 deps/npm/test/lib/cli.js | 34 +-
 deps/npm/test/lib/commands/access.js | 661 ++---
 deps/npm/test/lib/commands/adduser.js | 313 +-
 deps/npm/test/lib/commands/audit.js | 33 +-
 deps/npm/test/lib/commands/bin.js | 32 -
 deps/npm/test/lib/commands/birthday.js | 15 -
 deps/npm/test/lib/commands/bugs.js | 2 +-
 deps/npm/test/lib/commands/cache.js | 4 +-
 deps/npm/test/lib/commands/ci.js | 32 +-
 deps/npm/test/lib/commands/config.js | 277 +-
 deps/npm/test/lib/commands/dedupe.js | 2 +-
 deps/npm/test/lib/commands/deprecate.js | 11 +-
 deps/npm/test/lib/commands/diff.js | 28 +-
 deps/npm/test/lib/commands/dist-tag.js | 2 +-
 deps/npm/test/lib/commands/edit.js | 43 +-
 deps/npm/test/lib/commands/exec.js | 4 +-
 deps/npm/test/lib/commands/explain.js | 9 +
 deps/npm/test/lib/commands/explore.js | 3 +
 deps/npm/test/lib/commands/find-dupes.js | 2 +-
 deps/npm/test/lib/commands/help.js | 1 +
 deps/npm/test/lib/commands/hook.js | 56 +-
 deps/npm/test/lib/commands/init.js | 10 +-
 deps/npm/test/lib/commands/install-ci-test.js | 9 +
 deps/npm/test/lib/commands/install-test.js | 9 +
 deps/npm/test/lib/commands/install.js | 536 ++--
 deps/npm/test/lib/commands/link.js | 36 +-
 deps/npm/test/lib/commands/login.js | 143 +
 deps/npm/test/lib/commands/ls.js | 1 +
 deps/npm/test/lib/commands/owner.js | 2 +-
 deps/npm/test/lib/commands/pack.js | 18 +
 deps/npm/test/lib/commands/ping.js | 2 +-
 deps/npm/test/lib/commands/publish.js | 107 +-
 deps/npm/test/lib/commands/repo.js | 2 +-
 deps/npm/test/lib/commands/restart.js | 18 +-
 deps/npm/test/lib/commands/run-script.js | 9 -
 deps/npm/test/lib/commands/search.js | 2 +-
 deps/npm/test/lib/commands/set-script.js | 188 --
 deps/npm/test/lib/commands/set.js | 4 +
 deps/npm/test/lib/commands/shrinkwrap.js | 12 +-
 deps/npm/test/lib/commands/star.js | 2 +-
 deps/npm/test/lib/commands/stars.js | 2 +-
 deps/npm/test/lib/commands/start.js | 16 +-
 deps/npm/test/lib/commands/stop.js | 15 +-
 deps/npm/test/lib/commands/test.js | 15 +-
 deps/npm/test/lib/commands/token.js | 2 +
 deps/npm/test/lib/commands/unpublish.js | 45 +-
 deps/npm/test/lib/commands/unstar.js | 2 +-
 deps/npm/test/lib/commands/whoami.js | 27 +-
 deps/npm/test/lib/docs.js | 6 +-
 deps/npm/test/lib/lifecycle-cmd.js | 3 +
 deps/npm/test/lib/load-all-commands.js | 1 +
 deps/npm/test/lib/npm.js | 162 +-
 deps/npm/test/lib/utils/config/definitions.js | 325 ++-
 deps/npm/test/lib/utils/display.js | 2 +-
 deps/npm/test/lib/utils/error-message.js | 19 +-
 deps/npm/test/lib/utils/exit-handler.js | 181 +-
 deps/npm/test/lib/utils/explain-eresolve.js | 37 +-
 deps/npm/test/lib/utils/log-file.js | 14 +-
 deps/npm/test/lib/utils/open-url-prompt.js | 13 +-
 deps/npm/test/lib/utils/open-url.js | 13 +-
 deps/npm/test/lib/utils/tar.js | 41 +-
 deps/npm/test/lib/utils/timers.js | 12 +-
 deps/npm/test/lib/utils/update-notifier.js | 8 +-
 848 files changed, 47132 insertions(+), 26227 deletions(-)

 create mode 100644 deps/npm/docs/README.md
 delete mode 100644 deps/npm/docs/content/commands/npm-bin.md
 create mode 100644 deps/npm/docs/content/commands/npm-login.md
 delete mode 100644 deps/npm/docs/content/commands/npm-set-script.md
 rename deps/npm/docs/output/commands/{npm-bin.html => npm-login.html} (57%)
 delete mode 100644 deps/npm/docs/output/commands/npm-set-script.html
 delete mode 100644 deps/npm/lib/auth/legacy.js
 delete mode 100644 deps/npm/lib/auth/oauth.js
 delete mode 100644 deps/npm/lib/auth/saml.js
 delete mode 100644 deps/npm/lib/auth/sso.js
 delete mode 100644 deps/npm/lib/commands/bin.js
 delete mode 100644 deps/npm/lib/commands/birthday.js
 create mode 100644 deps/npm/lib/commands/login.js
 delete mode 100644 deps/npm/lib/commands/set-script.js
 create mode 100644 deps/npm/lib/utils/auth.js
 delete mode 100644 deps/npm/man/man1/npm-bin.1
 create mode 100644 deps/npm/man/man1/npm-login.1
 delete mode 100644 deps/npm/man/man1/npm-set-script.1
 delete mode 100644 deps/npm/node_modules/@npmcli/ci-detect/LICENSE
 delete mode 100644 deps/npm/node_modules/@npmcli/ci-detect/lib/index.js
 delete mode 100644 deps/npm/node_modules/@npmcli/ci-detect/package.json
 create mode 100644 deps/npm/node_modules/@npmcli/config/README.md
 create mode 100644 deps/npm/node_modules/@npmcli/config/lib/errors.js
 rename deps/npm/node_modules/@npmcli/fs/lib/{ => cp}/errors.js (100%)
 create mode 100644 deps/npm/node_modules/@npmcli/fs/lib/move-file.js
 create mode 100644 deps/npm/node_modules/@npmcli/fs/lib/readdir-scoped.js
 rename deps/npm/node_modules/@npmcli/installed-package-contents/{ => lib}/index.js (81%)
 delete mode 100644 deps/npm/node_modules/@npmcli/installed-package-contents/node_modules/npm-bundled/index.js
 delete mode 100644 deps/npm/node_modules/@npmcli/installed-package-contents/node_modules/npm-bundled/package.json
 rename deps/npm/node_modules/@npmcli/{run-script => promise-spawn}/lib/escape.js (100%)
 create mode 100644 deps/npm/node_modules/abbrev/lib/index.js
 create mode 100644 deps/npm/node_modules/abort-controller/LICENSE
 create mode 100644 deps/npm/node_modules/abort-controller/browser.js
 create mode 100644 deps/npm/node_modules/abort-controller/browser.mjs
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.d.ts
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.js
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.js.map
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.mjs
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.mjs.map
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js
 create mode 100644 deps/npm/node_modules/abort-controller/dist/abort-controller.umd.js.map
 create mode 100644 deps/npm/node_modules/abort-controller/package.json
 create mode 100644 deps/npm/node_modules/abort-controller/polyfill.js
 create mode 100644 deps/npm/node_modules/abort-controller/polyfill.mjs
 delete mode 100644 deps/npm/node_modules/agentkeepalive/History.md
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/buffer/AUTHORS.md
 rename deps/npm/node_modules/{debuglog => are-we-there-yet/node_modules/buffer}/LICENSE (62%)
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/buffer/index.d.ts
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/buffer/index.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/buffer/package.json
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/add-abort-signal.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/buffer_list.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/compose.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/duplex.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/duplexify.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/end-of-stream.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/lazy_transform.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/legacy.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/operators.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/passthrough.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/pipeline.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/readable.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/state.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/transform.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/utils.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/writable.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/validators.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/ours/browser.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/ours/errors.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/ours/index.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/ours/primordials.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/ours/util.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/stream.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/lib/stream/promises.js
 create mode 100644 deps/npm/node_modules/are-we-there-yet/node_modules/readable-stream/package.json
 delete mode 100644 deps/npm/node_modules/asap/CHANGES.md
 delete mode 100644 deps/npm/node_modules/asap/asap.js
 delete mode 100644 deps/npm/node_modules/asap/browser-asap.js
 delete mode 100644 deps/npm/node_modules/asap/browser-raw.js
 delete mode 100644 deps/npm/node_modules/asap/package.json
 delete mode 100644 deps/npm/node_modules/asap/raw.js
 rename deps/npm/node_modules/{asap/LICENSE.md => base64-js/LICENSE} (63%)
 create mode 100644 deps/npm/node_modules/base64-js/base64js.min.js
 create mode 100644 deps/npm/node_modules/base64-js/index.d.ts
 create mode 100644 deps/npm/node_modules/base64-js/index.js
 create mode 100644 deps/npm/node_modules/base64-js/package.json
 delete mode 100644 deps/npm/node_modules/bin-links/node_modules/npm-normalize-package-bin/LICENSE
 create mode 100644 deps/npm/node_modules/cacache/lib/util/glob.js
 create mode 100644 deps/npm/node_modules/ci-info/LICENSE
 create mode 100644 deps/npm/node_modules/ci-info/index.d.ts
 create mode 100644 deps/npm/node_modules/ci-info/index.js
 create mode 100644 deps/npm/node_modules/ci-info/package.json
 create mode 100644 deps/npm/node_modules/ci-info/vendors.json
 rename deps/npm/node_modules/cli-columns/{LICENSE => license} (100%)
 delete mode 100644 deps/npm/node_modules/debuglog/debuglog.js
 delete mode 100644 deps/npm/node_modules/debuglog/package.json
 delete mode 100644 deps/npm/node_modules/dezalgo/dezalgo.js
 delete mode 100644 deps/npm/node_modules/dezalgo/package.json
 create mode 100644 deps/npm/node_modules/event-target-shim/LICENSE
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.js
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.js.map
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.mjs
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.mjs.map
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.umd.js
 create mode 100644 deps/npm/node_modules/event-target-shim/dist/event-target-shim.umd.js.map
 create mode 100644 deps/npm/node_modules/event-target-shim/index.d.ts
 create mode 100644 deps/npm/node_modules/event-target-shim/package.json
 create mode 100644 deps/npm/node_modules/events/.airtap.yml
 create mode 100644 deps/npm/node_modules/events/History.md
 create mode 100644 deps/npm/node_modules/events/LICENSE
 create mode 100644 deps/npm/node_modules/events/events.js
 create mode 100644 deps/npm/node_modules/events/package.json
 create mode 100644 deps/npm/node_modules/events/security.md
 create mode 100644 deps/npm/node_modules/events/tests/add-listeners.js
 create mode 100644 deps/npm/node_modules/events/tests/check-listener-leaks.js
 create mode 100644 deps/npm/node_modules/events/tests/common.js
 create mode 100644 deps/npm/node_modules/events/tests/errors.js
 create mode 100644 deps/npm/node_modules/events/tests/events-list.js
 create mode 100644 deps/npm/node_modules/events/tests/events-once.js
 create mode 100644 deps/npm/node_modules/events/tests/index.js
 create mode 100644 deps/npm/node_modules/events/tests/legacy-compat.js
 create mode 100644 deps/npm/node_modules/events/tests/listener-count.js
 create mode 100644 deps/npm/node_modules/events/tests/listeners-side-effects.js
 create mode 100644 deps/npm/node_modules/events/tests/listeners.js
 create mode 100644 deps/npm/node_modules/events/tests/max-listeners.js
 create mode 100644 deps/npm/node_modules/events/tests/method-names.js
 create mode 100644 deps/npm/node_modules/events/tests/modify-in-emit.js
 create mode 100644 deps/npm/node_modules/events/tests/num-args.js
 create mode 100644 deps/npm/node_modules/events/tests/once.js
 create mode 100644 deps/npm/node_modules/events/tests/prepend.js
 create mode 100644 deps/npm/node_modules/events/tests/remove-all-listeners.js
 create mode 100644 deps/npm/node_modules/events/tests/remove-listeners.js
 create mode 100644 deps/npm/node_modules/events/tests/set-max-listeners-side-effects.js
 create mode 100644 deps/npm/node_modules/events/tests/special-event-names.js
 create mode 100644 deps/npm/node_modules/events/tests/subclass.js
 create mode 100644 deps/npm/node_modules/events/tests/symbols.js
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/bench.js
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/esm/mod.d.ts
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/esm/mod.d.ts.map
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/esm/mod.js
 delete mode 100644 deps/npm/node_modules/fastest-levenshtein/index.d.ts
 delete mode 100644 deps/npm/node_modules/fastest-levenshtein/index.js
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/mod.d.ts
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/mod.js
 create mode 100644 deps/npm/node_modules/fastest-levenshtein/test.ts
 create mode 100644 deps/npm/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 deps/npm/node_modules/hosted-git-info/lib/git-host-info.js
 delete mode 100644 deps/npm/node_modules/hosted-git-info/lib/git-host.js
 create mode 100644 deps/npm/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 deps/npm/node_modules/humanize-ms/History.md
 create mode 100644 deps/npm/node_modules/ieee754/LICENSE
 create mode 100644 deps/npm/node_modules/ieee754/index.d.ts
 create mode 100644 deps/npm/node_modules/ieee754/index.js
 create mode 100644 deps/npm/node_modules/ieee754/package.json
 rename deps/npm/node_modules/json-parse-even-better-errors/{ => lib}/index.js (94%)
 delete mode 100644 deps/npm/node_modules/mkdirp-infer-owner/index.js
 delete mode 100644 deps/npm/node_modules/mkdirp-infer-owner/package.json
 delete mode 100644 deps/npm/node_modules/mkdirp/CHANGELOG.md
 create mode 100644 deps/npm/node_modules/node-gyp/gyp/pyproject.toml
 delete mode 100644 deps/npm/node_modules/node-gyp/gyp/requirements_dev.txt
 delete mode 100644 deps/npm/node_modules/node-gyp/gyp/setup.py
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/common/owner-sync.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/common/owner.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/copy-file.js (100%)
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/fs.js (100%)
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/mkdir.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/mkdtemp.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/rm/index.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/rm/polyfill.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/with-owner-sync.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/with-owner.js (100%)
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/fs/lib/write-file.js (100%)
 rename deps/npm/node_modules/{npm-bundled/node_modules/npm-normalize-package-bin => node-gyp/node_modules/@npmcli/fs}/package.json (60%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/move-file/LICENSE.md (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/move-file/lib/index.js (100%)
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/@npmcli/move-file/package.json (100%)
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE
 rename deps/npm/node_modules/{ => node-gyp/node_modules}/abbrev/abbrev.js (100%)
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/LICENSE.md
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/lib/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/lib/tracker-base.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/lib/tracker-group.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/lib/tracker-stream.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/lib/tracker.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/are-we-there-yet/package.json
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js
 create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js
deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js rename deps/npm/node_modules/{ => node-gyp/node_modules}/cacache/lib/util/fix-owner.js (100%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json rename deps/npm/node_modules/{mkdirp-infer-owner => node-gyp/node_modules/cacache/node_modules/glob}/LICENSE (92%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js rename deps/npm/node_modules/{@npmcli/installed-package-contents/node_modules/npm-bundled => node-gyp/node_modules/cacache/node_modules/minimatch}/LICENSE (92%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/cacache/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/LICENSE.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/base-theme.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/error.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/has-color.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/plumbing.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/process.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/progress-bar.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/render-template.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/set-immediate.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/set-interval.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/spin.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/template-item.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/theme-set.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/themes.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/lib/wide-truncate.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/gauge/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE 
create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json rename deps/npm/node_modules/{opener/LICENSE.txt => node-gyp/node_modules/minipass-fetch/LICENSE} (55%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json delete mode 100644 deps/npm/node_modules/node-gyp/node_modules/nopt/CHANGELOG.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/npmlog/LICENSE.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/npmlog/lib/log.js rename deps/npm/node_modules/{npm-packlist/node_modules/npm-normalize-package-bin => node-gyp/node_modules/npmlog}/package.json (51%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/ssri/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js rename deps/npm/node_modules/{npm-pick-manifest/node_modules/npm-normalize-package-bin => node-gyp/node_modules/unique-filename}/package.json (68%) rename deps/npm/node_modules/{npm-packlist/node_modules/npm-normalize-package-bin => node-gyp/node_modules/unique-slug}/LICENSE (96%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js rename deps/npm/node_modules/{bin-links/node_modules/npm-normalize-package-bin => node-gyp/node_modules/unique-slug}/package.json (76%) rename deps/npm/node_modules/{dezalgo => 
node-gyp/node_modules/which}/LICENSE (100%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/README.md rename deps/npm/node_modules/{ => node-gyp/node_modules}/which/bin/node-which (100%) create mode 100644 deps/npm/node_modules/node-gyp/node_modules/which/package.json rename deps/npm/node_modules/{ => node-gyp/node_modules}/which/which.js (100%) delete mode 100644 deps/npm/node_modules/npm-bundled/node_modules/npm-normalize-package-bin/LICENSE delete mode 100644 deps/npm/node_modules/npm-bundled/node_modules/npm-normalize-package-bin/lib/index.js delete mode 100644 deps/npm/node_modules/npm-normalize-package-bin/index.js rename deps/npm/node_modules/{bin-links/node_modules => }/npm-normalize-package-bin/lib/index.js (100%) delete mode 100644 deps/npm/node_modules/npm-normalize-package-bin/test/array.js delete mode 100644 deps/npm/node_modules/npm-normalize-package-bin/test/nobin.js delete mode 100644 deps/npm/node_modules/npm-normalize-package-bin/test/object.js delete mode 100644 deps/npm/node_modules/npm-normalize-package-bin/test/string.js delete mode 100644 deps/npm/node_modules/npm-packlist/README.md delete mode 100755 deps/npm/node_modules/npm-packlist/bin/index.js delete mode 100644 deps/npm/node_modules/npm-packlist/node_modules/npm-normalize-package-bin/lib/index.js delete mode 100644 deps/npm/node_modules/npm-pick-manifest/node_modules/npm-normalize-package-bin/LICENSE delete mode 100644 deps/npm/node_modules/npm-pick-manifest/node_modules/npm-normalize-package-bin/lib/index.js delete mode 100644 deps/npm/node_modules/opener/README.md delete mode 100755 deps/npm/node_modules/opener/bin/opener-bin.js delete mode 100644 deps/npm/node_modules/opener/lib/opener.js delete mode 100644 deps/npm/node_modules/opener/package.json create mode 100644 deps/npm/node_modules/process/LICENSE create mode 100644 deps/npm/node_modules/process/browser.js create mode 100644 deps/npm/node_modules/process/index.js create mode 100644 deps/npm/node_modules/process/package.json create mode 100644 deps/npm/node_modules/process/test.js rename deps/npm/node_modules/read-package-json-fast/{ => lib}/index.js (83%) delete mode 100644 deps/npm/node_modules/read-package-json/node_modules/npm-normalize-package-bin/LICENSE delete mode 100644 deps/npm/node_modules/read-package-json/node_modules/npm-normalize-package-bin/lib/index.js delete mode 100644 deps/npm/node_modules/read-package-json/node_modules/npm-normalize-package-bin/package.json delete mode 100644 deps/npm/node_modules/readdir-scoped-modules/LICENSE delete mode 100644 deps/npm/node_modules/readdir-scoped-modules/package.json delete mode 100644 deps/npm/node_modules/readdir-scoped-modules/readdir.js delete mode 100644 deps/npm/node_modules/rimraf/CHANGELOG.md delete mode 100644 deps/npm/node_modules/which/CHANGELOG.md create mode 100755 deps/npm/node_modules/which/bin/which.js create mode 100644 deps/npm/node_modules/which/lib/index.js delete mode 100644 deps/npm/tap-snapshots/test/lib/commands/adduser.js.test.cjs delete mode 100644 deps/npm/tap-snapshots/test/lib/npm.js.test.cjs delete mode 100644 deps/npm/test/fixtures/mock-registry.js delete mode 100644 deps/npm/test/lib/auth/legacy.js delete mode 100644 deps/npm/test/lib/auth/oauth.js delete mode 100644 deps/npm/test/lib/auth/saml.js delete mode 100644 deps/npm/test/lib/auth/sso.js delete mode 100644 deps/npm/test/lib/commands/bin.js delete mode 100644 deps/npm/test/lib/commands/birthday.js create mode 100644 deps/npm/test/lib/commands/login.js delete mode 100644 
 deps/npm/test/lib/commands/set-script.js
diff --git a/deps/npm/README.md b/deps/npm/README.md
index 7e4a5f38a7607f..b88882fadfee09 100644
--- a/deps/npm/README.md
+++ b/deps/npm/README.md
@@ -9,9 +9,9 @@
 One of the following versions of [Node.js](https://nodejs.org/en/download/)
 must be installed to run **`npm`**:

-* `12.x.x` >= `12.13.0`
-* `14.x.x` >= `14.15.0`
-* `16.0.0` or higher
+* `14.x.x` >= `14.17.0`
+* `16.x.x` >= `16.13.0`
+* `18.0.0` or higher

 ### Installation

diff --git a/deps/npm/bin/npx-cli.js b/deps/npm/bin/npx-cli.js
index cb05e1cb706c6e..75090aed41f1fb 100755
--- a/deps/npm/bin/npx-cli.js
+++ b/deps/npm/bin/npx-cli.js
@@ -98,6 +98,7 @@ for (i = 3; i < process.argv.length; i++) {
   }

   if (removed.has(key)) {
+    // eslint-disable-next-line no-console
     console.error(`npx: the --${key} argument has been removed.`)
     sawRemovedFlags = true
     process.argv.splice(i, 1)
@@ -122,6 +123,7 @@ for (i = 3; i < process.argv.length; i++) {
 }

 if (sawRemovedFlags) {
+  // eslint-disable-next-line no-console
   console.error('See `npm help exec` for more information')
 }

diff --git a/deps/npm/docs/README.md b/deps/npm/docs/README.md
new file mode 100644
index 00000000000000..5fc7ccf6cd60ac
--- /dev/null
+++ b/deps/npm/docs/README.md
@@ -0,0 +1,5 @@
+# docs
+
+[![CI - docs](https://github.com/npm/cli/actions/workflows/ci-docs.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-docs.yml)
+
+Scripts to build the npm docs.
diff --git a/deps/npm/docs/content/commands/npm-access.md b/deps/npm/docs/content/commands/npm-access.md
index e707eb6f5e212d..f2078e1c9c38dd 100644
--- a/deps/npm/docs/content/commands/npm-access.md
+++ b/deps/npm/docs/content/commands/npm-access.md
@@ -7,15 +7,13 @@ description: Set access level on published packages
 ### Synopsis

 ```bash
-npm access public [<package>]
-npm access restricted [<package>]
+npm access list packages [<user>|<scope>|<scope:team> [<package>]]
+npm access list collaborators [<package> [<user>]]
+npm access get status [<package>]
+npm access set status=public|private [<package>]
+npm access set mfa=none|publish|automation [<package>]
 npm access grant <read-only|read-write> <scope:team> [<package>]
 npm access revoke <scope:team> [<package>]
-npm access 2fa-required [<package>]
-npm access 2fa-not-required [<package>]
-npm access ls-packages [<user>|<scope>|<scope:team>]
-npm access ls-collaborators [<package> [<user>]]
-npm access edit [<package>]
 ```

 ### Description
@@ -79,12 +77,17 @@ Management of teams and team memberships is done with the `npm team` command.

 ### Configuration

-#### `registry`
+#### `json`

-* Default: "https://registry.npmjs.org/"
-* Type: URL
+* Default: false
+* Type: Boolean

-The base URL of the npm registry.
+Whether or not to output JSON data, rather than the normal output.
+
+* In `npm pkg set` it enables parsing set values with JSON.parse() before
+  saving them to your `package.json`.
+
+Not supported by all npm commands.

 #### `otp`

 * Default: null
 * Type: null or String

 This is a one-time password from a two-factor authenticator. It's needed
@@ -97,6 +100,13 @@ when publishing or changing package permissions with `npm access`.
 If not set, and a registry response fails with a challenge for a one-time
 password, npm will prompt on the command line for one.

+#### `registry`
+
+* Default: "https://registry.npmjs.org/"
+* Type: URL
+
+The base URL of the npm registry.
+
 ### See Also

 * [`libnpmaccess`](https://npm.im/libnpmaccess)
diff --git a/deps/npm/docs/content/commands/npm-adduser.md b/deps/npm/docs/content/commands/npm-adduser.md
index 2d040c6a4a5792..f0cd57be25a2b6 100644
--- a/deps/npm/docs/content/commands/npm-adduser.md
+++ b/deps/npm/docs/content/commands/npm-adduser.md
@@ -9,29 +9,19 @@ description: Add a registry user account

 ```bash
 npm adduser

-aliases: login, add-user
+alias: add-user
 ```

 Note: This command is unaware of workspaces.
 ### Description

-Create or verify a user named `<username>` in the specified registry, and
-save the credentials to the `.npmrc` file. If no registry is specified,
-the default registry will be used (see [`config`](/using-npm/config)).
+Create a new user in the specified registry, and save the credentials to
+the `.npmrc` file. If no registry is specified, the default registry
+will be used (see [`registry`](/using-npm/registry)).

-The username, password, and email are read in from prompts.
-
-To reset your password, go to <https://www.npmjs.com/forgot>
-
-To change your email address, go to <https://www.npmjs.com/email-edit>
-
-You may use this command multiple times with the same user account to
-authorize on a new machine. When authenticating on a new machine,
-the username, password and email address must all match with
-your existing record.
-
-`npm login` is an alias to `adduser` and behaves exactly the same way.
+When using `legacy` for your `auth-type`, the username, password, and
+email are read in from prompts.

 ### Configuration

@@ -74,11 +64,8 @@ npm init --scope=@foo --yes

 #### `auth-type`

-* Default: "legacy"
-* Type: "legacy", "web", "sso", "saml", "oauth", or "webauthn"
-
-NOTE: auth-type values "sso", "saml", "oauth", and "webauthn" will be
-removed in a future version.
+* Default: "web"
+* Type: "legacy" or "web"

 What authentication strategy to use with `login`.

diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md
index ab6395502328ea..25c733243660e9 100644
--- a/deps/npm/docs/content/commands/npm-audit.md
+++ b/deps/npm/docs/content/commands/npm-audit.md
@@ -100,9 +100,9 @@ the path `/-/npm/v1/security/advisories/bulk`.

 Any packages in the tree that do not have a `version` field in their
 package.json file will be ignored. If any `--omit` options are specified
-(either via the `--omit` config, or one of the shorthands such as
-`--production`, `--only=dev`, and so on), then packages will be omitted
-from the submitted payload as appropriate.
+(either via the [`--omit` config](/using-npm/config#omit), or one of the
+shorthands such as `--production`, `--only=dev`, and so on), then packages will
+be omitted from the submitted payload as appropriate.

 If the registry responds with an error, or with an invalid response,
 then npm will attempt to load advisory data from the `Quick Audit` endpoint.
@@ -179,7 +179,7 @@ vulnerabilities are found _or_ if the remediation is able to successfully
 fix all vulnerabilities.

 If vulnerabilities were found the exit code will depend on the
-`audit-level` configuration setting.
+[`audit-level` config](/using-npm/config#audit-level).

 ### Examples

@@ -408,12 +408,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.
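To make the audit exit-code and `--omit` behaviour described in this hunk concrete, here is a minimal bash sketch; it only uses the `--audit-level` and `--omit` config options documented above, nothing else is assumed:

```bash
# Exit non-zero only when advisories of severity "high" or above are
# found, so CI does not fail on low or moderate findings.
npm audit --audit-level=high

# Audit only what a production install would use: devDependencies are
# omitted from the payload submitted to the registry.
npm audit --omit=dev
```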
### See Also diff --git a/deps/npm/docs/content/commands/npm-bin.md b/deps/npm/docs/content/commands/npm-bin.md deleted file mode 100644 index 16a28a8e1c3ace..00000000000000 --- a/deps/npm/docs/content/commands/npm-bin.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: npm-bin -section: 1 -description: Display npm bin folder ---- - -### Synopsis - -```bash -npm bin -``` - -Note: This command is unaware of workspaces. - -### Description - -Print the folder where npm will install executables. - -### Configuration - -#### `global` - -* Default: false -* Type: Boolean - -Operates in "global" mode, so that packages are installed into the `prefix` -folder instead of the current working directory. See -[folders](/configuring-npm/folders) for more on the differences in behavior. - -* packages are installed into the `{prefix}/lib/node_modules` folder, instead - of the current working directory. -* bin files are linked to `{prefix}/bin` -* man pages are linked to `{prefix}/share/man` - -### See Also - -* [npm prefix](/commands/npm-prefix) -* [npm root](/commands/npm-root) -* [npm folders](/configuring-npm/folders) -* [npm config](/commands/npm-config) -* [npmrc](/configuring-npm/npmrc) diff --git a/deps/npm/docs/content/commands/npm-bugs.md b/deps/npm/docs/content/commands/npm-bugs.md index 153355fc694f7e..1f135fc6c933a9 100644 --- a/deps/npm/docs/content/commands/npm-bugs.md +++ b/deps/npm/docs/content/commands/npm-bugs.md @@ -16,8 +16,9 @@ alias: issues This command tries to guess at the likely location of a package's bug tracker URL or the `mailto` URL of the support email, and then tries to -open it using the `--browser` config param. If no package name is provided, it -will search for a `package.json` in the current folder and use the `name` property. +open it using the [`--browser` config](/using-npm/config#browser) param. If no +package name is provided, it will search for a `package.json` in the current +folder and use the `name` property. ### Configuration diff --git a/deps/npm/docs/content/commands/npm-cache.md b/deps/npm/docs/content/commands/npm-cache.md index a6ef86dd22501f..a9d76179e81162 100644 --- a/deps/npm/docs/content/commands/npm-cache.md +++ b/deps/npm/docs/content/commands/npm-cache.md @@ -75,8 +75,7 @@ verify`. * Default: Windows: `%LocalAppData%\npm-cache`, Posix: `~/.npm` * Type: Path -The location of npm's cache directory. See [`npm -cache`](/commands/npm-cache) +The location of npm's cache directory. ### See Also diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 474e27cdd0d967..4a5caf7d0c691f 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -106,27 +106,39 @@ folder instead of the current working directory. See * bin files are linked to `{prefix}/bin` * man pages are linked to `{prefix}/share/man` -#### `global-style` +#### `install-strategy` + +* Default: "hoisted" +* Type: "hoisted", "nested", or "shallow" + +Sets the strategy for installing packages in node_modules. hoisted +(default): Install non-duplicated in top-level, and duplicated as necessary +within directory structure. nested: (formerly --legacy-bundling) install in +place, no hoisting. shallow (formerly --global-style) only install direct +deps at top-level. linked: (coming soon) install in node_modules/.store, +link in place, unhoisted. 
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `omit`

@@ -298,12 +310,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-config.md b/deps/npm/docs/content/commands/npm-config.md
index 311183b9170101..fe87abac903e8a 100644
--- a/deps/npm/docs/content/commands/npm-config.md
+++ b/deps/npm/docs/content/commands/npm-config.md
@@ -12,6 +12,7 @@ npm config get [<key> [<key> ...]]
 npm config delete <key> [<key> ...]
 npm config list [--json]
 npm config edit
+npm config fix

 alias: c
 ```
@@ -26,7 +27,7 @@ variables, `npmrc` files, and in some cases, the `package.json` file.

 See [npmrc](/configuring-npm/npmrc) for more information about the npmrc
 files.

-See [config(7)](/using-npm/config) for a more thorough explanation of the
+See [config](/using-npm/config) for a more thorough explanation of the
 mechanisms involved, and a full list of config options available.

 The `npm config` command can be used to update and edit the contents
@@ -91,6 +92,16 @@ npm config edit
 Opens the config file in an editor. Use the `--global` flag to edit the
 global config.

+#### fix
+
+```bash
+npm config fix
+```
+
+Attempts to repair invalid configuration items. Usually this means
+attaching authentication config (i.e. `_auth`, `_authToken`) to the
+configured `registry`.
+
 ### Configuration

 #### `json`
@@ -121,8 +132,8 @@ folder instead of the current working directory. See

 #### `editor`

-* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on
-  Windows, or 'vim' on Unix systems
+* Default: The EDITOR or VISUAL environment variables, or
+  '%SYSTEMROOT%\notepad.exe' on Windows, or 'vi' on Unix systems
 * Type: String

 The command to run for `npm edit` and `npm config edit`.
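A short sketch of what the new `npm config fix` subcommand documented above might do to a user-level `.npmrc`; the registry URL and token value are placeholders, not real credentials or output copied from npm:

```bash
# ~/.npmrc before: a bare _authToken that is not attached to any registry
#   _authToken=npm_XXXXXXXX
#   registry=https://registry.example.com/

npm config fix

# ~/.npmrc after: the token is re-attached to the configured registry
#   //registry.example.com/:_authToken=npm_XXXXXXXX
#   registry=https://registry.example.com/
```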
diff --git a/deps/npm/docs/content/commands/npm-dedupe.md b/deps/npm/docs/content/commands/npm-dedupe.md
index f7fff73dc1786e..80353bad5d0d7a 100644
--- a/deps/npm/docs/content/commands/npm-dedupe.md
+++ b/deps/npm/docs/content/commands/npm-dedupe.md
@@ -77,27 +77,39 @@ values in `package.json` you can run: `npm update --save` instead.

 ### Configuration

-#### `global-style`
+#### `install-strategy`
+
+* Default: "hoisted"
+* Type: "hoisted", "nested", or "shallow"
+
+Sets the strategy for installing packages in node_modules. hoisted
+(default): Install non-duplicated in top-level, and duplicated as necessary
+within directory structure. nested: (formerly --legacy-bundling) install in
+place, no hoisting. shallow (formerly --global-style) only install direct
+deps at top-level. linked: (coming soon) install in node_modules/.store,
+link in place, unhoisted.
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `strict-peer-deps`

@@ -257,12 +269,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-dist-tag.md b/deps/npm/docs/content/commands/npm-dist-tag.md
index d22831d2ce823e..e4a7fba665db38 100644
--- a/deps/npm/docs/content/commands/npm-dist-tag.md
+++ b/deps/npm/docs/content/commands/npm-dist-tag.md
@@ -19,8 +19,8 @@ alias: dist-tags
 Add, remove, and enumerate distribution tags on a package:

 * add: Tags the specified version of the package with the specified tag,
-  or the `--tag` config if not specified. If you have two-factor
-  authentication on auth-and-writes then you’ll need to include a
+  or the [`--tag` config](/using-npm/config#tag) if not specified. If you have
+  two-factor authentication on auth-and-writes then you’ll need to include a
   one-time password on the command line with `--otp <one-time password>`,
   or at the OTP prompt.

diff --git a/deps/npm/docs/content/commands/npm-docs.md b/deps/npm/docs/content/commands/npm-docs.md
index e4c88dc19d337c..027a101d121e6b 100644
--- a/deps/npm/docs/content/commands/npm-docs.md
+++ b/deps/npm/docs/content/commands/npm-docs.md
@@ -15,10 +15,10 @@ alias: home
 ### Description

 This command tries to guess at the likely location of a package's
-documentation URL, and then tries to open it using the `--browser` config
-param. You can pass multiple package names at once. If no package name is
-provided, it will search for a `package.json` in the current folder and use
-the `name` property.
+documentation URL, and then tries to open it using the
+[`--browser` config](/using-npm/config#browser) param. You can pass multiple
+package names at once. If no package name is provided, it will search for a
+`package.json` in the current folder and use the `name` property.

 ### Configuration

diff --git a/deps/npm/docs/content/commands/npm-edit.md b/deps/npm/docs/content/commands/npm-edit.md
index 9f6750ff175415..04a4d15b5afcd9 100644
--- a/deps/npm/docs/content/commands/npm-edit.md
+++ b/deps/npm/docs/content/commands/npm-edit.md
@@ -29,8 +29,8 @@ changes to your locally installed copy.

 #### `editor`

-* Default: The EDITOR or VISUAL environment variables, or 'notepad.exe' on
-  Windows, or 'vim' on Unix systems
+* Default: The EDITOR or VISUAL environment variables, or
+  '%SYSTEMROOT%\notepad.exe' on Windows, or 'vi' on Unix systems
 * Type: String

 The command to run for `npm edit` and `npm config edit`.
diff --git a/deps/npm/docs/content/commands/npm-exec.md b/deps/npm/docs/content/commands/npm-exec.md
index b968671ed84670..af19ec5f077838 100644
--- a/deps/npm/docs/content/commands/npm-exec.md
+++ b/deps/npm/docs/content/commands/npm-exec.md
@@ -33,10 +33,11 @@ specified multiple times, to execute the supplied command in an
 environment where all specified packages are available.

 If any requested packages are not present in the local project
-dependencies, then they are installed to a folder in the npm cache, which
-is added to the `PATH` environment variable in the executed process. A
-prompt is printed (which can be suppressed by providing either `--yes` or
-`--no`).
+dependencies, then a prompt is printed, which can be suppressed by
+providing either `--yes` or `--no`. When standard input is not a TTY or a
+CI environment is detected, `--yes` is assumed. The requested packages are
+installed to a folder in the npm cache, which is added to the `PATH`
+environment variable in the executed process.

 Package names provided without a specifier will be matched with whatever
 version exists in the local project. Package names with a specifier will
@@ -211,7 +212,8 @@ $ npx -c 'eslint && say "hooray, lint passed"'

 ### Workspaces support

-You may use the `workspace` or `workspaces` configs in order to run an
+You may use the [`workspace`](/using-npm/config#workspace) or
+[`workspaces`](/using-npm/config#workspaces) configs in order to run an
 arbitrary command from an npm package (either one installed locally, or
 fetched remotely) in the context of the specified workspaces.
 If no positional argument or `--call` option is provided, it will open an
@@ -242,9 +244,9 @@ Assuming the workspace configuration is properly set up at the root level
 ```

 You can execute an arbitrary command from a package in the context of each of
-the configured workspaces when using the `workspaces` configuration options,
-in this example we're using **eslint** to lint any js file found within each
-workspace folder:
+the configured workspaces when using the
+[`workspaces` config options](/using-npm/config#workspace), in this example
+we're using **eslint** to lint any js file found within each workspace folder:

 ```
 npm exec --ws -- eslint ./*.js
diff --git a/deps/npm/docs/content/commands/npm-find-dupes.md b/deps/npm/docs/content/commands/npm-find-dupes.md
index 723f04cfb11887..a3ef44eb5b7fb1 100644
--- a/deps/npm/docs/content/commands/npm-find-dupes.md
+++ b/deps/npm/docs/content/commands/npm-find-dupes.md
@@ -17,27 +17,39 @@ duplications, without actually changing the package tree.

 ### Configuration

-#### `global-style`
+#### `install-strategy`
+
+* Default: "hoisted"
+* Type: "hoisted", "nested", or "shallow"
+
+Sets the strategy for installing packages in node_modules. hoisted
+(default): Install non-duplicated in top-level, and duplicated as necessary
+within directory structure. nested: (formerly --legacy-bundling) install in
+place, no hoisting. shallow (formerly --global-style) only install direct
+deps at top-level. linked: (coming soon) install in node_modules/.store,
+link in place, unhoisted.
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `strict-peer-deps`

@@ -184,12 +196,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.
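Since the deprecated `global-style` and `legacy-bundling` flags recur in several of these config sections, a minimal sketch of how they map onto the new `install-strategy` values may help; all flags below are the ones documented in this patch:

```bash
# npm 8 spelling                  # npm 9 equivalent
npm install --global-style        # npm install --install-strategy=shallow
npm install --legacy-bundling     # npm install --install-strategy=nested

# the default strategy is unchanged: a hoisted node_modules layout
npm install --install-strategy=hoisted
```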
### See Also diff --git a/deps/npm/docs/content/commands/npm-fund.md b/deps/npm/docs/content/commands/npm-fund.md index 25a6735ec75473..0db66f49ad65b1 100644 --- a/deps/npm/docs/content/commands/npm-fund.md +++ b/deps/npm/docs/content/commands/npm-fund.md @@ -16,9 +16,10 @@ This command retrieves information on how to fund the dependencies of a given project. If no package name is provided, it will list all dependencies that are looking for funding in a tree structure, listing the type of funding and the url to visit. If a package name is provided -then it tries to open its funding url using the `--browser` config -param; if there are multiple funding sources for the package, the user -will be instructed to pass the `--which` option to disambiguate. +then it tries to open its funding url using the +[`--browser` config](/using-npm/config#browser) param; if there are multiple +funding sources for the package, the user will be instructed to pass the +`--which` option to disambiguate. The list will avoid duplicated entries and will stack all packages that share the same url as a single entry. Thus, the list does not have the @@ -29,7 +30,8 @@ same shape of the output from `npm ls`. ### Workspaces support It's possible to filter the results to only include a single workspace -and its dependencies using the `workspace` config option. +and its dependencies using the +[`workspace` config](/using-npm/config#workspace) option. #### Example: diff --git a/deps/npm/docs/content/commands/npm-install-ci-test.md b/deps/npm/docs/content/commands/npm-install-ci-test.md index c6c7f2196da2e5..9fd7c267b102f5 100644 --- a/deps/npm/docs/content/commands/npm-install-ci-test.md +++ b/deps/npm/docs/content/commands/npm-install-ci-test.md @@ -52,27 +52,39 @@ folder instead of the current working directory. See * bin files are linked to `{prefix}/bin` * man pages are linked to `{prefix}/share/man` -#### `global-style` +#### `install-strategy` + +* Default: "hoisted" +* Type: "hoisted", "nested", or "shallow" + +Sets the strategy for installing packages in node_modules. hoisted +(default): Install non-duplicated in top-level, and duplicated as necessary +within directory structure. nested: (formerly --legacy-bundling) install in +place, no hoisting. shallow (formerly --global-style) only install direct +deps at top-level. linked: (coming soon) install in node_modules/.store, +link in place, unhoisted. + +#### `legacy-bundling` * Default: false * Type: Boolean +* DEPRECATED: This option has been deprecated in favor of + `--install-strategy=nested` -Causes npm to install the package into your local `node_modules` folder with -the same layout it uses with the global `node_modules` folder. Only your -direct dependencies will show in `node_modules` and everything they depend -on will be flattened in their `node_modules` folders. This obviously will -eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling` -will be preferred. +Instead of hoisting package installs in `node_modules`, install packages in +the same manner that they are depended on. This may cause very deep +directory structures and duplicate package installs as there is no +de-duplicating. Sets `--install-strategy=nested`. -#### `legacy-bundling` +#### `global-style` * Default: false * Type: Boolean +* DEPRECATED: This option has been deprecated in favor of + `--install-strategy=shallow` -Causes npm to install the package such that versions of npm prior to 1.4, -such as the one included with node 0.8, can install the package. 
 This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `omit`

@@ -244,12 +256,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md
index aa1ff35bc45892..5642472630b530 100644
--- a/deps/npm/docs/content/commands/npm-install-test.md
+++ b/deps/npm/docs/content/commands/npm-install-test.md
@@ -53,27 +53,39 @@ folder instead of the current working directory. See

 * bin files are linked to `{prefix}/bin`
 * man pages are linked to `{prefix}/share/man`

-#### `global-style`
+#### `install-strategy`
+
+* Default: "hoisted"
+* Type: "hoisted", "nested", or "shallow"
+
+Sets the strategy for installing packages in node_modules. hoisted
+(default): Install non-duplicated in top-level, and duplicated as necessary
+within directory structure. nested: (formerly --legacy-bundling) install in
+place, no hoisting. shallow (formerly --global-style) only install direct
+deps at top-level. linked: (coming soon) install in node_modules/.store,
+link in place, unhoisted.
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `omit`

@@ -245,12 +257,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md
index dc935de70aaf0f..3604aab4b940f2 100644
--- a/deps/npm/docs/content/commands/npm-install.md
+++ b/deps/npm/docs/content/commands/npm-install.md
@@ -118,7 +118,7 @@ into a tarball (b).

 * `npm install [<@scope>/]<name>`:

   Do a `<name>@<tag>` install, where `<tag>` is the "tag" config. (See
-  [`config`](/using-npm/config). The config's default value is `latest`.)
+  [`config`](/using-npm/config#tag). The config's default value is `latest`.)

   In most cases, this will install the version of the modules tagged as
   `latest` on the npm registry.
@@ -443,27 +443,39 @@ folder instead of the current working directory. See

 * bin files are linked to `{prefix}/bin`
 * man pages are linked to `{prefix}/share/man`

-#### `global-style`
+#### `install-strategy`
+
+* Default: "hoisted"
+* Type: "hoisted", "nested", or "shallow"
+
+Sets the strategy for installing packages in node_modules. hoisted
+(default): Install non-duplicated in top-level, and duplicated as necessary
+within directory structure. nested: (formerly --legacy-bundling) install in
+place, no hoisting. shallow (formerly --global-style) only install direct
+deps at top-level. linked: (coming soon) install in node_modules/.store,
+link in place, unhoisted.
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `omit`

@@ -635,12 +647,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.
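A sketch of the flipped `install-links` default described above; `./packages/my-lib` is a hypothetical local package directory, and the `--no-` form is npm's standard boolean negation:

```bash
# npm 9 default (install-links=true): the file: dependency is packed
# and copied into node_modules like a registry tarball
npm install ./packages/my-lib

# opt back into the old behaviour of symlinking the target instead
npm install ./packages/my-lib --no-install-links
```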
 ### Algorithm

diff --git a/deps/npm/docs/content/commands/npm-link.md b/deps/npm/docs/content/commands/npm-link.md
index 07917bc833003b..09459cc0ca8a50 100644
--- a/deps/npm/docs/content/commands/npm-link.md
+++ b/deps/npm/docs/content/commands/npm-link.md
@@ -144,27 +144,39 @@ folder instead of the current working directory. See

 * bin files are linked to `{prefix}/bin`
 * man pages are linked to `{prefix}/share/man`

-#### `global-style`
+#### `install-strategy`
+
+* Default: "hoisted"
+* Type: "hoisted", "nested", or "shallow"
+
+Sets the strategy for installing packages in node_modules. hoisted
+(default): Install non-duplicated in top-level, and duplicated as necessary
+within directory structure. nested: (formerly --legacy-bundling) install in
+place, no hoisting. shallow (formerly --global-style) only install direct
+deps at top-level. linked: (coming soon) install in node_modules/.store,
+link in place, unhoisted.
+
+#### `legacy-bundling`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=nested`

-Causes npm to install the package into your local `node_modules` folder with
-the same layout it uses with the global `node_modules` folder. Only your
-direct dependencies will show in `node_modules` and everything they depend
-on will be flattened in their `node_modules` folders. This obviously will
-eliminate some deduping. If used with `legacy-bundling`, `legacy-bundling`
-will be preferred.
+Instead of hoisting package installs in `node_modules`, install packages in
+the same manner that they are depended on. This may cause very deep
+directory structures and duplicate package installs as there is no
+de-duplicating. Sets `--install-strategy=nested`.

-#### `legacy-bundling`
+#### `global-style`

 * Default: false
 * Type: Boolean
+* DEPRECATED: This option has been deprecated in favor of
+  `--install-strategy=shallow`

-Causes npm to install the package such that versions of npm prior to 1.4,
-such as the one included with node 0.8, can install the package. This
-eliminates all automatic deduping. If used with `global-style` this option
-will be preferred.
+Only install direct dependencies in the top level `node_modules`, but hoist
+on deeper dependencies. Sets `--install-strategy=shallow`.

 #### `strict-peer-deps`

@@ -324,12 +336,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-login.md b/deps/npm/docs/content/commands/npm-login.md
new file mode 100644
index 00000000000000..4feb7150400268
--- /dev/null
+++ b/deps/npm/docs/content/commands/npm-login.md
@@ -0,0 +1,87 @@
+---
+title: npm-login
+section: 1
+description: Login to a registry user account
+---
+
+### Synopsis
+
+```bash
+npm login
+```
+
+Note: This command is unaware of workspaces.
+
+### Description
+
+Verify a user in the specified registry, and save the credentials to the
+`.npmrc` file. If no registry is specified, the default registry will be
+used (see [`config`](/using-npm/config)).
+
+When using `legacy` for your `auth-type`, the username and password are
+read in from prompts.
+
+To reset your password, go to <https://www.npmjs.com/forgot>
+
+To change your email address, go to <https://www.npmjs.com/email-edit>
+
+You may use this command multiple times with the same user account to
+authorize on a new machine. When authenticating on a new machine,
+the username, password and email address must all match with
+your existing record.
+
+### Configuration
+
+#### `registry`
+
+* Default: "https://registry.npmjs.org/"
+* Type: URL
+
+The base URL of the npm registry.
+
+#### `scope`
+
+* Default: the scope of the current project, if any, or ""
+* Type: String
+
+Associate an operation with a scope for a scoped registry.
+
+Useful when logging in to or out of a private registry:
+
+```
+# log in, linking the scope to the custom registry
+npm login --scope=@mycorp --registry=https://registry.mycorp.com
+
+# log out, removing the link and the auth token
+npm logout --scope=@mycorp
+```
+
+This will cause `@mycorp` to be mapped to the registry for future
+installation of packages specified according to the pattern
+`@mycorp/package`.
+
+This will also cause `npm init` to create a scoped package.
+
+```
+# accept all defaults, and create a package named "@foo/whatever",
+# instead of just named "whatever"
+npm init --scope=@foo --yes
+```
+
+#### `auth-type`
+
+* Default: "web"
+* Type: "legacy" or "web"
+
+What authentication strategy to use with `login`.
+
+### See Also
+
+* [npm registry](/using-npm/registry)
+* [npm config](/commands/npm-config)
+* [npmrc](/configuring-npm/npmrc)
+* [npm owner](/commands/npm-owner)
+* [npm whoami](/commands/npm-whoami)
+* [npm token](/commands/npm-token)
+* [npm profile](/commands/npm-profile)
diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md
index 43d4eb7d6791cb..385c36728b7e70 100644
--- a/deps/npm/docs/content/commands/npm-ls.md
+++ b/deps/npm/docs/content/commands/npm-ls.md
@@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For
 example, running `npm ls promzard` in npm's source tree will show:

 ```bash
-npm@8.19.3 /path/to/npm
+npm@9.1.3 /path/to/npm
 └─┬ init-package-json@0.0.4
   └── promzard@0.1.5
 ```
@@ -236,12 +236,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-pkg.md b/deps/npm/docs/content/commands/npm-pkg.md
index 519104457df79e..484aabfca5431c 100644
--- a/deps/npm/docs/content/commands/npm-pkg.md
+++ b/deps/npm/docs/content/commands/npm-pkg.md
@@ -144,7 +144,8 @@ Returned values are always in **json** format.
 ### Workspaces support

 You can set/get/delete items across your configured workspaces by using the
-`workspace` or `workspaces` config options.
+[`workspace`](/using-npm/config#workspace) or
+[`workspaces`](/using-npm/config#workspaces) config options.
 For example, setting a `funding` value across all configured workspaces
 of a project:

diff --git a/deps/npm/docs/content/commands/npm-prune.md b/deps/npm/docs/content/commands/npm-prune.md
index 95946d9dc969ca..0cd540f5c2d429 100644
--- a/deps/npm/docs/content/commands/npm-prune.md
+++ b/deps/npm/docs/content/commands/npm-prune.md
@@ -157,12 +157,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.

 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-publish.md b/deps/npm/docs/content/commands/npm-publish.md
index 4963cf158a660d..09756aedf02724 100644
--- a/deps/npm/docs/content/commands/npm-publish.md
+++ b/deps/npm/docs/content/commands/npm-publish.md
@@ -103,19 +103,19 @@ tarball that will be compared with the local files by default.

 #### `access`

-* Default: 'restricted' for scoped packages, 'public' for unscoped packages
+* Default: 'public' for new packages; for existing packages it will not change
+  the current level
 * Type: null, "restricted", or "public"

-When publishing scoped packages, the access level defaults to `restricted`.
-If you want your scoped package to be publicly viewable (and installable)
-set `--access=public`. The only valid values for `access` are `public` and
-`restricted`. Unscoped packages _always_ have an access level of `public`.
+If you do not want your scoped package to be publicly viewable (and
+installable) set `--access=restricted`.

-Note: Using the `--access` flag on the `npm publish` command will only set
-the package access level on the initial publish of the package. Any
-subsequent `npm publish` commands using the `--access` flag will not have an
-effect to the access level. To make changes to the access level after the
-initial publish use `npm access`.
+Unscoped packages cannot be set to `restricted`.
+
+Note: This defaults to not changing the current access level for existing
+packages. Specifying a value of `restricted` or `public` during publish will
+change the access for an existing package the same way that `npm access set
+status` would.

 #### `dry-run`

diff --git a/deps/npm/docs/content/commands/npm-rebuild.md b/deps/npm/docs/content/commands/npm-rebuild.md
index 75f1efe1b69641..a6955ec80c7e7c 100644
--- a/deps/npm/docs/content/commands/npm-rebuild.md
+++ b/deps/npm/docs/content/commands/npm-rebuild.md
@@ -130,12 +130,12 @@ This value is not exported to the environment for child processes.

 #### `install-links`

-* Default: false
+* Default: true
 * Type: Boolean

-When set file: protocol dependencies that exist outside of the project root
-will be packed and installed as regular dependencies instead of creating a
-symlink. This option has no effect on workspaces.
+When set file: protocol dependencies will be packed and installed as regular
+dependencies instead of creating a symlink. This option has no effect on
+workspaces.
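A sketch of the new `access` semantics from the `npm publish` hunk above; `@mycorp/pkg` is a hypothetical scoped package name, and the `npm access set status` form is the one given in the updated npm-access synopsis:

```bash
# First publish of a scoped package now defaults to public access;
# opt out explicitly to keep it restricted
npm publish --access=restricted

# For an already-published package, the equivalent change is
npm access set status=private @mycorp/pkg
```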
 ### See Also

diff --git a/deps/npm/docs/content/commands/npm-repo.md b/deps/npm/docs/content/commands/npm-repo.md
index eb55780de907aa..10ddc139f8535a 100644
--- a/deps/npm/docs/content/commands/npm-repo.md
+++ b/deps/npm/docs/content/commands/npm-repo.md
@@ -13,9 +13,10 @@ npm repo [<pkgname> [<pkgname> ...]]

 ### Description

 This command tries to guess at the likely location of a package's
-repository URL, and then tries to open it using the `--browser` config
-param. If no package name is provided, it will search for a `package.json`
-in the current folder and use the `repository` property.
+repository URL, and then tries to open it using the
+[`--browser` config](/using-npm/config#browser) param. If no package name is
+provided, it will search for a `package.json` in the current folder and use the
+`repository` property.

 ### Configuration

diff --git a/deps/npm/docs/content/commands/npm-run-script.md b/deps/npm/docs/content/commands/npm-run-script.md
index 16a5c693f9ba25..26011e4f7959a6 100644
--- a/deps/npm/docs/content/commands/npm-run-script.md
+++ b/deps/npm/docs/content/commands/npm-run-script.md
@@ -59,7 +59,8 @@ The actual shell your script is run within is platform dependent. By
 default, on Unix-like systems it is the `/bin/sh` command, on Windows it is
 `cmd.exe`. The actual shell referred to by `/bin/sh` also depends on the
 system.
-You can customize the shell with the `script-shell` configuration.
+You can customize the shell with the
+[`script-shell` config](/using-npm/config#script-shell).

 Scripts are run from the root of the package folder, regardless of what the
 current working directory is when `npm run` is called. If you want your
@@ -76,7 +77,8 @@ forgotten.

 ### Workspaces support

-You may use the `workspace` or `workspaces` configs in order to run an
+You may use the [`workspace`](/using-npm/config#workspace) or
+[`workspaces`](/using-npm/config#workspaces) configs in order to run an
 arbitrary command from a package's `"scripts"` object in the context of
 the specified workspaces. If no `"command"` is provided, it will list the
 available scripts for each of these configured workspaces.
@@ -105,7 +107,8 @@ Assuming the workspace configuration is properly set up at the root level
 ```

 And that each of the configured workspaces has a configured `test` script,
-we can run tests in all of them using the `workspaces` config:
+we can run tests in all of them using the
+[`workspaces` config](/using-npm/config#workspaces):

 ```
 npm test --workspaces
diff --git a/deps/npm/docs/content/commands/npm-set-script.md b/deps/npm/docs/content/commands/npm-set-script.md
deleted file mode 100644
index 9bbf09ea9076ad..00000000000000
--- a/deps/npm/docs/content/commands/npm-set-script.md
+++ /dev/null
@@ -1,91 +0,0 @@
----
-title: npm-set-script
-section: 1
-description: Set tasks in the scripts section of package.json
----
-
-### Synopsis
-An npm command that lets you create a task in the `scripts` section of the `package.json`.
-
-Deprecated.
- -```bash -npm set-script [ + + + + diff --git a/test/fixtures/wpt/dom/events/Event-constructors.any.js b/test/fixtures/wpt/dom/events/Event-constructors.any.js index aced2f3c2cda61..faa623ea92991b 100644 --- a/test/fixtures/wpt/dom/events/Event-constructors.any.js +++ b/test/fixtures/wpt/dom/events/Event-constructors.any.js @@ -1,5 +1,11 @@ // META: title=Event constructors +test(function() { + assert_throws_js( + TypeError, + () => Event(""), + "Calling Event constructor without 'new' must throw") +}) test(function() { assert_throws_js(TypeError, function() { new Event() diff --git a/test/fixtures/wpt/dom/events/event-global-is-still-set-when-reporting-exception-onerror.html b/test/fixtures/wpt/dom/events/event-global-is-still-set-when-reporting-exception-onerror.html new file mode 100644 index 00000000000000..ceaac4fe2b2408 --- /dev/null +++ b/test/fixtures/wpt/dom/events/event-global-is-still-set-when-reporting-exception-onerror.html @@ -0,0 +1,43 @@ + + +window.onerror handler restores window.event after it reports an exception + + + + + + + + + diff --git a/test/fixtures/wpt/dom/events/mouse-event-retarget.html b/test/fixtures/wpt/dom/events/mouse-event-retarget.html new file mode 100644 index 00000000000000..c9ce6240d40cc8 --- /dev/null +++ b/test/fixtures/wpt/dom/events/mouse-event-retarget.html @@ -0,0 +1,26 @@ + + +Script created MouseEvent properly retargets and adjusts offsetX + + + + + +
Hello
+ + diff --git a/test/fixtures/wpt/dom/events/no-focus-events-at-clicking-editable-content-in-link.html b/test/fixtures/wpt/dom/events/no-focus-events-at-clicking-editable-content-in-link.html new file mode 100644 index 00000000000000..dc08636c467fdf --- /dev/null +++ b/test/fixtures/wpt/dom/events/no-focus-events-at-clicking-editable-content-in-link.html @@ -0,0 +1,80 @@ + + + + +Clicking editable content in link shouldn't cause redundant focus related events + + + + + + + +Hello +Hello + + + diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-body.html new file mode 100644 index 00000000000000..5574fe0acbcccb --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-body.html @@ -0,0 +1,19 @@ + +non-passive mousewheel event listener on body + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-div.html new file mode 100644 index 00000000000000..6fbf692cd79d76 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-div.html @@ -0,0 +1,35 @@ + +non-passive mousewheel event listener on div + + + + + + + + + +
+
+
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-document.html new file mode 100644 index 00000000000000..7d07393c699dd5 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-document.html @@ -0,0 +1,19 @@ + +non-passive mousewheel event listener on document + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-root.html new file mode 100644 index 00000000000000..e85fbacaba6a97 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-root.html @@ -0,0 +1,19 @@ + +non-passive mousewheel event listener on root + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-window.html new file mode 100644 index 00000000000000..29b09f85615d61 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-mousewheel-event-listener-on-window.html @@ -0,0 +1,19 @@ + +non-passive mousewheel event listener on window + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-body.html new file mode 100644 index 00000000000000..f417bdd0a66817 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-body.html @@ -0,0 +1,25 @@ + +non-passive touchmove event listener on body + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-div.html new file mode 100644 index 00000000000000..11c9345407566d --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-div.html @@ -0,0 +1,25 @@ + +non-passive touchmove event listener on div + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-document.html new file mode 100644 index 00000000000000..8b95a8d492be36 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-document.html @@ -0,0 +1,25 @@ + +non-passive touchmove event listener on document + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-root.html new file mode 100644 index 00000000000000..c41ab72bd8c645 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-root.html @@ -0,0 +1,25 @@ + +non-passive touchmove event listener on root + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-window.html new file mode 100644 index 00000000000000..3d6675c56615eb --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchmove-event-listener-on-window.html @@ -0,0 +1,25 @@ + + + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-body.html new file mode 100644 index 00000000000000..f6e6ecb06d9c55 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-body.html @@ -0,0 +1,25 @@ + +non-passive touchstart event listener on body + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-div.html new file mode 100644 index 00000000000000..2e7c6e6b3b6992 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-div.html @@ -0,0 +1,25 @@ + +non-passive touchstart event listener on div + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-document.html new file mode 100644 index 00000000000000..22fcbdc322ca27 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-document.html @@ -0,0 +1,25 @@ + +non-passive touchstart event listener on document + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-root.html new file mode 100644 index 00000000000000..56c51349a04f38 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-root.html @@ -0,0 +1,25 @@ + +non-passive touchstart event listener on root + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-window.html new file mode 100644 index 00000000000000..4e9d424a9ded16 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-touchstart-event-listener-on-window.html @@ -0,0 +1,25 @@ + +non-passive touchstart event listener on window + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-body.html new file mode 100644 index 00000000000000..070cadc29187af --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-body.html @@ -0,0 +1,18 @@ + +non-passive wheel event listener on body + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-div.html new file mode 100644 index 00000000000000..c49d18ac139a55 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-div.html @@ -0,0 +1,34 @@ + +non-passive wheel event listener on div + + + + + + + + +
+
+
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-document.html new file mode 100644 index 00000000000000..31a55cad43e17d --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-document.html @@ -0,0 +1,18 @@ + +non-passive wheel event listener on document + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-root.html new file mode 100644 index 00000000000000..b7bacbfc7ceb38 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-root.html @@ -0,0 +1,18 @@ + +non-passive wheel event listener on root + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-window.html new file mode 100644 index 00000000000000..c236059df4dac9 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/non-passive-wheel-event-listener-on-window.html @@ -0,0 +1,18 @@ + +non-passive wheel event listener on window + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-body.html new file mode 100644 index 00000000000000..9db12cfbdc7343 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-body.html @@ -0,0 +1,19 @@ + +passive mousewheel event listener on body + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-div.html new file mode 100644 index 00000000000000..373670856b833e --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-div.html @@ -0,0 +1,35 @@ + +passive mousewheel event listener on div + + + + + + + + + +
+
+
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-document.html new file mode 100644 index 00000000000000..71262280b6fec6 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-document.html @@ -0,0 +1,19 @@ + +passive mousewheel event listener on document + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-root.html new file mode 100644 index 00000000000000..fc641d172e91bb --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-root.html @@ -0,0 +1,19 @@ + +passive mousewheel event listener on root + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-window.html new file mode 100644 index 00000000000000..f60955c7c48895 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-mousewheel-event-listener-on-window.html @@ -0,0 +1,19 @@ + +passive mousewheel event listener on window + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-body.html new file mode 100644 index 00000000000000..2349bad2583434 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-body.html @@ -0,0 +1,25 @@ + +passive touchmove event listener on body + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-div.html new file mode 100644 index 00000000000000..a61b34851e99ae --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-div.html @@ -0,0 +1,25 @@ + +passive touchmove event listener on div + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-document.html new file mode 100644 index 00000000000000..b49971b5b0145e --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-document.html @@ -0,0 +1,25 @@ + +passive touchmove event listener on document + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-root.html new file mode 100644 index 00000000000000..b8517045900863 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-root.html @@ -0,0 +1,25 @@ + +passive touchmove event listener on root + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-window.html new file mode 100644 index 00000000000000..351d6ace845935 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchmove-event-listener-on-window.html @@ -0,0 +1,25 @@ + +passive touchmove event listener on window + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-body.html new file mode 100644 index 00000000000000..c3d2b577fd8c66 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-body.html @@ -0,0 +1,25 @@ + +passive touchstart event listener on body + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-div.html new file mode 100644 index 00000000000000..103e7f0d233a60 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-div.html @@ -0,0 +1,25 @@ + +passive touchstart event listener on div + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-document.html new file mode 100644 index 00000000000000..2e4de2405fb2fb --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-document.html @@ -0,0 +1,25 @@ + +passive touchstart event listener on document + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-root.html new file mode 100644 index 00000000000000..0f52e9a16fe67c --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-root.html @@ -0,0 +1,25 @@ + +passive touchstart event listener on root + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-window.html new file mode 100644 index 00000000000000..c47af8101f2b7b --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-touchstart-event-listener-on-window.html @@ -0,0 +1,25 @@ + +passive touchstart event listener on window + + + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-body.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-body.html new file mode 100644 index 00000000000000..fe0869b0229f5c --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-body.html @@ -0,0 +1,18 @@ + +passive wheel event listener on body + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-div.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-div.html new file mode 100644 index 00000000000000..e2ca6e795ae0f3 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-div.html @@ -0,0 +1,34 @@ + +passive wheel event listener on div + + + + + + + + +
+
+
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-document.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-document.html new file mode 100644 index 00000000000000..61b716f7bbfb58 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-document.html @@ -0,0 +1,18 @@ + +passive wheel event listener on document + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-root.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-root.html new file mode 100644 index 00000000000000..6b383bc871d7aa --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-root.html @@ -0,0 +1,18 @@ + +passive wheel event listener on root + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-window.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-window.html new file mode 100644 index 00000000000000..a1e901f5527d9a --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/passive-wheel-event-listener-on-window.html @@ -0,0 +1,18 @@ + +passive wheel event listener on window + + + + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/scrolling.js b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/scrolling.js new file mode 100644 index 00000000000000..88e10f5efdceab --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/scrolling.js @@ -0,0 +1,34 @@ +function raf() { + return new Promise((resolve) => { + // rAF twice. + window.requestAnimationFrame(() => { + window.requestAnimationFrame(resolve); + }); + }); +} + +async function runTest({target, eventName, passive, expectCancelable}) { + await raf(); + + let cancelable = null; + let arrived = false; + target.addEventListener(eventName, function (event) { + cancelable = event.cancelable; + arrived = true; + }, {passive:passive, once:true}); + + promise_test(async (t) => { + t.add_cleanup(() => { + document.querySelector('.remove-on-cleanup')?.remove(); + }); + const pos_x = Math.floor(window.innerWidth / 2); + const pos_y = Math.floor(window.innerHeight / 2); + const delta_x = 0; + const delta_y = 100; + + await new test_driver.Actions() + .scroll(pos_x, pos_y, delta_x, delta_y).send(); + await t.step_wait(() => arrived, `Didn't get event ${eventName} on ${target.localName}`); + assert_equals(cancelable, expectCancelable); + }); +} diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/touching.js b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/touching.js new file mode 100644 index 00000000000000..620d26804bf8c7 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/touching.js @@ -0,0 +1,34 @@ +function waitForCompositorCommit() { + return new Promise((resolve) => { + // rAF twice. + window.requestAnimationFrame(() => { + window.requestAnimationFrame(resolve); + }); + }); +} + +function injectInput(touchDiv) { + return new test_driver.Actions() + .addPointer("touch_pointer", "touch") + .pointerMove(0, 0, {origin: touchDiv}) + .pointerDown() + .pointerMove(30, 30) + .pointerUp() + .send(); +} + +function runTest({target, eventName, passive, expectCancelable}) { + let touchDiv = document.getElementById("touchDiv"); + let cancelable = null; + let arrived = false; + target.addEventListener(eventName, function (event) { + cancelable = event.cancelable; + arrived = true; + }, {passive}); + promise_test(async () => { + await waitForCompositorCommit(); + await injectInput(touchDiv); + await waitFor(() => arrived); + assert_equals(cancelable, expectCancelable); + }); +} diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/wait-for.js b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/wait-for.js new file mode 100644 index 00000000000000..0bf3e558342fd6 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/resources/wait-for.js @@ -0,0 +1,15 @@ +function waitFor(condition, MAX_FRAME = 500) { + return new Promise((resolve, reject) => { + function tick(frames) { + // We requestAnimationFrame either for MAX_FRAME frames or until condition is + // met. 
+ if (frames >= MAX_FRAME) + reject(new Error(`Condition did not become true after ${MAX_FRAME} frames`)); + else if (condition()) + resolve(); + else + requestAnimationFrame(() => tick(frames + 1)); + } + tick(0); + }); +} diff --git a/test/fixtures/wpt/dom/events/non-cancelable-when-passive/synthetic-events-cancelable.html b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/synthetic-events-cancelable.html new file mode 100644 index 00000000000000..4287770b8d5828 --- /dev/null +++ b/test/fixtures/wpt/dom/events/non-cancelable-when-passive/synthetic-events-cancelable.html @@ -0,0 +1,34 @@ + +Synthetic events are always cancelable by default + + + + diff --git a/test/fixtures/wpt/dom/events/passive-by-default.html b/test/fixtures/wpt/dom/events/passive-by-default.html new file mode 100644 index 00000000000000..02029f4dacf811 --- /dev/null +++ b/test/fixtures/wpt/dom/events/passive-by-default.html @@ -0,0 +1,50 @@ + +Default passive event listeners on window, document, document element, body + + + + +
+ diff --git a/test/fixtures/wpt/dom/events/scrolling/iframe-chains.html b/test/fixtures/wpt/dom/events/scrolling/iframe-chains.html index 2d7e1827adad6f..fb7d674aae13ee 100644 --- a/test/fixtures/wpt/dom/events/scrolling/iframe-chains.html +++ b/test/fixtures/wpt/dom/events/scrolling/iframe-chains.html @@ -39,7 +39,8 @@ promise_test(async t => { await new test_driver.Actions().scroll(50, 50, 0, 50).send(); - assert_equals(scroller.scrollTop, 50); + // Allow the possibility the scroll is not fully synchronous + await t.step_wait(() => scroller.scrollTop === 50); }, "Wheel scroll in iframe chains to containing element."); diff --git a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js index 0a73f34fefc8ab..169393e4c3e419 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js +++ b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js @@ -1,8 +1,22 @@ +async function waitForScrollendEvent(test, target, timeoutMs = 500) { + return new Promise((resolve, reject) => { + const timeoutCallback = test.step_timeout(() => { + reject(`No Scrollend event received for target ${target}`); + }, timeoutMs); + target.addEventListener('scrollend', (evt) => { + clearTimeout(timeoutCallback); + resolve(evt); + }, { once: true }); + }); +} + const MAX_FRAME = 700; const MAX_UNCHANGED_FRAMES = 20; // Returns a promise that resolves when the given condition is met or rejects // after MAX_FRAME animation frames. +// TODO(crbug.com/1400399): deprecate. We should not use frame based waits in +// WPT as frame rates may vary greatly in different testing environments. function waitFor(condition, error_message = 'Reaches the maximum frames.') { return new Promise((resolve, reject) => { function tick(frames) { @@ -19,6 +33,9 @@ function waitFor(condition, error_message = 'Reaches the maximum frames.') { }); } +// TODO(crbug.com/1400446): Test driver should defer sending events until the +// browser is ready. Also the term compositor-commit is misleading as not all +// user-agents use a compositor process. function waitForCompositorCommit() { return new Promise((resolve) => { // rAF twice. @@ -28,6 +45,8 @@ function waitForCompositorCommit() { }); } +// TODO(crbug.com/1400399): Deprecate as frame rates may vary greatly in +// different test environments. 
function waitForAnimationEnd(getValue) { var last_changed_frame = 0; var last_position = getValue(); @@ -50,6 +69,31 @@ }) } +// Scrolls in target according to move_path with pauses in between +function touchScrollInTargetSequentiallyWithPause(target, move_path, pause_time_in_ms = 100) { + const test_driver_actions = new test_driver.Actions() + .addPointer("pointer1", "touch") + .pointerMove(0, 0, {origin: target}) + .pointerDown(); + + const substeps = 5; + let x = 0; + let y = 0; + // Do each move in 5 steps + for(let move of move_path) { + let step_x = (move.x - x) / substeps; + let step_y = (move.y - y) / substeps; + for(let step = 0; step < substeps; step++) { + x += step_x; + y += step_y; + test_driver_actions.pointerMove(x, y, {origin: target}); + } + test_driver_actions.pause(pause_time_in_ms); + } + + return test_driver_actions.pointerUp().send(); +} + function touchScrollInTarget(pixels_to_scroll, target, direction, pause_time_in_ms = 100) { var x_delta = 0; var y_delta = 0; @@ -61,7 +105,7 @@ } else if (direction == "right") { x_delta = -1 * pixels_to_scroll / num_movs; } else if (direction == "left") { - x_delta = pixels_to_scroll / num_movs;; + x_delta = pixels_to_scroll / num_movs; } else { throw("scroll direction '" + direction + "' is not expected, direction should be 'down', 'up', 'left' or 'right'"); } @@ -95,3 +139,25 @@ .pointerUp() .send(); } + +// Returns a promise that resolves when the given condition holds for 10 +// animation frames or rejects if the condition changes to false within 10 +// animation frames. +// TODO(crbug.com/1400399): Deprecate as frame rates may vary greatly in +// different test environments. +function conditionHolds(condition, error_message = 'Condition is not true anymore.') { + const MAX_FRAME = 10; + return new Promise((resolve, reject) => { + function tick(frames) { + // We requestAnimationFrame either for 10 frames or until condition is + // violated. + if (frames >= MAX_FRAME) + resolve(); + else if (!condition()) + reject(error_message); + else + requestAnimationFrame(tick.bind(this, frames + 1)); + } + tick(0); + }); +} diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html new file mode 100644 index 00000000000000..77bf029ced58c5 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html @@ -0,0 +1,63 @@ + + + + + + + + + + + 
+
+
+
+ + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-snap.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-snap.html index ef1b495791cad1..03079ddc6cad8c 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-snap.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-snap.html @@ -1,4 +1,5 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html index 79b5f5f0186871..c6569e0bebbd9f 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html @@ -1,5 +1,6 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html index 63e1c3e22eaafc..8782b1dfee6237 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html @@ -1,5 +1,6 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html index 99c1c6930fab0b..30904553883435 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html @@ -1,4 +1,5 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html index 0269c66fdde192..acad168e56c995 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html @@ -1,4 +1,5 @@ + @@ -80,12 +81,20 @@ 'on target.'); assert_equals(target_div.scrollLeft, 0); - // Scroll up on target div and wait for the element with overscroll-y to get - // scrollend event. + let touchEndPromise = new Promise((resolve) => { + target_div.addEventListener("touchend", resolve); + }); await touchScrollInTarget(300, target_div, 'up'); - await waitFor(() => { return vertical_scrollend_arrived; }, - 'Expected element did not receive scrollend event after scroll up on ' + - 'target.'); + + // The scrollend event should never be fired before the gesture has completed. + await touchEndPromise; + + // Ensure we wait at least a tick after the touch end. + await waitForCompositorCommit(); + + // We should not trigger a scrollend event for a scroll that did not change + // the scroll position. 
+ assert_equals(vertical_scrollend_arrived, false); assert_equals(target_div.scrollTop, 0); }, 'Tests that the last element in the cut scroll chain gets scrollend ' + 'event when no element scrolls by touch.'); diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html index 87cad79df7c2af..734339694220cc 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html @@ -1,4 +1,5 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html index f9510e6e231615..ef72f56d2ba9d6 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html @@ -1,4 +1,5 @@ + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html index 30f16571cd8320..5146c5f719a1e4 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html @@ -1,6 +1,7 @@ + @@ -29,110 +30,170 @@ diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-handler-content-attributes.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-handler-content-attributes.html new file mode 100644 index 00000000000000..47f563c39bd907 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-handler-content-attributes.html @@ -0,0 +1,108 @@ + + + + + + + + + + + +
+
+
+
+ + 
 diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-not-fired-after-removing-scroller.tentative.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-not-fired-after-removing-scroller.tentative.html new file mode 100644 index 00000000000000..95447fbd12ea2a --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-not-fired-after-removing-scroller.tentative.html @@ -0,0 +1,84 @@ + + + + + + + + + + + + + + diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 9200762e6f9255..2e17ed940d47e8 100644 --- a/test/fixtures/wpt/versions.json +++ b/test/fixtures/wpt/versions.json @@ -12,7 +12,7 @@ "path": "dom/abort" }, "dom/events": { - "commit": "f8821adb281696322f4bd96d412a98ae510f9125", + "commit": "ab8999891c6225bef1741c2960033aad620481a8", "path": "dom/events" }, "encoding": { diff --git a/test/wpt/status/dom/events.json b/test/wpt/status/dom/events.json index 012b73f70001ae..95fbda98402b14 100644 --- a/test/wpt/status/dom/events.json +++ b/test/wpt/status/dom/events.json @@ -19,8 +19,8 @@ "Event-constructors.any.js": { "fail": { "expected": [ - "Untitled 2", - "Untitled 3" + "Untitled 3", + "Untitled 4" ] } }, From 8fac4c5684b4b3e6e0b73fb227b11fbc9734e563 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Wed, 11 Jan 2023 05:11:06 +0900 Subject: [PATCH 136/191] perf_hooks: fix checking range of `options.figures` in createHistogram For `options.figures`, a number between 1 and 5 is allowed, so use `validateInteger` to limit the maximum to 5. Refs: https://github.com/nodejs/node/blob/main/doc/api/perf_hooks.md#perf_hookscreatehistogramoptions PR-URL: https://github.com/nodejs/node/pull/45999 Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca --- lib/internal/histogram.js | 3 +-- test/parallel/test-perf-hooks-histogram.js | 7 +++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/internal/histogram.js b/lib/internal/histogram.js index ca540e555e0160..92a679e8f3a634 100644 --- a/lib/internal/histogram.js +++ b/lib/internal/histogram.js @@ -36,7 +36,6 @@ const { validateInteger, validateNumber, validateObject, - validateUint32, } = require('internal/validators'); const kDestroy = Symbol('kDestroy'); @@ -368,7 +367,7 @@ function createHistogram(options = kEmptyObject) { } else if (highest < 2n * lowest) { throw new ERR_INVALID_ARG_VALUE.RangeError('options.highest', highest); } - validateUint32(figures, 'options.figures', 1, 5); + validateInteger(figures, 'options.figures', 1, 5); return internalRecordableHistogram(new _Histogram(lowest, highest, figures)); } diff --git a/test/parallel/test-perf-hooks-histogram.js b/test/parallel/test-perf-hooks-histogram.js index 1bcc59653bb692..37fcdfb3fca06c 100644 --- a/test/parallel/test-perf-hooks-histogram.js +++ b/test/parallel/test-perf-hooks-histogram.js @@ -135,6 +135,13 @@ const { inspect } = require('util'); }); }); + // Number greater than 5 is not allowed + for (const i of [6, 10]) { + throws(() => createHistogram({ figures: i }), { + code: 'ERR_OUT_OF_RANGE', + }); + } + createHistogram({ lowest: 1, highest: 11, figures: 1 }); } From d0f905bd6f7408cfe3759b203cf4d85df389dd56 Mon Sep 17 00:00:00 2001 From: Marco Ippolito Date: Wed, 11 Jan 2023 09:16:27 +0100 Subject: [PATCH 137/191] doc: duplex and readable from uncaught exception warning PR-URL: https://github.com/nodejs/node/pull/46135 Fixes: https://github.com/nodejs/node/issues/46071 Reviewed-By: Paolo Insogna Reviewed-By: Matteo Collina --- doc/api/stream.md | 24 
++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/doc/api/stream.md b/doc/api/stream.md index 613a0b7c87a8d6..2d0103349121f6 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -2890,6 +2890,18 @@ Calling `Readable.from(string)` or `Readable.from(buffer)` will not have the strings or buffers be iterated to match the other streams semantics for performance reasons. +If an `Iterable` object containing promises is passed as an argument, +it might result in unhandled rejection. + +```js +const { Readable } = require('node:stream'); + +Readable.from([ + new Promise((resolve) => setTimeout(resolve('1'), 1500)), + new Promise((_, reject) => setTimeout(reject(new Error('2')), 1000)), // Unhandled rejection +]); +``` + ### `stream.Readable.fromWeb(readableStream[, options])` +> Stability: 3 - Legacy: Use [`event.stopPropagation()`][] instead. + * Type: {boolean} Alias for `event.stopPropagation()` if set to `true`. This is not used @@ -2162,6 +2164,7 @@ to the `EventTarget`. [`emitter.listenerCount()`]: #emitterlistenercounteventname [`emitter.removeListener()`]: #emitterremovelistenereventname-listener [`emitter.setMaxListeners(n)`]: #emittersetmaxlistenersn +[`event.stopPropagation()`]: #eventstoppropagation [`event.target`]: #eventtarget [`events.defaultMaxListeners`]: #eventsdefaultmaxlisteners [`fs.ReadStream`]: fs.md#class-fsreadstream From d251a66bed09994c71c9ea2cde493ec8590a24d2 Mon Sep 17 00:00:00 2001 From: Luigi Pinca Date: Wed, 11 Jan 2023 20:29:33 +0100 Subject: [PATCH 139/191] deps: add /deps/**/.github/ to .gitignore MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Ignore `.github` directories in the `deps` directory. - Remove `deps/base64/base64/.github/`. PR-URL: https://github.com/nodejs/node/pull/46091 Reviewed-By: Colin Ihrig Reviewed-By: Ruy Adorno Reviewed-By: Michaël Zasso --- .gitignore | 5 +- deps/base64/base64/.github/workflows/test.yml | 133 ------------------ 2 files changed, 3 insertions(+), 135 deletions(-) delete mode 100644 deps/base64/base64/.github/workflows/test.yml diff --git a/.gitignore b/.gitignore index 11c39db88944c7..9881176886826b 100644 --- a/.gitignore +++ b/.gitignore @@ -136,10 +136,11 @@ tools/*/*.i.tmp /deps/npm/node_modules/.bin/ # Respect V8's .gitignore !deps/v8/** -# Ignore the libuv book and GitHub templates -/deps/uv/.github/ +# Ignore the libuv book /deps/uv/docs/code/ /deps/uv/docs/src/guide/ +# Ignore .github directories +/deps/**/.github/ # Ignore dependencies fetched by tools/v8/fetch_deps.py /deps/.cipd diff --git a/deps/base64/base64/.github/workflows/test.yml b/deps/base64/base64/.github/workflows/test.yml deleted file mode 100644 index 68342a42442eb0..00000000000000 --- a/deps/base64/base64/.github/workflows/test.yml +++ /dev/null @@ -1,133 +0,0 @@ -name: Test - -on: [push, pull_request] - -jobs: - makefile-test: - name: makefile-${{ matrix.runner }}-amd64-${{ matrix.compiler }} ${{ ((matrix.openmp == 1) && '+openmp') || '' }} - runs-on: ${{ matrix.runner }} - strategy: - fail-fast: false - matrix: - runner: ["ubuntu-18.04"] - compiler: ["gcc", "clang"] - openmp: ["0", "1"] - include: - - runner: "macos-11" - compiler: "clang" - openmp: "0" - env: - OPENMP: ${{ matrix.openmp }} - OMP_NUM_THREADS: ${{ ((matrix.openmp == 1) && '2') || '0' }} - CC: ${{ matrix.compiler }} - OBJCOPY: ${{ (startsWith(matrix.runner, 'macos') && 'echo') || 'objcopy' }} - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Run tests - run: ./test/ci/test.sh - - cmake-test: 
- name: cmake-${{ matrix.runner }} - runs-on: ${{ matrix.runner }} - strategy: - fail-fast: false - matrix: - runner: ["ubuntu-18.04", "macos-11", "windows-2019"] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: CMake Configure - run: > - cmake - -B out - -Werror=dev - -DBASE64_BUILD_TESTS=ON - ${{ runner.os != 'Windows' && '-DCMAKE_BUILD_TYPE=Release' || '' }} - ${{ runner.os == 'macOS' && '-DBASE64_WITH_AVX2=OFF' || '' }} - - name: CMake Build - run: cmake --build out --config Release --verbose - - name: CTest - run: ctest --no-tests=error --test-dir out -VV --build-config Release - - alpine-makefile-test: - name: makefile-alpine-amd64-gcc - runs-on: ubuntu-latest - container: - image: alpine:3.12 - env: - CC: gcc - steps: - - name: Install deps - run: apk add --update bash build-base git - - name: Checkout - uses: actions/checkout@v3 - - name: Run tests - run: ./test/ci/test.sh - - alpine-cmake-test: - name: cmake-alpine-amd64-gcc - runs-on: ubuntu-latest - container: - image: alpine:3.12 - steps: - - name: Install deps - run: apk add --update bash build-base cmake git - - name: Checkout - uses: actions/checkout@v3 - - name: CMake Configure - run: cmake -B out -Werror=dev -DBASE64_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE=Release - - name: CMake Build - run: cmake --build out --config Release --verbose - - name: CTest - run: ctest --no-tests=error -VV --build-config Release - working-directory: ./out - - alpine-alt-arch-makefile-test: - name: makefile-alpine-${{matrix.arch}}-${{matrix.cc}} - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - arch: [armv7, aarch64, s390x, ppc64le] - cc: [gcc, clang] - steps: - - name: Checkout - uses: actions/checkout@v3 - - uses: uraimo/run-on-arch-action@v2 - with: - arch: ${{matrix.arch}} - distro: alpine_latest - env: | - CC: ${{matrix.cc}} - install: apk add --update bash build-base cmake git ${{matrix.cc}} - run: ./test/ci/test.sh - - alpine-alt-arch-cmake-test: - name: cmake-alpine-${{matrix.arch}}-${{matrix.cc}} - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - arch: [armv7, aarch64, s390x, ppc64le] - cc: [gcc, clang] - steps: - - name: Checkout - uses: actions/checkout@v3 - - uses: uraimo/run-on-arch-action@v2 - with: - arch: ${{matrix.arch}} - distro: alpine_latest - env: | - CC: ${{matrix.cc}} - install: apk add --update bash build-base cmake git ${{matrix.cc}} - run: | - echo "::group::CMake Configure" - cmake -B out -Werror=dev -DBASE64_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE=Release - echo "::endgroup::CMake Configure" - echo "::group::CMake Build" - cmake --build out --config Release --verbose - echo "::endgroup::CMake Build" - echo "::group::CTest" - ctest --no-tests=error --test-dir out -VV --build-config Release - echo "::endgroup::CTest" From ec34cad712874dee942f7ad8176a7db67d33ef04 Mon Sep 17 00:00:00 2001 From: theanarkh Date: Thu, 12 Jan 2023 03:58:41 +0800 Subject: [PATCH 140/191] doc: add PerformanceObserver.supportedEntryTypes to doc PR-URL: https://github.com/nodejs/node/pull/45962 Reviewed-By: Luigi Pinca --- doc/api/perf_hooks.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/api/perf_hooks.md b/doc/api/perf_hooks.md index fbc36572ae4386..58131a4a0a931f 100644 --- a/doc/api/perf_hooks.md +++ b/doc/api/perf_hooks.md @@ -917,6 +917,20 @@ Returns a `object` that is the JSON representation of the ## Class: `perf_hooks.PerformanceObserver` + + +### `PerformanceObserver.supportedEntryTypes` + + + +* {string\[]} + +Get supported types. 
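A minimal usage sketch for the property documented above (the output shown is illustrative; the exact entry types returned vary across Node.js versions):

```js
const { PerformanceObserver } = require('node:perf_hooks');

// Static accessor; no observer instance is required.
console.log(PerformanceObserver.supportedEntryTypes);
// e.g. [ 'dns', 'function', 'gc', 'http', 'http2', 'mark', 'measure', 'net' ]
```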
+ ### `new PerformanceObserver(callback)` -* {number} **Default:** `0` +* {number} **Default:** `300000` See [`server.requestTimeout`][] in the `node:http` module. From 38df6621193418844ad711dea5c645809dafe84e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Fri, 13 Jan 2023 09:55:46 +0100 Subject: [PATCH 147/191] tools: update create-or-update-pull-request-action Fixes GitHub's `set-output` deprecation. PR-URL: https://github.com/nodejs/node/pull/46169 Reviewed-By: Antoine du Hamel Reviewed-By: Darshan Sen Reviewed-By: Richard Lau Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- .github/workflows/authors.yml | 2 +- .github/workflows/find-inactive-collaborators.yml | 2 +- .github/workflows/find-inactive-tsc.yml | 2 +- .github/workflows/license-builder.yml | 2 +- .github/workflows/timezone-update.yml | 2 +- .github/workflows/tools.yml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/authors.yml b/.github/workflows/authors.yml index 5d5c736fabb00a..934b8d547e44aa 100644 --- a/.github/workflows/authors.yml +++ b/.github/workflows/authors.yml @@ -19,7 +19,7 @@ jobs: fetch-depth: '0' # This is required to actually get all the authors persist-credentials: false - run: tools/update-authors.mjs # Run the AUTHORS tool - - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee + - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. env: diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index 8fa954fd0f8128..33b63389934f5e 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -33,7 +33,7 @@ jobs: run: tools/find-inactive-collaborators.mjs - name: Open pull request - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee + uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. env: diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index b4af0876fbdb97..452a428959cef1 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -42,7 +42,7 @@ jobs: run: tools/find-inactive-tsc.mjs >> $GITHUB_ENV - name: Open pull request - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee + uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. env: diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index a58e0bf8bbbe0a..a6732b149c01f5 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -21,7 +21,7 @@ jobs: with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool - - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee + - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. 
env: diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index 27cbfd2946a1d2..a28434f2d9be74 100644 --- a/.github/workflows/timezone-update.yml +++ b/.github/workflows/timezone-update.yml @@ -40,7 +40,7 @@ run: echo "${{ env.new_version }}" > test/fixtures/tz-version.txt - name: Open Pull Request - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee # Create a PR or update the Action's existing PR + uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Create a PR or update the Action's existing PR env: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} with: diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 3047124a782f67..f2d1a3a839c8d5 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -141,7 +141,7 @@ - run: ${{ matrix.run }} env: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} - - uses: gr2m/create-or-update-pull-request-action@dc1726cbf4dd3ce766af4ec29cfb660e0125e8ee + - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. env: From 53ecd20bbd4c69864b318ada82de966138031006 Mon Sep 17 00:00:00 2001 From: Shelley Vohr Date: Fri, 13 Jan 2023 13:31:53 +0100 Subject: [PATCH 148/191] src: remove return after abort PR-URL: https://github.com/nodejs/node/pull/46172 Reviewed-By: Anna Henningsen Reviewed-By: Yagiz Nizipli Reviewed-By: Colin Ihrig Reviewed-By: Darshan Sen Reviewed-By: Richard Lau Reviewed-By: James M Snell --- src/node_builtins.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/node_builtins.cc b/src/node_builtins.cc index ca2efa633be88d..fc671eb88a856f 100644 --- a/src/node_builtins.cc +++ b/src/node_builtins.cc @@ -244,7 +244,6 @@ void BuiltinLoader::AddExternalizedBuiltin(const char* id, fprintf( stderr, "Cannot load externalized builtin: \"%s:%s\".\n", id, filename); ABORT(); - return; } Add(id, source); From 3d1dd96c4fe2a1219156c63a8689015a66bb7414 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Fri, 13 Jan 2023 23:39:54 +0900 Subject: [PATCH 149/191] http: refactor to use min of validateNumber for maxTotalSockets MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove the duplicated range check by using the `min` argument of `validateNumber`. 
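For illustration, the resulting behavior (a minimal sketch based on the updated test below):

```js
const assert = require('node:assert');
const http = require('node:http');

// Values below the `min` bound of 1 (and NaN) are now rejected by validateNumber:
assert.throws(() => new http.Agent({ maxTotalSockets: 0 }), {
  code: 'ERR_OUT_OF_RANGE',
  name: 'RangeError',
});

// Omitting the option still defaults to Infinity:
assert.strictEqual(new http.Agent().maxTotalSockets, Infinity);
```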
PR-URL: https://github.com/nodejs/node/pull/46115 Reviewed-By: Paolo Insogna Reviewed-By: Matteo Collina Reviewed-By: Tobias Nießen Reviewed-By: James M Snell --- lib/_http_agent.js | 11 +---------- test/parallel/test-http-agent-maxtotalsockets.js | 2 -- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/lib/_http_agent.js b/lib/_http_agent.js index 791a43c8d5d029..774c5824658434 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -30,7 +30,6 @@ const { ArrayPrototypeSome, ArrayPrototypeSplice, FunctionPrototypeCall, - NumberIsNaN, ObjectCreate, ObjectKeys, ObjectSetPrototypeOf, @@ -49,11 +48,6 @@ let debug = require('internal/util/debuglog').debuglog('http', (fn) => { }); const { AsyncResource } = require('async_hooks'); const { async_id_symbol } = require('internal/async_hooks').symbols; -const { - codes: { - ERR_OUT_OF_RANGE, - }, -} = require('internal/errors'); const { kEmptyObject, once, @@ -123,10 +117,7 @@ function Agent(options) { validateOneOf(this.scheduling, 'scheduling', ['fifo', 'lifo']); if (this.maxTotalSockets !== undefined) { - validateNumber(this.maxTotalSockets, 'maxTotalSockets'); - if (this.maxTotalSockets <= 0 || NumberIsNaN(this.maxTotalSockets)) - throw new ERR_OUT_OF_RANGE('maxTotalSockets', '> 0', - this.maxTotalSockets); + validateNumber(this.maxTotalSockets, 'maxTotalSockets', 1); } else { this.maxTotalSockets = Infinity; } diff --git a/test/parallel/test-http-agent-maxtotalsockets.js b/test/parallel/test-http-agent-maxtotalsockets.js index c44c8db627d330..fce1bf8de83144 100644 --- a/test/parallel/test-http-agent-maxtotalsockets.js +++ b/test/parallel/test-http-agent-maxtotalsockets.js @@ -20,8 +20,6 @@ assert.throws(() => new http.Agent({ }), { code: 'ERR_OUT_OF_RANGE', name: 'RangeError', - message: 'The value of "maxTotalSockets" is out of range. ' + - `It must be > 0. Received ${item}`, }); }); From 277d9da876816697ae80942e19a7b73a5047bdf0 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Sat, 14 Jan 2023 09:21:54 +0900 Subject: [PATCH 150/191] vm: refactor to use validate function The pattern of checking a type and then throwing an error is repeated, so replace it with the appropriate validate function. 
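Conceptually, each shared validator bundles the type check and the throw into one call (a simplified sketch, not the exact `internal/validators` source):

```js
// Simplified shape of the helpers adopted below:
function validateFunction(value, name) {
  if (typeof value !== 'function')
    throw new ERR_INVALID_ARG_TYPE(name, 'Function', value);
}

function validateBuffer(buffer, name) {
  if (!isArrayBufferView(buffer))
    throw new ERR_INVALID_ARG_TYPE(
      name, ['Buffer', 'TypedArray', 'DataView'], buffer);
}
```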
PR-URL: https://github.com/nodejs/node/pull/46176 Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Antoine du Hamel Reviewed-By: James M Snell --- lib/internal/vm/module.js | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/lib/internal/vm/module.js b/lib/internal/vm/module.js index 7e0131c7db2872..7c0890c967ebc6 100644 --- a/lib/internal/vm/module.js +++ b/lib/internal/vm/module.js @@ -21,7 +21,6 @@ const { const { isContext } = internalBinding('contextify'); const { isModuleNamespaceObject, - isArrayBufferView, } = require('internal/util/types'); const { customInspectSymbol, @@ -41,6 +40,7 @@ const { } = require('internal/errors').codes; const { validateBoolean, + validateBuffer, validateFunction, validateInt32, validateObject, @@ -276,25 +276,16 @@ class SourceTextModule extends Module { validateInt32(lineOffset, 'options.lineOffset'); validateInt32(columnOffset, 'options.columnOffset'); - if (initializeImportMeta !== undefined && - typeof initializeImportMeta !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'options.initializeImportMeta', 'function', initializeImportMeta); + if (initializeImportMeta !== undefined) { + validateFunction(initializeImportMeta, 'options.initializeImportMeta'); } - if (importModuleDynamically !== undefined && - typeof importModuleDynamically !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'options.importModuleDynamically', 'function', - importModuleDynamically); + if (importModuleDynamically !== undefined) { + validateFunction(importModuleDynamically, 'options.importModuleDynamically'); } - if (cachedData !== undefined && !isArrayBufferView(cachedData)) { - throw new ERR_INVALID_ARG_TYPE( - 'options.cachedData', - ['Buffer', 'TypedArray', 'DataView'], - cachedData - ); + if (cachedData !== undefined) { + validateBuffer(cachedData, 'options.cachedData'); } super({ From d7a8c076e17d7ec9a89d2b15f22143a678f73012 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 13 Jan 2023 17:10:38 -0800 Subject: [PATCH 151/191] doc: use "file system" instead of "filesystem" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://github.com/nodejs/node/pull/44004#discussion_r1067599238 PR-URL: https://github.com/nodejs/node/pull/46178 Reviewed-By: James M Snell Reviewed-By: Richard Lau Reviewed-By: Matteo Collina Reviewed-By: Ben Noordhuis Reviewed-By: Luigi Pinca Reviewed-By: Yagiz Nizipli Reviewed-By: Gerhard Stöbich Reviewed-By: Tobias Nießen Reviewed-By: Harshitha K P --- doc/api/addons.md | 2 +- doc/api/fs.md | 6 +++--- doc/api/modules.md | 2 +- doc/api/net.md | 4 ++-- doc/api/report.md | 6 +++--- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/doc/api/addons.md b/doc/api/addons.md index 1e1d4751861d04..4075158e2ae750 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -27,7 +27,7 @@ involving knowledge of several components and APIs: threads and all of the asynchronous behaviors of the platform. It also serves as a cross-platform abstraction library, giving easy, POSIX-like access across all major operating systems to many common system tasks, such - as interacting with the filesystem, sockets, timers, and system events. libuv + as interacting with the file system, sockets, timers, and system events. libuv also provides a threading abstraction similar to POSIX threads for more sophisticated asynchronous addons that need to move beyond the standard event loop. 
Addon authors should diff --git a/doc/api/fs.md b/doc/api/fs.md index fbec52cbf1ef47..f8de227a9ad848 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1139,7 +1139,7 @@ changes: * `prefix` {string} * `options` {string|Object} * `encoding` {string} **Default:** `'utf8'` -* Returns: {Promise} Fulfills with a string containing the filesystem path +* Returns: {Promise} Fulfills with a string containing the file system path of the newly created temporary directory. Creates a unique temporary directory. A unique directory name is generated by @@ -4382,7 +4382,7 @@ renamed. An `EPERM` error is reported when the watched directory is deleted. This feature depends on the underlying operating system providing a way -to be notified of filesystem changes. +to be notified of file system changes. * On Linux systems, this uses [`inotify(7)`][]. * On BSD systems, this uses [`kqueue(2)`][]. @@ -6898,7 +6898,7 @@ The times in the stat object have the following semantics: link(2), mknod(2), rename(2), unlink(2), utimes(2), read(2), and write(2) system calls. * `birthtime` "Birth Time": Time of file creation. Set once when the - file is created. On filesystems where birthtime is not available, + file is created. On file systems where birthtime is not available, this field may instead hold either the `ctime` or `1970-01-01T00:00Z` (ie, Unix epoch timestamp `0`). This value may be greater than `atime` or `mtime` in this case. On Darwin and other FreeBSD variants, diff --git a/doc/api/modules.md b/doc/api/modules.md index eb54d7921f29a3..02fae47d88ea69 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -191,7 +191,7 @@ require(X) from module at path Y a. return the core module b. STOP 2. If X begins with '/' - a. set Y to be the filesystem root + a. set Y to be the file system root 3. If X begins with './' or '/' or '../' a. LOAD_AS_FILE(Y + X) b. LOAD_AS_DIRECTORY(Y + X) diff --git a/doc/api/net.md b/doc/api/net.md index 18837f5a0d5bdf..1e9288d9c734a3 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -29,7 +29,7 @@ sockets on other operating systems. [`socket.connect()`][] take a `path` parameter to identify IPC endpoints. On Unix, the local domain is also known as the Unix domain. The path is a -filesystem pathname. It gets truncated to an OS-dependent length of +file system pathname. It gets truncated to an OS-dependent length of `sizeof(sockaddr_un.sun_path) - 1`. Typical values are 107 bytes on Linux and 103 bytes on macOS. If a Node.js API abstraction creates the Unix domain socket, it will unlink the Unix domain socket as well. For example, @@ -37,7 +37,7 @@ it will unlink the Unix domain socket as well. For example, [`server.close()`][] will unlink it. But if a user creates the Unix domain socket outside of these abstractions, the user will need to remove it. The same applies when a Node.js API creates a Unix domain socket but the program then -crashes. In short, a Unix domain socket will be visible in the filesystem and +crashes. In short, a Unix domain socket will be visible in the file system and will persist until unlinked. On Windows, the local domain is implemented using a named pipe. The path _must_ diff --git a/doc/api/report.md b/doc/api/report.md index fc005d6e0e8dda..213f7126960027 100644 --- a/doc/api/report.md +++ b/doc/api/report.md @@ -567,9 +567,9 @@ In cases where standard streams are used, the value in `directory` is ignored. URLs are not supported. Defaults to a composite filename that contains timestamp, PID, and sequence number. 
-`directory` specifies the filesystem directory where the report will be written. -URLs are not supported. Defaults to the current working directory of the -Node.js process. +`directory` specifies the file system directory where the report will be +written. URLs are not supported. Defaults to the current working directory of +the Node.js process. ```js // Trigger report only on uncaught exceptions. From 1b0cc79785313cc56e2a476491685d0f13dcc61b Mon Sep 17 00:00:00 2001 From: Darshan Sen Date: Sat, 14 Jan 2023 13:19:36 +0530 Subject: [PATCH 152/191] tools: add automation for updating postject dependency Signed-off-by: Darshan Sen PR-URL: https://github.com/nodejs/node/pull/46157 Reviewed-By: Antoine du Hamel Reviewed-By: Michael Dawson --- .github/workflows/tools.yml | 10 ++++++++++ doc/contributing/maintaining-postject.md | 23 +++++++++++++++++++++++ tools/dep_updaters/README.md | 20 ++++++++++++++++++++ tools/dep_updaters/update-postject.sh | 22 ++++++++++++++++++++++ 4 files changed, 75 insertions(+) create mode 100644 doc/contributing/maintaining-postject.md create mode 100755 tools/dep_updaters/update-postject.sh diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index f2d1a3a839c8d5..a056aa705bd246 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -77,6 +77,16 @@ jobs: echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV ./tools/update-undici.sh fi + - id: postject + subsystem: deps + label: dependencies + run: | + NEW_VERSION=$(npm view postject dist-tags.latest) + CURRENT_VERSION=$(node -p "require('./test/fixtures/postject-copy/node_modules/postject/package.json').version") + if [ "$NEW_VERSION" != "$CURRENT_VERSION" ]; then + echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV + ./tools/dep_updaters/update-postject.sh + fi - id: base64 subsystem: deps label: dependencies diff --git a/doc/contributing/maintaining-postject.md b/doc/contributing/maintaining-postject.md new file mode 100644 index 00000000000000..96746e22796900 --- /dev/null +++ b/doc/contributing/maintaining-postject.md @@ -0,0 +1,23 @@ +# Maintaining postject + +The [postject](https://github.com/nodejs/postject) dependency is used for the +[Single Executable strategic initiative](https://github.com/nodejs/single-executable). + +## Updating postject + +The `tools/dep_updaters/update-postject.sh` script automates the update of the +postject source files. + +Check that Node.js still builds and tests. + +## Committing postject + +Add postject: `git add --all test/fixtures/postject-copy` + +Commit the changes with a message like: + +```text +deps: update postject to 1.0.0-alpha.4 + +Updated as described in doc/contributing/maintaining-postject.md. +``` diff --git a/tools/dep_updaters/README.md b/tools/dep_updaters/README.md index 64c59c9d7d5f99..bae6d2be2d0943 100644 --- a/tools/dep_updaters/README.md +++ b/tools/dep_updaters/README.md @@ -60,3 +60,23 @@ been created with the changes), do the following: 3. Check that Node.js compiles without errors and the tests pass. 4. Create a commit for the update and in the commit message include the important/relevant items from the changelog. + +## postject + +The `update-postject.sh` script downloads postject from the [npm package](http://npmjs.com/package/postject) +and uses it to replace the contents of `test/fixtures/postject-copy`. 
+ +In order to update, the following command can be run: + +```bash +./tools/dep_updaters/update-postject.sh +``` + +Once the script has run (either manually, or by CI in which case a PR will have +been created with the changes), do the following: + +1. Check the [changelog](https://github.com/nodejs/postject/releases/tag/v1.0.0-alpha.4) + for things that might require changes in Node.js. +2. Check that Node.js compiles without errors and the tests pass. +3. Create a commit for the update and in the commit message include the + important/relevant items from the changelog. diff --git a/tools/dep_updaters/update-postject.sh b/tools/dep_updaters/update-postject.sh new file mode 100755 index 00000000000000..ceda1866d237c3 --- /dev/null +++ b/tools/dep_updaters/update-postject.sh @@ -0,0 +1,22 @@ +#!/bin/sh + +# Shell script to update postject in the source tree to the latest release. + +# This script must be in the tools directory when it runs because it uses the +# script source file path to determine directories to work in. + +set -ex + +cd "$( dirname "$0" )/../.." || exit +rm -rf test/fixtures/postject-copy +mkdir test/fixtures/postject-copy +cd test/fixtures/postject-copy || exit + +ROOT="$PWD/../../.." +[ -z "$NODE" ] && NODE="$ROOT/out/Release/node" +[ -x "$NODE" ] || NODE=$(command -v node) +NPM="$ROOT/deps/npm/bin/npm-cli.js" + +"$NODE" "$NPM" init --yes + +"$NODE" "$NPM" install --no-bin-links --ignore-scripts postject From d6fc855b8a9ee1e8f247a7c155a97fa84b549ad6 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Sat, 14 Jan 2023 18:52:26 +0900 Subject: [PATCH 153/191] lib: refactor to use validate function Throwing error after checking type is repeated. So replace it with validate function. PR-URL: https://github.com/nodejs/node/pull/46101 Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca Reviewed-By: Minwoo Jung --- lib/_http_client.js | 7 +++---- lib/_tls_wrap.js | 19 +++++++------------ lib/async_hooks.js | 7 ++----- 3 files changed, 12 insertions(+), 21 deletions(-) diff --git a/lib/_http_client.js b/lib/_http_client.js index 4c2f39ed067a83..3324cf69f1f074 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -228,11 +228,10 @@ function ClientRequest(input, options, cb) { this.maxHeaderSize = maxHeaderSize; const insecureHTTPParser = options.insecureHTTPParser; - if (insecureHTTPParser !== undefined && - typeof insecureHTTPParser !== 'boolean') { - throw new ERR_INVALID_ARG_TYPE( - 'options.insecureHTTPParser', 'boolean', insecureHTTPParser); + if (insecureHTTPParser !== undefined) { + validateBoolean(insecureHTTPParser, 'options.insecureHTTPParser'); } + this.insecureHTTPParser = insecureHTTPParser; if (options.joinDuplicateHeaders !== undefined) { diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index 65db04e76901b0..78fe3191dc86d5 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -1224,21 +1224,16 @@ function Server(options, listener) { validateNumber(this[kHandshakeTimeout], 'options.handshakeTimeout'); - if (this[kSNICallback] && typeof this[kSNICallback] !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'options.SNICallback', 'function', options.SNICallback); + if (this[kSNICallback]) { + validateFunction(this[kSNICallback], 'options.SNICallback'); } - if (this[kPskCallback] && typeof this[kPskCallback] !== 'function') { - throw new ERR_INVALID_ARG_TYPE( - 'options.pskCallback', 'function', options.pskCallback); + if (this[kPskCallback]) { + validateFunction(this[kPskCallback], 'options.pskCallback'); } - if (this[kPskIdentityHint] && 
typeof this[kPskIdentityHint] !== 'string') { - throw new ERR_INVALID_ARG_TYPE( - 'options.pskIdentityHint', - 'string', - options.pskIdentityHint - ); + + if (this[kPskIdentityHint]) { + validateString(this[kPskIdentityHint], 'options.pskIdentityHint'); } // constructor call diff --git a/lib/async_hooks.js b/lib/async_hooks.js index c120a4828855e9..bf76874ffe2c1b 100644 --- a/lib/async_hooks.js +++ b/lib/async_hooks.js @@ -18,7 +18,6 @@ const { const { ERR_ASYNC_CALLBACK, ERR_ASYNC_TYPE, - ERR_INVALID_ARG_TYPE, ERR_INVALID_ASYNC_ID } = require('internal/errors').codes; const { kEmptyObject } = require('internal/util'); @@ -280,10 +279,8 @@ class AsyncLocalStorage { validateObject(options, 'options'); const { onPropagate = null } = options; - if (onPropagate !== null && typeof onPropagate !== 'function') { - throw new ERR_INVALID_ARG_TYPE('options.onPropagate', - 'function', - onPropagate); + if (onPropagate !== null) { + validateFunction(onPropagate, 'options.onPropagate'); } this.kResourceStore = Symbol('kResourceStore'); From 9648b06e09be93244dd9b3bcfaef43868b9da82e Mon Sep 17 00:00:00 2001 From: Chengzhong Wu Date: Sat, 14 Jan 2023 17:59:17 +0800 Subject: [PATCH 154/191] src: distinguish env stopping flags `Environment::FreeEnvironment` creates a `DisallowJavascriptExecutionScope`, so the flag `Environment::can_call_into_js()` should also be set as `false`. As `Environment::can_call_into_js_` is a simple boolean flag, it should not be accessed off-threads. PR-URL: https://github.com/nodejs/node/pull/45907 Reviewed-By: Anna Henningsen Reviewed-By: Matteo Collina --- src/api/environment.cc | 3 +++ src/env.cc | 7 +++++-- src/env.h | 1 + src/node_http2.cc | 2 +- src/stream_base.cc | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/api/environment.cc b/src/api/environment.cc index 5e4f69921b2635..a17495d93cd98f 100644 --- a/src/api/environment.cc +++ b/src/api/environment.cc @@ -421,6 +421,9 @@ void FreeEnvironment(Environment* env) { Context::Scope context_scope(env->context()); SealHandleScope seal_handle_scope(isolate); + // Set the flag in accordance with the DisallowJavascriptExecutionScope + // above. + env->set_can_call_into_js(false); env->set_stopping(true); env->stop_sub_worker_contexts(); env->RunCleanup(); diff --git a/src/env.cc b/src/env.cc index 1f4f7e2017e4c3..837a879864c46d 100644 --- a/src/env.cc +++ b/src/env.cc @@ -903,10 +903,13 @@ void Environment::InitializeLibuv() { } void Environment::ExitEnv() { - set_can_call_into_js(false); + // Should not access non-thread-safe methods here. set_stopping(true); isolate_->TerminateExecution(); - SetImmediateThreadsafe([](Environment* env) { uv_stop(env->event_loop()); }); + SetImmediateThreadsafe([](Environment* env) { + env->set_can_call_into_js(false); + uv_stop(env->event_loop()); + }); } void Environment::RegisterHandleCleanups() { diff --git a/src/env.h b/src/env.h index 6cfba33607e216..673581fd0e2ef9 100644 --- a/src/env.h +++ b/src/env.h @@ -776,6 +776,7 @@ class Environment : public MemoryRetainer { void stop_sub_worker_contexts(); template inline void ForEachWorker(Fn&& iterator); + // Determine if the environment is stopping. This getter is thread-safe. 
inline bool is_stopping() const; inline void set_stopping(bool value); inline std::list* extra_linked_bindings(); diff --git a/src/node_http2.cc b/src/node_http2.cc index d88e25da6c6776..3567885774dc54 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -1127,7 +1127,7 @@ int Http2Session::OnStreamClose(nghttp2_session* handle, // Don't close synchronously in case there's pending data to be written. This // may happen when writing trailing headers. if (code == NGHTTP2_NO_ERROR && nghttp2_session_want_write(handle) && - !env->is_stopping()) { + env->can_call_into_js()) { env->SetImmediate([handle, id, code, user_data](Environment* env) { OnStreamClose(handle, id, code, user_data); }); diff --git a/src/stream_base.cc b/src/stream_base.cc index 31cbf8fa199f7f..06840e06b3d5a6 100644 --- a/src/stream_base.cc +++ b/src/stream_base.cc @@ -609,7 +609,7 @@ void ReportWritesToJSStreamListener::OnStreamAfterReqFinished( StreamReq* req_wrap, int status) { StreamBase* stream = static_cast(stream_); Environment* env = stream->stream_env(); - if (env->is_stopping()) return; + if (!env->can_call_into_js()) return; AsyncWrap* async_wrap = req_wrap->GetAsyncWrap(); HandleScope handle_scope(env->isolate()); Context::Scope context_scope(env->context()); From 599d1dc8414284d92a811d68840202373bd4e1e4 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Sat, 14 Jan 2023 15:16:29 +0100 Subject: [PATCH 155/191] crypto: ensure auth tag set for chacha20-poly1305 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Because OpenSSL v1.x doesn't do that by itself (OpenSSL v3.x does.) Fixes: https://github.com/nodejs/node/issues/45874 PR-URL: https://github.com/nodejs/node/pull/46185 Reviewed-By: Tobias Nießen Reviewed-By: Richard Lau Reviewed-By: Filip Skokan Reviewed-By: Yagiz Nizipli Reviewed-By: James M Snell --- src/crypto/crypto_cipher.cc | 8 ++++++ test/parallel/test-crypto-authenticated.js | 31 ++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/src/crypto/crypto_cipher.cc b/src/crypto/crypto_cipher.cc index 81ba818a2dc7d3..2685f5ea0bea99 100644 --- a/src/crypto/crypto_cipher.cc +++ b/src/crypto/crypto_cipher.cc @@ -901,6 +901,14 @@ bool CipherBase::Final(std::unique_ptr* out) { if (kind_ == kDecipher && IsSupportedAuthenticatedMode(ctx_.get())) MaybePassAuthTagToOpenSSL(); + // OpenSSL v1.x doesn't verify the presence of the auth tag so do + // it ourselves, see https://github.com/nodejs/node/issues/45874. + if (OPENSSL_VERSION_NUMBER < 0x30000000L && kind_ == kDecipher && + NID_chacha20_poly1305 == EVP_CIPHER_CTX_nid(ctx_.get()) && + auth_tag_state_ != kAuthTagPassedToOpenSSL) { + return false; + } + // In CCM mode, final() only checks whether authentication failed in update(). // EVP_CipherFinal_ex must not be called and will fail. 
bool ok; diff --git a/test/parallel/test-crypto-authenticated.js b/test/parallel/test-crypto-authenticated.js index 162b451c5b459c..d358f6b63c0e9f 100644 --- a/test/parallel/test-crypto-authenticated.js +++ b/test/parallel/test-crypto-authenticated.js @@ -786,3 +786,34 @@ for (const test of TEST_CASES) { assert.strictEqual(plaintext.toString('hex'), testCase.plain); } } + +// https://github.com/nodejs/node/issues/45874 +{ + const rfcTestCases = TEST_CASES.filter(({ algo, tampered }) => { + return algo === 'chacha20-poly1305' && tampered === false; + }); + assert.strictEqual(rfcTestCases.length, 1); + + const [testCase] = rfcTestCases; + const key = Buffer.from(testCase.key, 'hex'); + const iv = Buffer.from(testCase.iv, 'hex'); + const aad = Buffer.from(testCase.aad, 'hex'); + const opt = { authTagLength: 16 }; + + const cipher = crypto.createCipheriv('chacha20-poly1305', key, iv, opt); + const ciphertext = Buffer.concat([ + cipher.setAAD(aad).update(testCase.plain, 'hex'), + cipher.final(), + ]); + const authTag = cipher.getAuthTag(); + + assert.strictEqual(ciphertext.toString('hex'), testCase.ct); + assert.strictEqual(authTag.toString('hex'), testCase.tag); + + const decipher = crypto.createDecipheriv('chacha20-poly1305', key, iv, opt); + decipher.setAAD(aad).update(ciphertext); + + assert.throws(() => { + decipher.final(); + }, /Unsupported state or unable to authenticate data/); +} From 9bfd40466fb0aeef0ed8cf980ba6dddb1e613133 Mon Sep 17 00:00:00 2001 From: Debadree Chatterjee Date: Sun, 15 Jan 2023 03:30:15 +0530 Subject: [PATCH 156/191] doc: update http.setMaxIdleHTTPParsers arguments Fixes: https://github.com/nodejs/node/issues/46160 PR-URL: https://github.com/nodejs/node/pull/46168 Reviewed-By: Paolo Insogna Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Darshan Sen Reviewed-By: Matteo Collina Reviewed-By: Luigi Pinca --- doc/api/http.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/api/http.md b/doc/api/http.md index 50fe8a291f6724..bb28a440b79fda 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -3693,15 +3693,15 @@ try { } ``` -## `http.setMaxIdleHTTPParsers` +## `http.setMaxIdleHTTPParsers(max)` -* {number} +* `max` {number} **Default:** `1000`. -Set the maximum number of idle HTTP parsers. **Default:** `1000`. +Set the maximum number of idle HTTP parsers. 
[RFC 8187]: https://www.rfc-editor.org/rfc/rfc8187.txt [`'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`]: errors.md#err_http_content_length_mismatch From cce2af4306ca11df34bc5e34f288b4dc66058a87 Mon Sep 17 00:00:00 2001 From: Jiawen Geng Date: Sun, 15 Jan 2023 20:23:42 +0800 Subject: [PATCH 157/191] deps: bump googletest to 2023.01.13 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit hash: 356fc301251378e0f6fa6aa794d73714202887ac PR-URL: https://github.com/nodejs/node/pull/46198 Reviewed-By: Richard Lau Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca --- deps/googletest/googletest.gyp | 10 +- .../include/gtest/gtest-assertion-result.h | 237 +++ .../include/gtest/gtest-death-test.h | 85 +- .../googletest/include/gtest/gtest-matchers.h | 78 +- deps/googletest/include/gtest/gtest-message.h | 29 +- .../include/gtest/gtest-param-test.h | 124 +- .../googletest/include/gtest/gtest-printers.h | 150 +- deps/googletest/include/gtest/gtest-spi.h | 134 +- .../include/gtest/gtest-test-part.h | 16 +- .../include/gtest/gtest-typed-test.h | 38 +- deps/googletest/include/gtest/gtest.h | 546 +++---- .../include/gtest/gtest_pred_impl.h | 200 +-- deps/googletest/include/gtest/gtest_prod.h | 9 +- .../include/gtest/internal/custom/README.md | 14 - .../internal/gtest-death-test-internal.h | 73 +- .../include/gtest/internal/gtest-filepath.h | 34 +- .../include/gtest/internal/gtest-internal.h | 335 ++-- .../include/gtest/internal/gtest-param-util.h | 220 ++- .../include/gtest/internal/gtest-port-arch.h | 102 +- .../include/gtest/internal/gtest-port.h | 1140 +++++++------- .../include/gtest/internal/gtest-string.h | 19 +- .../include/gtest/internal/gtest-type-util.h | 25 +- deps/googletest/src/gtest-all.cc | 3 +- deps/googletest/src/gtest-assertion-result.cc | 77 + deps/googletest/src/gtest-death-test.cc | 441 +++--- deps/googletest/src/gtest-filepath.cc | 155 +- deps/googletest/src/gtest-internal-inl.h | 119 +- deps/googletest/src/gtest-matchers.cc | 5 +- deps/googletest/src/gtest-port.cc | 347 ++-- deps/googletest/src/gtest-printers.cc | 122 +- deps/googletest/src/gtest-test-part.cc | 19 +- deps/googletest/src/gtest-typed-test.cc | 7 +- deps/googletest/src/gtest.cc | 1389 +++++++++-------- deps/googletest/src/gtest_main.cc | 17 +- 34 files changed, 3391 insertions(+), 2928 deletions(-) create mode 100644 deps/googletest/include/gtest/gtest-assertion-result.h create mode 100644 deps/googletest/src/gtest-assertion-result.cc diff --git a/deps/googletest/googletest.gyp b/deps/googletest/googletest.gyp index 79387c8704d507..0b4ef28d582aa7 100644 --- a/deps/googletest/googletest.gyp +++ b/deps/googletest/googletest.gyp @@ -4,7 +4,7 @@ 'target_name': 'gtest', 'type': 'static_library', 'sources': [ - 'include/gtest/gtest_pred_impl.h', + 'include/gtest/gtest-assertion-result.h', 'include/gtest/gtest-death-test.h', 'include/gtest/gtest-matchers.h', 'include/gtest/gtest-message.h', @@ -14,6 +14,10 @@ 'include/gtest/gtest-test-part.h', 'include/gtest/gtest-typed-test.h', 'include/gtest/gtest.h', + 'include/gtest/gtest_pred_impl.h', + 'include/gtest/internal/custom/gtest-port.h', + 'include/gtest/internal/custom/gtest-printers.h', + 'include/gtest/internal/custom/gtest.h', 'include/gtest/internal/gtest-death-test-internal.h', 'include/gtest/internal/gtest-filepath.h', 'include/gtest/internal/gtest-internal.h', @@ -22,10 +26,8 @@ 'include/gtest/internal/gtest-port.h', 'include/gtest/internal/gtest-string.h', 'include/gtest/internal/gtest-type-util.h', - 
'include/gtest/internal/custom/gtest-port.h', - 'include/gtest/internal/custom/gtest-printers.h', - 'include/gtest/internal/custom/gtest.h', 'src/gtest-all.cc', + 'src/gtest-assertion-result.cc', 'src/gtest-death-test.cc', 'src/gtest-filepath.cc', 'src/gtest-internal-inl.h', diff --git a/deps/googletest/include/gtest/gtest-assertion-result.h b/deps/googletest/include/gtest/gtest-assertion-result.h new file mode 100644 index 00000000000000..addbb59c6413c2 --- /dev/null +++ b/deps/googletest/include/gtest/gtest-assertion-result.h @@ -0,0 +1,237 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// The Google C++ Testing and Mocking Framework (Google Test) +// +// This file implements the AssertionResult type. + +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* + +#ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_ASSERTION_RESULT_H_ +#define GOOGLETEST_INCLUDE_GTEST_GTEST_ASSERTION_RESULT_H_ + +#include +#include +#include +#include + +#include "gtest/gtest-message.h" +#include "gtest/internal/gtest-port.h" + +GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \ +/* class A needs to have dll-interface to be used by clients of class B */) + +namespace testing { + +// A class for indicating whether an assertion was successful. When +// the assertion wasn't successful, the AssertionResult object +// remembers a non-empty message that describes how it failed. +// +// To create an instance of this class, use one of the factory functions +// (AssertionSuccess() and AssertionFailure()). +// +// This class is useful for two purposes: +// 1. Defining predicate functions to be used with Boolean test assertions +// EXPECT_TRUE/EXPECT_FALSE and their ASSERT_ counterparts +// 2. Defining predicate-format functions to be +// used with predicate assertions (ASSERT_PRED_FORMAT*, etc). 
+// +// For example, if you define IsEven predicate: +// +// testing::AssertionResult IsEven(int n) { +// if ((n % 2) == 0) +// return testing::AssertionSuccess(); +// else +// return testing::AssertionFailure() << n << " is odd"; +// } +// +// Then the failed expectation EXPECT_TRUE(IsEven(Fib(5))) +// will print the message +// +// Value of: IsEven(Fib(5)) +// Actual: false (5 is odd) +// Expected: true +// +// instead of a more opaque +// +// Value of: IsEven(Fib(5)) +// Actual: false +// Expected: true +// +// in case IsEven is a simple Boolean predicate. +// +// If you expect your predicate to be reused and want to support informative +// messages in EXPECT_FALSE and ASSERT_FALSE (negative assertions show up +// about half as often as positive ones in our tests), supply messages for +// both success and failure cases: +// +// testing::AssertionResult IsEven(int n) { +// if ((n % 2) == 0) +// return testing::AssertionSuccess() << n << " is even"; +// else +// return testing::AssertionFailure() << n << " is odd"; +// } +// +// Then a statement EXPECT_FALSE(IsEven(Fib(6))) will print +// +// Value of: IsEven(Fib(6)) +// Actual: true (8 is even) +// Expected: false +// +// NB: Predicates that support negative Boolean assertions have reduced +// performance in positive ones so be careful not to use them in tests +// that have lots (tens of thousands) of positive Boolean assertions. +// +// To use this class with EXPECT_PRED_FORMAT assertions such as: +// +// // Verifies that Foo() returns an even number. +// EXPECT_PRED_FORMAT1(IsEven, Foo()); +// +// you need to define: +// +// testing::AssertionResult IsEven(const char* expr, int n) { +// if ((n % 2) == 0) +// return testing::AssertionSuccess(); +// else +// return testing::AssertionFailure() +// << "Expected: " << expr << " is even\n Actual: it's " << n; +// } +// +// If Foo() returns 5, you will see the following message: +// +// Expected: Foo() is even +// Actual: it's 5 +// +class GTEST_API_ AssertionResult { + public: + // Copy constructor. + // Used in EXPECT_TRUE/FALSE(assertion_result). + AssertionResult(const AssertionResult& other); + +// C4800 is a level 3 warning in Visual Studio 2015 and earlier. +// This warning is not emitted in Visual Studio 2017. +// This warning is off by default starting in Visual Studio 2019 but can be +// enabled with command-line options. +#if defined(_MSC_VER) && (_MSC_VER < 1910 || _MSC_VER >= 1920) + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4800 /* forcing value to bool */) +#endif + + // Used in the EXPECT_TRUE/FALSE(bool_expression). + // + // T must be contextually convertible to bool. + // + // The second parameter prevents this overload from being considered if + // the argument is implicitly convertible to AssertionResult. In that case + // we want AssertionResult's copy constructor to be used. + template + explicit AssertionResult( + const T& success, + typename std::enable_if< + !std::is_convertible::value>::type* + /*enabler*/ + = nullptr) + : success_(success) {} + +#if defined(_MSC_VER) && (_MSC_VER < 1910 || _MSC_VER >= 1920) + GTEST_DISABLE_MSC_WARNINGS_POP_() +#endif + + // Assignment operator. + AssertionResult& operator=(AssertionResult other) { + swap(other); + return *this; + } + + // Returns true if and only if the assertion succeeded. + operator bool() const { return success_; } // NOLINT + + // Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE. + AssertionResult operator!() const; + + // Returns the text streamed into this AssertionResult. 
Test assertions + // use it when they fail (i.e., the predicate's outcome doesn't match the + // assertion's expectation). When nothing has been streamed into the + // object, returns an empty string. + const char* message() const { + return message_.get() != nullptr ? message_->c_str() : ""; + } + // Deprecated; please use message() instead. + const char* failure_message() const { return message(); } + + // Streams a custom failure message into this object. + template + AssertionResult& operator<<(const T& value) { + AppendMessage(Message() << value); + return *this; + } + + // Allows streaming basic output manipulators such as endl or flush into + // this object. + AssertionResult& operator<<( + ::std::ostream& (*basic_manipulator)(::std::ostream& stream)) { + AppendMessage(Message() << basic_manipulator); + return *this; + } + + private: + // Appends the contents of message to message_. + void AppendMessage(const Message& a_message) { + if (message_.get() == nullptr) message_.reset(new ::std::string); + message_->append(a_message.GetString().c_str()); + } + + // Swap the contents of this AssertionResult with other. + void swap(AssertionResult& other); + + // Stores result of the assertion predicate. + bool success_; + // Stores the message describing the condition in case the expectation + // construct is not satisfied with the predicate's outcome. + // Referenced via a pointer to avoid taking too much stack frame space + // with test assertions. + std::unique_ptr< ::std::string> message_; +}; + +// Makes a successful assertion result. +GTEST_API_ AssertionResult AssertionSuccess(); + +// Makes a failed assertion result. +GTEST_API_ AssertionResult AssertionFailure(); + +// Makes a failed assertion result with the given failure message. +// Deprecated; use AssertionFailure() << msg. +GTEST_API_ AssertionResult AssertionFailure(const Message& msg); + +} // namespace testing + +GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 + +#endif // GOOGLETEST_INCLUDE_GTEST_GTEST_ASSERTION_RESULT_H_ diff --git a/deps/googletest/include/gtest/gtest-death-test.h b/deps/googletest/include/gtest/gtest-death-test.h index 4df53d973d3e27..84e5a5bbd372df 100644 --- a/deps/googletest/include/gtest/gtest-death-test.h +++ b/deps/googletest/include/gtest/gtest-death-test.h @@ -27,13 +27,15 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// // The Google C++ Testing and Mocking Framework (Google Test) // // This header file defines the public API for death tests. It is // #included by gtest.h so a user doesn't need to include this // directly. -// GOOGLETEST_CM0001 DO NOT DELETE + +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_ @@ -103,7 +105,6 @@ GTEST_API_ bool InDeathTestChild(); // // On the regular expressions used in death tests: // -// GOOGLETEST_CM0005 DO NOT DELETE // On POSIX-compliant systems (*nix), we use the library, // which uses the POSIX extended regex syntax. // @@ -169,24 +170,24 @@ GTEST_API_ bool InDeathTestChild(); // Asserts that a given `statement` causes the program to exit, with an // integer exit status that satisfies `predicate`, and emitting error output // that matches `matcher`. 
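To make the macro semantics above concrete, here is a minimal sketch of typical usage; the helper function and test names are hypothetical, and the "DeathTest" suffix opts the suite into Google Test's death-test scheduling (such suites run first):

```cpp
#include <cstdio>
#include <cstdlib>

#include "gtest/gtest.h"

// Hypothetical helper under test: exits with status 2 on bad input.
static void CrashOnBadInput(int n) {
  if (n <= 0) {
    std::fprintf(stderr, "bad input: %d\n", n);
    std::exit(2);
  }
}

TEST(InputDeathTest, DiesOnBadInput) {
  // Checks both the exit status and the stderr output.
  EXPECT_EXIT(CrashOnBadInput(0), testing::ExitedWithCode(2), "bad input");
  // Shorthand: any unsuccessful exit whose output matches the regex.
  EXPECT_DEATH(CrashOnBadInput(-1), "bad input");
}
```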
-# define ASSERT_EXIT(statement, predicate, matcher) \ - GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_) +#define ASSERT_EXIT(statement, predicate, matcher) \ + GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_FATAL_FAILURE_) // Like `ASSERT_EXIT`, but continues on to successive tests in the // test suite, if any: -# define EXPECT_EXIT(statement, predicate, matcher) \ - GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_) +#define EXPECT_EXIT(statement, predicate, matcher) \ + GTEST_DEATH_TEST_(statement, predicate, matcher, GTEST_NONFATAL_FAILURE_) // Asserts that a given `statement` causes the program to exit, either by // explicitly exiting with a nonzero exit code or being killed by a // signal, and emitting error output that matches `matcher`. -# define ASSERT_DEATH(statement, matcher) \ - ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher) +#define ASSERT_DEATH(statement, matcher) \ + ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher) // Like `ASSERT_DEATH`, but continues on to successive tests in the // test suite, if any: -# define EXPECT_DEATH(statement, matcher) \ - EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher) +#define EXPECT_DEATH(statement, matcher) \ + EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, matcher) // Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*: @@ -197,22 +198,23 @@ class GTEST_API_ ExitedWithCode { ExitedWithCode(const ExitedWithCode&) = default; void operator=(const ExitedWithCode& other) = delete; bool operator()(int exit_status) const; + private: const int exit_code_; }; -# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA // Tests that an exit code describes an exit due to termination by a // given signal. -// GOOGLETEST_CM0006 DO NOT DELETE class GTEST_API_ KilledBySignal { public: explicit KilledBySignal(int signum); bool operator()(int exit_status) const; + private: const int signum_; }; -# endif // !GTEST_OS_WINDOWS +#endif // !GTEST_OS_WINDOWS // EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode. // The death testing framework causes this to have interesting semantics, @@ -257,23 +259,21 @@ class GTEST_API_ KilledBySignal { // EXPECT_EQ(12, DieInDebugOr12(&sideeffect)); // }, "death"); // -# ifdef NDEBUG +#ifdef NDEBUG -# define EXPECT_DEBUG_DEATH(statement, regex) \ +#define EXPECT_DEBUG_DEATH(statement, regex) \ GTEST_EXECUTE_STATEMENT_(statement, regex) -# define ASSERT_DEBUG_DEATH(statement, regex) \ +#define ASSERT_DEBUG_DEATH(statement, regex) \ GTEST_EXECUTE_STATEMENT_(statement, regex) -# else +#else -# define EXPECT_DEBUG_DEATH(statement, regex) \ - EXPECT_DEATH(statement, regex) +#define EXPECT_DEBUG_DEATH(statement, regex) EXPECT_DEATH(statement, regex) -# define ASSERT_DEBUG_DEATH(statement, regex) \ - ASSERT_DEATH(statement, regex) +#define ASSERT_DEBUG_DEATH(statement, regex) ASSERT_DEATH(statement, regex) -# endif // NDEBUG for EXPECT_DEBUG_DEATH +#endif // NDEBUG for EXPECT_DEBUG_DEATH #endif // GTEST_HAS_DEATH_TEST // This macro is used for implementing macros such as @@ -311,18 +311,17 @@ class GTEST_API_ KilledBySignal { // statement unconditionally returns or throws. The Message constructor at // the end allows the syntax of streaming additional messages into the // macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH. 
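A brief sketch of the portable variants this machinery enables, assuming the usual test-target setup:

```cpp
#include <cstdlib>

#include "gtest/gtest.h"

TEST(PortableDeathTest, DiesWhereSupported) {
  // On platforms with death-test support this behaves like EXPECT_DEATH;
  // elsewhere it only logs a warning, so the suite stays portable.
  EXPECT_DEATH_IF_SUPPORTED(std::abort(), "");
}
```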
-# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (::testing::internal::AlwaysTrue()) { \ - GTEST_LOG_(WARNING) \ - << "Death tests are not supported on this platform.\n" \ - << "Statement '" #statement "' cannot be verified."; \ - } else if (::testing::internal::AlwaysFalse()) { \ - ::testing::internal::RE::PartialMatch(".*", (regex)); \ - GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ - terminator; \ - } else \ - ::testing::Message() +#define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (::testing::internal::AlwaysTrue()) { \ + GTEST_LOG_(WARNING) << "Death tests are not supported on this platform.\n" \ + << "Statement '" #statement "' cannot be verified."; \ + } else if (::testing::internal::AlwaysFalse()) { \ + ::testing::internal::RE::PartialMatch(".*", (regex)); \ + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ + terminator; \ + } else \ + ::testing::Message() // EXPECT_DEATH_IF_SUPPORTED(statement, regex) and // ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if @@ -330,15 +329,15 @@ class GTEST_API_ KilledBySignal { // useful when you are combining death test assertions with normal test // assertions in one test. #if GTEST_HAS_DEATH_TEST -# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \ - EXPECT_DEATH(statement, regex) -# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \ - ASSERT_DEATH(statement, regex) +#define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \ + EXPECT_DEATH(statement, regex) +#define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \ + ASSERT_DEATH(statement, regex) #else -# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \ - GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, ) -# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \ - GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return) +#define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \ + GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, ) +#define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \ + GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return) #endif } // namespace testing diff --git a/deps/googletest/include/gtest/gtest-matchers.h b/deps/googletest/include/gtest/gtest-matchers.h index 9fa34a05ba418e..4a60b0d0b8da61 100644 --- a/deps/googletest/include/gtest/gtest-matchers.h +++ b/deps/googletest/include/gtest/gtest-matchers.h @@ -32,6 +32,10 @@ // This file implements just enough of the matcher interface to allow // EXPECT_DEATH and friends to accept a matcher argument. +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* + #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_MATCHERS_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_MATCHERS_H_ @@ -98,11 +102,11 @@ class MatchResultListener { private: ::std::ostream* const stream_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(MatchResultListener); + MatchResultListener(const MatchResultListener&) = delete; + MatchResultListener& operator=(const MatchResultListener&) = delete; }; -inline MatchResultListener::~MatchResultListener() { -} +inline MatchResultListener::~MatchResultListener() {} // An instance of a subclass of this knows how to describe itself as a // matcher. 
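Since this header exists so that EXPECT_DEATH and friends can accept a matcher argument, a small hedged sketch of that use follows; ParseOrDie() is a hypothetical helper:

```cpp
#include <cstdio>
#include <cstdlib>

#include "gtest/gtest.h"

// Hypothetical parser that aborts with a diagnostic on malformed input.
static void ParseOrDie(const char* text) {
  std::fprintf(stderr, "parse error at line 1: unexpected token '%s'\n", text);
  std::abort();
}

TEST(ParserDeathTest, ReportsLocation) {
  // A plain string argument is treated as a partial-match regex...
  EXPECT_DEATH(ParseOrDie("{"), "unexpected token");
  // ...and the polymorphic matchers declared in this header work as well.
  EXPECT_DEATH(ParseOrDie("{"), testing::ContainsRegex("line [0-9]+"));
}
```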
@@ -176,27 +180,39 @@ namespace internal { struct AnyEq { template - bool operator()(const A& a, const B& b) const { return a == b; } + bool operator()(const A& a, const B& b) const { + return a == b; + } }; struct AnyNe { template - bool operator()(const A& a, const B& b) const { return a != b; } + bool operator()(const A& a, const B& b) const { + return a != b; + } }; struct AnyLt { template - bool operator()(const A& a, const B& b) const { return a < b; } + bool operator()(const A& a, const B& b) const { + return a < b; + } }; struct AnyGt { template - bool operator()(const A& a, const B& b) const { return a > b; } + bool operator()(const A& a, const B& b) const { + return a > b; + } }; struct AnyLe { template - bool operator()(const A& a, const B& b) const { return a <= b; } + bool operator()(const A& a, const B& b) const { + return a <= b; + } }; struct AnyGe { template - bool operator()(const A& a, const B& b) const { return a >= b; } + bool operator()(const A& a, const B& b) const { + return a >= b; + } }; // A match result listener that ignores the explanation. @@ -205,7 +221,8 @@ class DummyMatchResultListener : public MatchResultListener { DummyMatchResultListener() : MatchResultListener(nullptr) {} private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(DummyMatchResultListener); + DummyMatchResultListener(const DummyMatchResultListener&) = delete; + DummyMatchResultListener& operator=(const DummyMatchResultListener&) = delete; }; // A match result listener that forwards the explanation to a given @@ -217,7 +234,9 @@ class StreamMatchResultListener : public MatchResultListener { : MatchResultListener(os) {} private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamMatchResultListener); + StreamMatchResultListener(const StreamMatchResultListener&) = delete; + StreamMatchResultListener& operator=(const StreamMatchResultListener&) = + delete; }; struct SharedPayloadBase { @@ -284,17 +303,18 @@ class MatcherBase : private MatcherDescriberInterface { } protected: - MatcherBase() : vtable_(nullptr) {} + MatcherBase() : vtable_(nullptr), buffer_() {} // Constructs a matcher from its implementation. template - explicit MatcherBase(const MatcherInterface* impl) { + explicit MatcherBase(const MatcherInterface* impl) + : vtable_(nullptr), buffer_() { Init(impl); } template ::type::is_gtest_matcher> - MatcherBase(M&& m) { // NOLINT + MatcherBase(M&& m) : vtable_(nullptr), buffer_() { // NOLINT Init(std::forward(m)); } @@ -420,8 +440,8 @@ class MatcherBase : private MatcherDescriberInterface { static const M& Get(const MatcherBase& m) { // When inlined along with Init, need to be explicit to avoid violating // strict aliasing rules. 
- const M *ptr = static_cast( - static_cast(&m.buffer_)); + const M* ptr = + static_cast(static_cast(&m.buffer_)); return *ptr; } static void Init(MatcherBase& m, M impl) { @@ -741,7 +761,7 @@ template class EqMatcher : public ComparisonBase, Rhs, AnyEq> { public: explicit EqMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyEq>(rhs) { } + : ComparisonBase, Rhs, AnyEq>(rhs) {} static const char* Desc() { return "is equal to"; } static const char* NegatedDesc() { return "isn't equal to"; } }; @@ -749,7 +769,7 @@ template class NeMatcher : public ComparisonBase, Rhs, AnyNe> { public: explicit NeMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyNe>(rhs) { } + : ComparisonBase, Rhs, AnyNe>(rhs) {} static const char* Desc() { return "isn't equal to"; } static const char* NegatedDesc() { return "is equal to"; } }; @@ -757,7 +777,7 @@ template class LtMatcher : public ComparisonBase, Rhs, AnyLt> { public: explicit LtMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyLt>(rhs) { } + : ComparisonBase, Rhs, AnyLt>(rhs) {} static const char* Desc() { return "is <"; } static const char* NegatedDesc() { return "isn't <"; } }; @@ -765,7 +785,7 @@ template class GtMatcher : public ComparisonBase, Rhs, AnyGt> { public: explicit GtMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyGt>(rhs) { } + : ComparisonBase, Rhs, AnyGt>(rhs) {} static const char* Desc() { return "is >"; } static const char* NegatedDesc() { return "isn't >"; } }; @@ -773,7 +793,7 @@ template class LeMatcher : public ComparisonBase, Rhs, AnyLe> { public: explicit LeMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyLe>(rhs) { } + : ComparisonBase, Rhs, AnyLe>(rhs) {} static const char* Desc() { return "is <="; } static const char* NegatedDesc() { return "isn't <="; } }; @@ -781,7 +801,7 @@ template class GeMatcher : public ComparisonBase, Rhs, AnyGe> { public: explicit GeMatcher(const Rhs& rhs) - : ComparisonBase, Rhs, AnyGe>(rhs) { } + : ComparisonBase, Rhs, AnyGe>(rhs) {} static const char* Desc() { return "is >="; } static const char* NegatedDesc() { return "isn't >="; } }; @@ -822,7 +842,7 @@ class MatchesRegexMatcher { template bool MatchAndExplain(const MatcheeStringType& s, MatchResultListener* /* listener */) const { - const std::string& s2(s); + const std::string s2(s); return full_match_ ? RE::FullMatch(s2, *regex_) : RE::PartialMatch(s2, *regex_); } @@ -872,12 +892,16 @@ PolymorphicMatcher ContainsRegex( // Note: if the parameter of Eq() were declared as const T&, Eq("foo") // wouldn't compile. template -inline internal::EqMatcher Eq(T x) { return internal::EqMatcher(x); } +inline internal::EqMatcher Eq(T x) { + return internal::EqMatcher(x); +} // Constructs a Matcher from a 'value' of type T. The constructed // matcher matches any value that's equal to 'value'. template -Matcher::Matcher(T value) { *this = Eq(value); } +Matcher::Matcher(T value) { + *this = Eq(value); +} // Creates a monomorphic matcher that matches anything with type Lhs // and equal to rhs. A user may need to use this instead of Eq(...) @@ -892,7 +916,9 @@ Matcher::Matcher(T value) { *this = Eq(value); } // can always write Matcher(Lt(5)) to be explicit about the type, // for example. template -inline Matcher TypedEq(const Rhs& rhs) { return Eq(rhs); } +inline Matcher TypedEq(const Rhs& rhs) { + return Eq(rhs); +} // Creates a polymorphic matcher that matches anything >= x. 
template diff --git a/deps/googletest/include/gtest/gtest-message.h b/deps/googletest/include/gtest/gtest-message.h index becfd49fcba9c5..4d4b152b1d8b70 100644 --- a/deps/googletest/include/gtest/gtest-message.h +++ b/deps/googletest/include/gtest/gtest-message.h @@ -27,7 +27,6 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// // The Google C++ Testing and Mocking Framework (Google Test) // // This header file defines the Message class. @@ -42,14 +41,18 @@ // to CHANGE WITHOUT NOTICE. Therefore DO NOT DEPEND ON IT in a user // program! -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_MESSAGE_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_MESSAGE_H_ #include #include +#include #include +#include #include "gtest/internal/gtest-port.h" @@ -110,8 +113,8 @@ class GTEST_API_ Message { // Streams a non-pointer value to this object. template - inline Message& operator <<(const T& val) { - // Some libraries overload << for STL containers. These + inline Message& operator<<(const T& val) { + // Some libraries overload << for STL containers. These // overloads are defined in the global namespace instead of ::std. // // C++'s symbol lookup rule (i.e. Koenig lookup) says that these @@ -125,7 +128,7 @@ class GTEST_API_ Message { // from the global namespace. With this using declaration, // overloads of << defined in the global namespace and those // visible via Koenig lookup are both exposed in this function. - using ::operator <<; + using ::operator<<; *ss_ << val; return *this; } @@ -144,7 +147,7 @@ class GTEST_API_ Message { // ensure consistent result across compilers, we always treat NULL // as "(null)". template - inline Message& operator <<(T* const& pointer) { // NOLINT + inline Message& operator<<(T* const& pointer) { // NOLINT if (pointer == nullptr) { *ss_ << "(null)"; } else { @@ -159,25 +162,23 @@ class GTEST_API_ Message { // templatized version above. Without this definition, streaming // endl or other basic IO manipulators to Message will confuse the // compiler. - Message& operator <<(BasicNarrowIoManip val) { + Message& operator<<(BasicNarrowIoManip val) { *ss_ << val; return *this; } // Instead of 1/0, we want to see true/false for bool values. - Message& operator <<(bool b) { - return *this << (b ? "true" : "false"); - } + Message& operator<<(bool b) { return *this << (b ? "true" : "false"); } // These two overloads allow streaming a wide C string to a Message // using the UTF-8 encoding. - Message& operator <<(const wchar_t* wide_c_str); - Message& operator <<(wchar_t* wide_c_str); + Message& operator<<(const wchar_t* wide_c_str); + Message& operator<<(wchar_t* wide_c_str); #if GTEST_HAS_STD_WSTRING // Converts the given wide string to a narrow string using the UTF-8 // encoding, and streams the result to this Message object. - Message& operator <<(const ::std::wstring& wstr); + Message& operator<<(const ::std::wstring& wstr); #endif // GTEST_HAS_STD_WSTRING // Gets the text streamed to this object so far as an std::string. @@ -196,7 +197,7 @@ class GTEST_API_ Message { }; // Streams a Message to an ostream. 
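As a standalone illustration of the streaming behaviour documented in this class (a sketch, using the full gtest include for brevity):

```cpp
#include <iostream>

#include "gtest/gtest.h"

int main() {
  testing::Message msg;
  msg << "checked " << 3 << " items, ok=" << true;  // bool prints as "true"
  const char* name = nullptr;
  msg << ", name=" << name;  // a NULL char pointer prints as "(null)"
  std::cout << msg.GetString() << "\n";
  // Output: checked 3 items, ok=true, name=(null)
}
```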
-inline std::ostream& operator <<(std::ostream& os, const Message& sb) { +inline std::ostream& operator<<(std::ostream& os, const Message& sb) { return os << sb.GetString(); } diff --git a/deps/googletest/include/gtest/gtest-param-test.h b/deps/googletest/include/gtest/gtest-param-test.h index 804e702817b03b..1adb9a725203ed 100644 --- a/deps/googletest/include/gtest/gtest-param-test.h +++ b/deps/googletest/include/gtest/gtest-param-test.h @@ -26,11 +26,14 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// + // Macros and functions for implementing parameterized tests // in Google C++ Testing and Mocking Framework (Google Test) -// -// GOOGLETEST_CM0001 DO NOT DELETE + +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* + #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_ @@ -353,9 +356,7 @@ internal::ValueArray Values(T... v) { // } // INSTANTIATE_TEST_SUITE_P(BoolSequence, FlagDependentTest, Bool()); // -inline internal::ParamGenerator Bool() { - return Values(false, true); -} +inline internal::ParamGenerator Bool() { return Values(false, true); } // Combine() allows the user to combine two or more sequences to produce // values of a Cartesian product of those sequences' elements. @@ -406,9 +407,49 @@ internal::CartesianProductHolder Combine(const Generator&... g) { return internal::CartesianProductHolder(g...); } +// ConvertGenerator() wraps a parameter generator in order to cast each produced +// value through a known type before supplying it to the test suite +// +// Synopsis: +// ConvertGenerator(gen) +// - returns a generator producing the same elements as generated by gen, but +// each element is static_cast to type T before being returned +// +// It is useful when using the Combine() function to get the generated +// parameters in a custom type instead of std::tuple +// +// Example: +// +// This will instantiate tests in test suite AnimalTest each one with +// the parameter values tuple("cat", BLACK), tuple("cat", WHITE), +// tuple("dog", BLACK), and tuple("dog", WHITE): +// +// enum Color { BLACK, GRAY, WHITE }; +// struct ParamType { +// using TupleT = std::tuple; +// std::string animal; +// Color color; +// ParamType(TupleT t) : animal(std::get<0>(t)), color(std::get<1>(t)) {} +// }; +// class AnimalTest +// : public testing::TestWithParam {...}; +// +// TEST_P(AnimalTest, AnimalLooksNice) {...} +// +// INSTANTIATE_TEST_SUITE_P(AnimalVariations, AnimalTest, +// ConvertGenerator( +// Combine(Values("cat", "dog"), +// Values(BLACK, WHITE)))); +// +template +internal::ParamConverterGenerator ConvertGenerator( + internal::ParamGenerator gen) { + return internal::ParamConverterGenerator(gen); +} + #define TEST_P(test_suite_name, test_name) \ class GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ - : public test_suite_name { \ + : public test_suite_name, private ::testing::internal::GTestNonCopyable {\ public: \ GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() {} \ void TestBody() override; \ @@ -428,8 +469,6 @@ internal::CartesianProductHolder Combine(const Generator&... 
g) { return 0; \ } \ static int gtest_registering_dummy_ GTEST_ATTRIBUTE_UNUSED_; \ - GTEST_DISALLOW_COPY_AND_ASSIGN_(GTEST_TEST_CLASS_NAME_(test_suite_name, \ - test_name)); \ }; \ int GTEST_TEST_CLASS_NAME_(test_suite_name, \ test_name)::gtest_registering_dummy_ = \ @@ -453,43 +492,42 @@ internal::CartesianProductHolder Combine(const Generator&... g) { #define GTEST_GET_FIRST_(first, ...) first #define GTEST_GET_SECOND_(first, second, ...) second -#define INSTANTIATE_TEST_SUITE_P(prefix, test_suite_name, ...) \ - static ::testing::internal::ParamGenerator \ - gtest_##prefix##test_suite_name##_EvalGenerator_() { \ - return GTEST_EXPAND_(GTEST_GET_FIRST_(__VA_ARGS__, DUMMY_PARAM_)); \ - } \ - static ::std::string gtest_##prefix##test_suite_name##_EvalGenerateName_( \ - const ::testing::TestParamInfo& info) { \ - if (::testing::internal::AlwaysFalse()) { \ - ::testing::internal::TestNotEmpty(GTEST_EXPAND_(GTEST_GET_SECOND_( \ - __VA_ARGS__, \ - ::testing::internal::DefaultParamName, \ - DUMMY_PARAM_))); \ - auto t = std::make_tuple(__VA_ARGS__); \ - static_assert(std::tuple_size::value <= 2, \ - "Too Many Args!"); \ - } \ - return ((GTEST_EXPAND_(GTEST_GET_SECOND_( \ - __VA_ARGS__, \ - ::testing::internal::DefaultParamName, \ - DUMMY_PARAM_))))(info); \ - } \ - static int gtest_##prefix##test_suite_name##_dummy_ \ - GTEST_ATTRIBUTE_UNUSED_ = \ - ::testing::UnitTest::GetInstance() \ - ->parameterized_test_registry() \ - .GetTestSuitePatternHolder( \ - GTEST_STRINGIFY_(test_suite_name), \ - ::testing::internal::CodeLocation(__FILE__, __LINE__)) \ - ->AddTestSuiteInstantiation( \ - GTEST_STRINGIFY_(prefix), \ - >est_##prefix##test_suite_name##_EvalGenerator_, \ - >est_##prefix##test_suite_name##_EvalGenerateName_, \ +#define INSTANTIATE_TEST_SUITE_P(prefix, test_suite_name, ...) \ + static ::testing::internal::ParamGenerator \ + gtest_##prefix##test_suite_name##_EvalGenerator_() { \ + return GTEST_EXPAND_(GTEST_GET_FIRST_(__VA_ARGS__, DUMMY_PARAM_)); \ + } \ + static ::std::string gtest_##prefix##test_suite_name##_EvalGenerateName_( \ + const ::testing::TestParamInfo& info) { \ + if (::testing::internal::AlwaysFalse()) { \ + ::testing::internal::TestNotEmpty(GTEST_EXPAND_(GTEST_GET_SECOND_( \ + __VA_ARGS__, \ + ::testing::internal::DefaultParamName, \ + DUMMY_PARAM_))); \ + auto t = std::make_tuple(__VA_ARGS__); \ + static_assert(std::tuple_size::value <= 2, \ + "Too Many Args!"); \ + } \ + return ((GTEST_EXPAND_(GTEST_GET_SECOND_( \ + __VA_ARGS__, \ + ::testing::internal::DefaultParamName, \ + DUMMY_PARAM_))))(info); \ + } \ + static int gtest_##prefix##test_suite_name##_dummy_ \ + GTEST_ATTRIBUTE_UNUSED_ = \ + ::testing::UnitTest::GetInstance() \ + ->parameterized_test_registry() \ + .GetTestSuitePatternHolder( \ + GTEST_STRINGIFY_(test_suite_name), \ + ::testing::internal::CodeLocation(__FILE__, __LINE__)) \ + ->AddTestSuiteInstantiation( \ + GTEST_STRINGIFY_(prefix), \ + >est_##prefix##test_suite_name##_EvalGenerator_, \ + >est_##prefix##test_suite_name##_EvalGenerateName_, \ __FILE__, __LINE__) - // Allow Marking a Parameterized test class as not needing to be instantiated. 
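Before the macro plumbing below, a minimal end-to-end sketch of the value-parameterized API this header provides; the fixture and instantiation names are hypothetical:

```cpp
#include "gtest/gtest.h"

// Each TEST_P body runs once per generated parameter.
class ModeTest : public testing::TestWithParam<int> {};

TEST_P(ModeTest, ModeIsNonNegative) {
  EXPECT_GE(GetParam(), 0);  // GetParam() yields the current value
}

// Instantiates the suite for the values 0, 1 and 2.
INSTANTIATE_TEST_SUITE_P(AllModes, ModeTest, testing::Values(0, 1, 2));
```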
-#define GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(T) \ +#define GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(T) \ namespace gtest_do_not_use_outside_namespace_scope {} \ static const ::testing::internal::MarkAsIgnored gtest_allow_ignore_##T( \ GTEST_STRINGIFY_(T)) diff --git a/deps/googletest/include/gtest/gtest-printers.h b/deps/googletest/include/gtest/gtest-printers.h index 8a3431d1b3b8b9..0055e37ffa7c96 100644 --- a/deps/googletest/include/gtest/gtest-printers.h +++ b/deps/googletest/include/gtest/gtest-printers.h @@ -27,7 +27,6 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - // Google Test - The Google C++ Testing and Mocking Framework // // This file implements a universal value printer that can print a @@ -95,7 +94,9 @@ // being defined as many user-defined container types don't have // value_type. -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_PRINTERS_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_PRINTERS_H_ @@ -107,6 +108,7 @@ #include #include #include +#include #include #include @@ -257,12 +259,10 @@ struct ConvertibleToStringViewPrinter { #endif }; - // Prints the given number of bytes in the given object to the given // ostream. GTEST_API_ void PrintBytesInObjectTo(const unsigned char* obj_bytes, - size_t count, - ::std::ostream* os); + size_t count, ::std::ostream* os); struct RawBytesPrinter { // SFINAE on `sizeof` to make sure we have a complete type. template @@ -375,17 +375,17 @@ GTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(const char32_t); // to point to a NUL-terminated string, and thus can print it as a string. #define GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(CharType, OtherStringType) \ - template <> \ - class FormatForComparison { \ - public: \ - static ::std::string Format(CharType* value) { \ - return ::testing::PrintToString(value); \ - } \ + template <> \ + class FormatForComparison { \ + public: \ + static ::std::string Format(CharType* value) { \ + return ::testing::PrintToString(value); \ + } \ } GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(char, ::std::string); GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(const char, ::std::string); -#ifdef __cpp_char8_t +#ifdef __cpp_lib_char8_t GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(char8_t, ::std::u8string); GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(const char8_t, ::std::u8string); #endif @@ -410,8 +410,8 @@ GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(const wchar_t, ::std::wstring); // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. template -std::string FormatForComparisonFailureMessage( - const T1& value, const T2& /* other_operand */) { +std::string FormatForComparisonFailureMessage(const T1& value, + const T2& /* other_operand */) { return FormatForComparison::Format(value); } @@ -479,6 +479,87 @@ inline void PrintTo(char8_t c, ::std::ostream* os) { } #endif +// gcc/clang __{u,}int128_t +#if defined(__SIZEOF_INT128__) +GTEST_API_ void PrintTo(__uint128_t v, ::std::ostream* os); +GTEST_API_ void PrintTo(__int128_t v, ::std::ostream* os); +#endif // __SIZEOF_INT128__ + +// The default resolution used to print floating-point values uses only +// 6 digits, which can be confusing if a test compares two values whose +// difference lies in the 7th digit. So we'd like to print out numbers +// in full precision. 
+// However if the value is something simple like 1.1, full will print a
+// long string like 1.100000001 due to floating-point numbers not using
+// a base of 10. This routine returns an appropriate resolution for a
+// given floating-point number, that is, 6 if it will be accurate, or a
+// max_digits10 value (full precision) if it won't, for values between
+// 0.0001 and one million.
+// It does this by computing what those digits would be (by multiplying
+// by an appropriate power of 10), then dividing by that power again to
+// see if it gets the original value back.
+// A similar algorithm applies for values larger than one million; note
+// that for those values, we must divide to get a six-digit number, and
+// then multiply to possibly get the original value again.
+template <typename FloatType>
+int AppropriateResolution(FloatType val) {
+  int full = std::numeric_limits<FloatType>::max_digits10;
+  if (val < 0) val = -val;
+
+  if (val < 1000000) {
+    FloatType mulfor6 = 1e10;
+    if (val >= 100000.0) {  // 100,000 to 999,999
+      mulfor6 = 1.0;
+    } else if (val >= 10000.0) {
+      mulfor6 = 1e1;
+    } else if (val >= 1000.0) {
+      mulfor6 = 1e2;
+    } else if (val >= 100.0) {
+      mulfor6 = 1e3;
+    } else if (val >= 10.0) {
+      mulfor6 = 1e4;
+    } else if (val >= 1.0) {
+      mulfor6 = 1e5;
+    } else if (val >= 0.1) {
+      mulfor6 = 1e6;
+    } else if (val >= 0.01) {
+      mulfor6 = 1e7;
+    } else if (val >= 0.001) {
+      mulfor6 = 1e8;
+    } else if (val >= 0.0001) {
+      mulfor6 = 1e9;
+    }
+    if (static_cast<int32_t>(val * mulfor6 + 0.5) / mulfor6 == val) return 6;
+  } else if (val < 1e10) {
+    FloatType divfor6 = 1.0;
+    if (val >= 1e9) {  // 1,000,000,000 to 9,999,999,999
+      divfor6 = 10000;
+    } else if (val >= 1e8) {  // 100,000,000 to 999,999,999
+      divfor6 = 1000;
+    } else if (val >= 1e7) {  // 10,000,000 to 99,999,999
+      divfor6 = 100;
+    } else if (val >= 1e6) {  // 1,000,000 to 9,999,999
+      divfor6 = 10;
+    }
+    if (static_cast<int32_t>(val / divfor6 + 0.5) * divfor6 == val) return 6;
+  }
+  return full;
+}
+
+inline void PrintTo(float f, ::std::ostream* os) {
+  auto old_precision = os->precision();
+  os->precision(AppropriateResolution(f));
+  *os << f;
+  os->precision(old_precision);
+}
+
+inline void PrintTo(double d, ::std::ostream* os) {
+  auto old_precision = os->precision();
+  os->precision(AppropriateResolution(d));
+  *os << d;
+  os->precision(old_precision);
+}
+
 // Overloads for C strings.
 GTEST_API_ void PrintTo(const char* s, ::std::ostream* os);
 inline void PrintTo(char* s, ::std::ostream* os) {
@@ -545,13 +626,13 @@ void PrintRawArrayTo(const T a[], size_t count, ::std::ostream* os) {
 }
 
 // Overloads for ::std::string.
-GTEST_API_ void PrintStringTo(const ::std::string&s, ::std::ostream* os);
+GTEST_API_ void PrintStringTo(const ::std::string& s, ::std::ostream* os);
 inline void PrintTo(const ::std::string& s, ::std::ostream* os) {
   PrintStringTo(s, os);
 }
 
 // Overloads for ::std::u8string
-#ifdef __cpp_char8_t
+#ifdef __cpp_lib_char8_t
 GTEST_API_ void PrintU8StringTo(const ::std::u8string& s, ::std::ostream* os);
 inline void PrintTo(const ::std::u8string& s, ::std::ostream* os) {
   PrintU8StringTo(s, os);
 }
@@ -572,7 +653,7 @@ inline void PrintTo(const ::std::u32string& s, ::std::ostream* os) {
 
 // Overloads for ::std::wstring.
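An aside on the floating-point printers added above: the observable effect is that values which survive a round-trip at six significant digits print compactly, while anything else prints at max_digits10. A standalone sketch using plain iostreams (not gtest internals) of the distinction:

#include <iomanip>
#include <iostream>
#include <limits>

int main() {
  const double v = 0.1 + 0.2;  // not exactly 0.3 in binary floating point
  std::cout << std::setprecision(6) << v << "\n";  // "0.3" - hides the error
  std::cout << std::setprecision(std::numeric_limits<double>::max_digits10)
            << v << "\n";  // "0.30000000000000004" - full precision
  return 0;
}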
#if GTEST_HAS_STD_WSTRING -GTEST_API_ void PrintWideStringTo(const ::std::wstring&s, ::std::ostream* os); +GTEST_API_ void PrintWideStringTo(const ::std::wstring& s, ::std::ostream* os); inline void PrintTo(const ::std::wstring& s, ::std::ostream* os) { PrintWideStringTo(s, os); } @@ -587,6 +668,12 @@ inline void PrintTo(internal::StringView sp, ::std::ostream* os) { inline void PrintTo(std::nullptr_t, ::std::ostream* os) { *os << "(nullptr)"; } +#if GTEST_HAS_RTTI +inline void PrintTo(const std::type_info& info, std::ostream* os) { + *os << internal::GetTypeName(info); +} +#endif // GTEST_HAS_RTTI + template void PrintTo(std::reference_wrapper ref, ::std::ostream* os) { UniversalPrinter::Print(ref.get(), os); @@ -744,6 +831,14 @@ class UniversalPrinter> { } }; +template <> +class UniversalPrinter { + public: + static void Print(decltype(Nullopt()), ::std::ostream* os) { + *os << "(nullopt)"; + } +}; + #endif // GTEST_INTERNAL_HAS_OPTIONAL #if GTEST_INTERNAL_HAS_VARIANT @@ -802,8 +897,8 @@ void UniversalPrintArray(const T* begin, size_t len, ::std::ostream* os) { } } // This overload prints a (const) char array compactly. -GTEST_API_ void UniversalPrintArray( - const char* begin, size_t len, ::std::ostream* os); +GTEST_API_ void UniversalPrintArray(const char* begin, size_t len, + ::std::ostream* os); #ifdef __cpp_char8_t // This overload prints a (const) char8_t array compactly. @@ -820,8 +915,8 @@ GTEST_API_ void UniversalPrintArray(const char32_t* begin, size_t len, ::std::ostream* os); // This overload prints a (const) wchar_t array compactly. -GTEST_API_ void UniversalPrintArray( - const wchar_t* begin, size_t len, ::std::ostream* os); +GTEST_API_ void UniversalPrintArray(const wchar_t* begin, size_t len, + ::std::ostream* os); // Implements printing an array type T[N]. template @@ -872,6 +967,13 @@ class UniversalTersePrinter { UniversalPrint(value, os); } }; +template +class UniversalTersePrinter> { + public: + static void Print(std::reference_wrapper value, ::std::ostream* os) { + UniversalTersePrinter::Print(value.get(), os); + } +}; template class UniversalTersePrinter { public: @@ -980,10 +1082,10 @@ void UniversalPrint(const T& value, ::std::ostream* os) { UniversalPrinter::Print(value, os); } -typedef ::std::vector< ::std::string> Strings; +typedef ::std::vector<::std::string> Strings; - // Tersely prints the first N fields of a tuple to a string vector, - // one element for each field. +// Tersely prints the first N fields of a tuple to a string vector, +// one element for each field. template void TersePrintPrefixToStrings(const Tuple&, std::integral_constant, Strings*) {} diff --git a/deps/googletest/include/gtest/gtest-spi.h b/deps/googletest/include/gtest/gtest-spi.h index eacef44669e810..c0613b6959560b 100644 --- a/deps/googletest/include/gtest/gtest-spi.h +++ b/deps/googletest/include/gtest/gtest-spi.h @@ -27,15 +27,14 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// // Utilities for testing Google Test itself and code that uses Google Test // (e.g. frameworks built on top of Google Test). 
-// GOOGLETEST_CM0004 DO NOT DELETE - #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_SPI_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_SPI_H_ +#include + #include "gtest/gtest.h" GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \ @@ -88,7 +87,10 @@ class GTEST_API_ ScopedFakeTestPartResultReporter TestPartResultReporterInterface* old_reporter_; TestPartResultArray* const result_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedFakeTestPartResultReporter); + ScopedFakeTestPartResultReporter(const ScopedFakeTestPartResultReporter&) = + delete; + ScopedFakeTestPartResultReporter& operator=( + const ScopedFakeTestPartResultReporter&) = delete; }; namespace internal { @@ -104,12 +106,14 @@ class GTEST_API_ SingleFailureChecker { SingleFailureChecker(const TestPartResultArray* results, TestPartResult::Type type, const std::string& substr); ~SingleFailureChecker(); + private: const TestPartResultArray* const results_; const TestPartResult::Type type_; const std::string substr_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(SingleFailureChecker); + SingleFailureChecker(const SingleFailureChecker&) = delete; + SingleFailureChecker& operator=(const SingleFailureChecker&) = delete; }; } // namespace internal @@ -119,7 +123,8 @@ class GTEST_API_ SingleFailureChecker { GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 // A set of macros for testing Google Test assertions or code that's expected -// to generate Google Test fatal failures. It verifies that the given +// to generate Google Test fatal failures (e.g. a failure from an ASSERT_EQ, but +// not a non-fatal failure, as from EXPECT_EQ). It verifies that the given // statement will cause exactly one fatal Google Test failure with 'substr' // being part of the failure message. // @@ -141,44 +146,46 @@ GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 // helper macro, due to some peculiarity in how the preprocessor // works. The AcceptsMacroThatExpandsToUnprotectedComma test in // gtest_unittest.cc will fail to compile if we do that. 
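A usage sketch for EXPECT_FATAL_FAILURE, whose reformatted definition follows below (hypothetical test code; the failing statement lives in a helper function because the macro runs it inside a generated static method):

#include "gtest/gtest-spi.h"

static void CheckNotNull(const int* p) {
  ASSERT_NE(p, nullptr) << "null input";  // ASSERT_* failures are fatal
}

TEST(SpiDemo, TrapsFatalFailure) {
  // Passes if and only if CheckNotNull raises exactly one fatal failure
  // whose message contains "null input".
  EXPECT_FATAL_FAILURE(CheckNotNull(nullptr), "null input");
}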
-#define EXPECT_FATAL_FAILURE(statement, substr) \
-  do { \
-    class GTestExpectFatalFailureHelper {\
-     public:\
-      static void Execute() { statement; }\
-    };\
-    ::testing::TestPartResultArray gtest_failures;\
-    ::testing::internal::SingleFailureChecker gtest_checker(\
-        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\
-    {\
-      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
-          ::testing::ScopedFakeTestPartResultReporter:: \
-          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\
-      GTestExpectFatalFailureHelper::Execute();\
-    }\
+#define EXPECT_FATAL_FAILURE(statement, substr)                               \
+  do {                                                                        \
+    class GTestExpectFatalFailureHelper {                                     \
+     public:                                                                  \
+      static void Execute() { statement; }                                    \
+    };                                                                        \
+    ::testing::TestPartResultArray gtest_failures;                            \
+    ::testing::internal::SingleFailureChecker gtest_checker(                  \
+        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr)); \
+    {                                                                         \
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(             \
+          ::testing::ScopedFakeTestPartResultReporter::                       \
+              INTERCEPT_ONLY_CURRENT_THREAD,                                  \
+          &gtest_failures);                                                   \
+      GTestExpectFatalFailureHelper::Execute();                               \
+    }                                                                         \
   } while (::testing::internal::AlwaysFalse())
 
-#define EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substr) \
-  do { \
-    class GTestExpectFatalFailureHelper {\
-     public:\
-      static void Execute() { statement; }\
-    };\
-    ::testing::TestPartResultArray gtest_failures;\
-    ::testing::internal::SingleFailureChecker gtest_checker(\
-        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\
-    {\
-      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
-          ::testing::ScopedFakeTestPartResultReporter:: \
-          INTERCEPT_ALL_THREADS, &gtest_failures);\
-      GTestExpectFatalFailureHelper::Execute();\
-    }\
+#define EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substr)                \
+  do {                                                                        \
+    class GTestExpectFatalFailureHelper {                                     \
+     public:                                                                  \
+      static void Execute() { statement; }                                    \
+    };                                                                        \
+    ::testing::TestPartResultArray gtest_failures;                            \
+    ::testing::internal::SingleFailureChecker gtest_checker(                  \
+        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr)); \
+    {                                                                         \
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(             \
+          ::testing::ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, \
+          &gtest_failures);                                                   \
+      GTestExpectFatalFailureHelper::Execute();                               \
+    }                                                                         \
   } while (::testing::internal::AlwaysFalse())
 
 // A macro for testing Google Test assertions or code that's expected to
-// generate Google Test non-fatal failures. It asserts that the given
-// statement will cause exactly one non-fatal Google Test failure with 'substr'
-// being part of the failure message.
+// generate Google Test non-fatal failures (e.g. a failure from an EXPECT_EQ,
+// but not from an ASSERT_EQ). It asserts that the given statement will cause
+// exactly one non-fatal Google Test failure with 'substr' being part of the
+// failure message.
 //
 // There are two different versions of this macro. EXPECT_NONFATAL_FAILURE only
 // affects and considers failures generated in the current thread and
@@ -207,32 +214,37 @@ GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251
 // instead of
 //   GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement)
 // to avoid an MSVC warning on unreachable code.
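A matching sketch for EXPECT_NONFATAL_FAILURE, ahead of its reformatted definition below (hypothetical test code):

#include "gtest/gtest-spi.h"

TEST(SpiDemo, TrapsNonFatalFailure) {
  // EXPECT_* assertions are non-fatal, so execution continues past them;
  // the checker verifies exactly one such failure mentioning the substring.
  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << "values differ", "values differ");
}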
-#define EXPECT_NONFATAL_FAILURE(statement, substr) \
-  do {\
-    ::testing::TestPartResultArray gtest_failures;\
-    ::testing::internal::SingleFailureChecker gtest_checker(\
+#define EXPECT_NONFATAL_FAILURE(statement, substr)                    \
+  do {                                                                \
+    ::testing::TestPartResultArray gtest_failures;                    \
+    ::testing::internal::SingleFailureChecker gtest_checker(          \
         &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \
-        (substr));\
-    {\
-      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
-          ::testing::ScopedFakeTestPartResultReporter:: \
-          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\
-      if (::testing::internal::AlwaysTrue()) { statement; }\
-    }\
+        (substr));                                                    \
+    {                                                                 \
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(     \
+          ::testing::ScopedFakeTestPartResultReporter::               \
+              INTERCEPT_ONLY_CURRENT_THREAD,                          \
+          &gtest_failures);                                           \
+      if (::testing::internal::AlwaysTrue()) {                        \
+        statement;                                                    \
+      }                                                               \
+    }                                                                 \
   } while (::testing::internal::AlwaysFalse())
 
-#define EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substr) \
-  do {\
-    ::testing::TestPartResultArray gtest_failures;\
-    ::testing::internal::SingleFailureChecker gtest_checker(\
-        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \
-        (substr));\
-    {\
-      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\
+#define EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substr)             \
+  do {                                                                        \
+    ::testing::TestPartResultArray gtest_failures;                            \
+    ::testing::internal::SingleFailureChecker gtest_checker(                  \
+        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure,         \
+        (substr));                                                            \
+    {                                                                         \
+      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(             \
          ::testing::ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, \
-          &gtest_failures);\
-      if (::testing::internal::AlwaysTrue()) { statement; }\
-    }\
+          &gtest_failures);                                                   \
+      if (::testing::internal::AlwaysTrue()) {                                \
+        statement;                                                            \
+      }                                                                       \
+    }                                                                         \
   } while (::testing::internal::AlwaysFalse())
 
 #endif  // GOOGLETEST_INCLUDE_GTEST_GTEST_SPI_H_
diff --git a/deps/googletest/include/gtest/gtest-test-part.h b/deps/googletest/include/gtest/gtest-test-part.h
index 203fdf98c69fcd..8290b4d653d167 100644
--- a/deps/googletest/include/gtest/gtest-test-part.h
+++ b/deps/googletest/include/gtest/gtest-test-part.h
@@ -26,14 +26,19 @@
 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// GOOGLETEST_CM0001 DO NOT DELETE
+
+// IWYU pragma: private, include "gtest/gtest.h"
+// IWYU pragma: friend gtest/.*
+// IWYU pragma: friend gmock/.*
 
 #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_TEST_PART_H_
 #define GOOGLETEST_INCLUDE_GTEST_GTEST_TEST_PART_H_
 
 #include <iosfwd>
+#include <ostream>
+#include <string>
 #include <vector>
+
 #include "gtest/internal/gtest-internal.h"
 #include "gtest/internal/gtest-string.h"
@@ -142,7 +147,8 @@ class GTEST_API_ TestPartResultArray {
  private:
   std::vector<TestPartResult> array_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestPartResultArray);
+  TestPartResultArray(const TestPartResultArray&) = delete;
+  TestPartResultArray& operator=(const TestPartResultArray&) = delete;
 };
 
 // This interface knows how to report a test part result.
@@ -168,11 +174,13 @@ class GTEST_API_ HasNewFatalFailureHelper
   ~HasNewFatalFailureHelper() override;
   void ReportTestPartResult(const TestPartResult& result) override;
   bool has_new_fatal_failure() const { return has_new_fatal_failure_; }
+
  private:
   bool has_new_fatal_failure_;
   TestPartResultReporterInterface* original_reporter_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(HasNewFatalFailureHelper);
+  HasNewFatalFailureHelper(const HasNewFatalFailureHelper&) = delete;
+  HasNewFatalFailureHelper& operator=(const HasNewFatalFailureHelper&) = delete;
 };
 
 }  // namespace internal
diff --git a/deps/googletest/include/gtest/gtest-typed-test.h b/deps/googletest/include/gtest/gtest-typed-test.h
index 9fdc6be10dc26b..bd35a326601304 100644
--- a/deps/googletest/include/gtest/gtest-typed-test.h
+++ b/deps/googletest/include/gtest/gtest-typed-test.h
@@ -27,7 +27,9 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// GOOGLETEST_CM0001 DO NOT DELETE
+// IWYU pragma: private, include "gtest/gtest.h"
+// IWYU pragma: friend gtest/.*
+// IWYU pragma: friend gmock/.*
 
 #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_
 #define GOOGLETEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_
@@ -190,7 +192,7 @@ INSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes);
   typedef ::testing::internal::GenerateTypeList<Types>::type \
       GTEST_TYPE_PARAMS_(CaseName); \
   typedef ::testing::internal::NameGeneratorSelector<__VA_ARGS__>::type \
-      GTEST_NAME_GENERATOR_(CaseName)
+  GTEST_NAME_GENERATOR_(CaseName)
 
 #define TYPED_TEST(CaseName, TestName) \
   static_assert(sizeof(GTEST_STRINGIFY_(TestName)) > 1, \
@@ -256,7 +258,7 @@ INSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes);
 // #included in multiple translation units linked together.
 #define TYPED_TEST_SUITE_P(SuiteName) \
   static ::testing::internal::TypedTestSuitePState \
-      GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName)
+  GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName)
 
 // Legacy API is deprecated but still available
 #ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_
@@ -301,21 +303,21 @@ INSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes);
   REGISTER_TYPED_TEST_SUITE_P
 #endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_
 
-#define INSTANTIATE_TYPED_TEST_SUITE_P(Prefix, SuiteName, Types, ...) \
-  static_assert(sizeof(GTEST_STRINGIFY_(Prefix)) > 1, \
-                "test-suit-prefix must not be empty"); \
-  static bool gtest_##Prefix##_##SuiteName GTEST_ATTRIBUTE_UNUSED_ = \
-      ::testing::internal::TypeParameterizedTestSuite< \
-          SuiteName, GTEST_SUITE_NAMESPACE_(SuiteName)::gtest_AllTests_, \
-          ::testing::internal::GenerateTypeList<Types>::type>:: \
-          Register(GTEST_STRINGIFY_(Prefix), \
-                   ::testing::internal::CodeLocation(__FILE__, __LINE__), \
-                   &GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName), \
-                   GTEST_STRINGIFY_(SuiteName), \
-                   GTEST_REGISTERED_TEST_NAMES_(SuiteName), \
-                   ::testing::internal::GenerateNames< \
-                       ::testing::internal::NameGeneratorSelector< \
-                           __VA_ARGS__>::type, \
+#define INSTANTIATE_TYPED_TEST_SUITE_P(Prefix, SuiteName, Types, ...)         \
+  static_assert(sizeof(GTEST_STRINGIFY_(Prefix)) > 1,                         \
+                "test-suite-prefix must not be empty");                       \
+  static bool gtest_##Prefix##_##SuiteName GTEST_ATTRIBUTE_UNUSED_ =          \
+      ::testing::internal::TypeParameterizedTestSuite<                        \
+          SuiteName, GTEST_SUITE_NAMESPACE_(SuiteName)::gtest_AllTests_,      \
+          ::testing::internal::GenerateTypeList<Types>::type>::               \
+          Register(GTEST_STRINGIFY_(Prefix),                                  \
+                   ::testing::internal::CodeLocation(__FILE__, __LINE__),     \
+                   &GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName),               \
+                   GTEST_STRINGIFY_(SuiteName),                               \
+                   GTEST_REGISTERED_TEST_NAMES_(SuiteName),                   \
+                   ::testing::internal::GenerateNames<                        \
+                       ::testing::internal::NameGeneratorSelector<            \
+                           __VA_ARGS__>::type,                                \
                        ::testing::internal::GenerateTypeList<Types>::type>())
 
 // Legacy API is deprecated but still available
diff --git a/deps/googletest/include/gtest/gtest.h b/deps/googletest/include/gtest/gtest.h
index 482228a6a49b93..3e452a503f8950 100644
--- a/deps/googletest/include/gtest/gtest.h
+++ b/deps/googletest/include/gtest/gtest.h
@@ -27,7 +27,6 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-//
 // The Google C++ Testing and Mocking Framework (Google Test)
 //
 // This header file defines the public API for Google Test. It should be
@@ -47,28 +46,33 @@
 // registration from Barthelemy Dagenais' (barthelemy@prologique.com)
 // easyUnit framework.
 
-// GOOGLETEST_CM0001 DO NOT DELETE
-
 #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_H_
 #define GOOGLETEST_INCLUDE_GTEST_GTEST_H_
 
 #include <cstddef>
+#include <cstdint>
+#include <iomanip>
 #include <limits>
 #include <memory>
 #include <ostream>
+#include <set>
+#include <sstream>
+#include <string>
 #include <type_traits>
 #include <vector>
 
-#include "gtest/internal/gtest-internal.h"
-#include "gtest/internal/gtest-string.h"
+#include "gtest/gtest-assertion-result.h"
 #include "gtest/gtest-death-test.h"
 #include "gtest/gtest-matchers.h"
 #include "gtest/gtest-message.h"
 #include "gtest/gtest-param-test.h"
 #include "gtest/gtest-printers.h"
-#include "gtest/gtest_prod.h"
 #include "gtest/gtest-test-part.h"
 #include "gtest/gtest-typed-test.h"
+#include "gtest/gtest_pred_impl.h"
+#include "gtest/gtest_prod.h"
+#include "gtest/internal/gtest-internal.h"
+#include "gtest/internal/gtest-string.h"
 
 GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \
 /* class A needs to have dll-interface to be used by clients of class B */)
@@ -191,6 +195,17 @@ void ReportFailureInUnknownLocation(TestPartResult::Type result_type,
                                     const std::string& message);
 std::set<std::string>* GetIgnoredParameterizedTestSuites();
 
+// A base class that prevents subclasses from being copyable.
+// We do this instead of using '= delete' so as to avoid triggering warnings
+// inside user code regarding any of our declarations.
+class GTestNonCopyable {
+ public:
+  GTestNonCopyable() = default;
+  GTestNonCopyable(const GTestNonCopyable &) = delete;
+  GTestNonCopyable &operator=(const GTestNonCopyable &) = delete;
+  ~GTestNonCopyable() = default;
+};
+
 }  // namespace internal
 
 // The friend relationship of some of these classes is cyclic.
@@ -206,193 +221,6 @@ using TestCase = TestSuite;
 class TestInfo;
 class UnitTest;
 
-// A class for indicating whether an assertion was successful. When
-// the assertion wasn't successful, the AssertionResult object
-// remembers a non-empty message that describes how it failed.
-//
-// To create an instance of this class, use one of the factory functions
-// (AssertionSuccess() and AssertionFailure()).
-//
-// This class is useful for two purposes:
-//   1.
Defining predicate functions to be used with Boolean test assertions -// EXPECT_TRUE/EXPECT_FALSE and their ASSERT_ counterparts -// 2. Defining predicate-format functions to be -// used with predicate assertions (ASSERT_PRED_FORMAT*, etc). -// -// For example, if you define IsEven predicate: -// -// testing::AssertionResult IsEven(int n) { -// if ((n % 2) == 0) -// return testing::AssertionSuccess(); -// else -// return testing::AssertionFailure() << n << " is odd"; -// } -// -// Then the failed expectation EXPECT_TRUE(IsEven(Fib(5))) -// will print the message -// -// Value of: IsEven(Fib(5)) -// Actual: false (5 is odd) -// Expected: true -// -// instead of a more opaque -// -// Value of: IsEven(Fib(5)) -// Actual: false -// Expected: true -// -// in case IsEven is a simple Boolean predicate. -// -// If you expect your predicate to be reused and want to support informative -// messages in EXPECT_FALSE and ASSERT_FALSE (negative assertions show up -// about half as often as positive ones in our tests), supply messages for -// both success and failure cases: -// -// testing::AssertionResult IsEven(int n) { -// if ((n % 2) == 0) -// return testing::AssertionSuccess() << n << " is even"; -// else -// return testing::AssertionFailure() << n << " is odd"; -// } -// -// Then a statement EXPECT_FALSE(IsEven(Fib(6))) will print -// -// Value of: IsEven(Fib(6)) -// Actual: true (8 is even) -// Expected: false -// -// NB: Predicates that support negative Boolean assertions have reduced -// performance in positive ones so be careful not to use them in tests -// that have lots (tens of thousands) of positive Boolean assertions. -// -// To use this class with EXPECT_PRED_FORMAT assertions such as: -// -// // Verifies that Foo() returns an even number. -// EXPECT_PRED_FORMAT1(IsEven, Foo()); -// -// you need to define: -// -// testing::AssertionResult IsEven(const char* expr, int n) { -// if ((n % 2) == 0) -// return testing::AssertionSuccess(); -// else -// return testing::AssertionFailure() -// << "Expected: " << expr << " is even\n Actual: it's " << n; -// } -// -// If Foo() returns 5, you will see the following message: -// -// Expected: Foo() is even -// Actual: it's 5 -// -class GTEST_API_ AssertionResult { - public: - // Copy constructor. - // Used in EXPECT_TRUE/FALSE(assertion_result). - AssertionResult(const AssertionResult& other); - -// C4800 is a level 3 warning in Visual Studio 2015 and earlier. -// This warning is not emitted in Visual Studio 2017. -// This warning is off by default starting in Visual Studio 2019 but can be -// enabled with command-line options. -#if defined(_MSC_VER) && (_MSC_VER < 1910 || _MSC_VER >= 1920) - GTEST_DISABLE_MSC_WARNINGS_PUSH_(4800 /* forcing value to bool */) -#endif - - // Used in the EXPECT_TRUE/FALSE(bool_expression). - // - // T must be contextually convertible to bool. - // - // The second parameter prevents this overload from being considered if - // the argument is implicitly convertible to AssertionResult. In that case - // we want AssertionResult's copy constructor to be used. - template - explicit AssertionResult( - const T& success, - typename std::enable_if< - !std::is_convertible::value>::type* - /*enabler*/ - = nullptr) - : success_(success) {} - -#if defined(_MSC_VER) && (_MSC_VER < 1910 || _MSC_VER >= 1920) - GTEST_DISABLE_MSC_WARNINGS_POP_() -#endif - - // Assignment operator. - AssertionResult& operator=(AssertionResult other) { - swap(other); - return *this; - } - - // Returns true if and only if the assertion succeeded. 
- operator bool() const { return success_; } // NOLINT - - // Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE. - AssertionResult operator!() const; - - // Returns the text streamed into this AssertionResult. Test assertions - // use it when they fail (i.e., the predicate's outcome doesn't match the - // assertion's expectation). When nothing has been streamed into the - // object, returns an empty string. - const char* message() const { - return message_.get() != nullptr ? message_->c_str() : ""; - } - // Deprecated; please use message() instead. - const char* failure_message() const { return message(); } - - // Streams a custom failure message into this object. - template AssertionResult& operator<<(const T& value) { - AppendMessage(Message() << value); - return *this; - } - - // Allows streaming basic output manipulators such as endl or flush into - // this object. - AssertionResult& operator<<( - ::std::ostream& (*basic_manipulator)(::std::ostream& stream)) { - AppendMessage(Message() << basic_manipulator); - return *this; - } - - private: - // Appends the contents of message to message_. - void AppendMessage(const Message& a_message) { - if (message_.get() == nullptr) message_.reset(new ::std::string); - message_->append(a_message.GetString().c_str()); - } - - // Swap the contents of this AssertionResult with other. - void swap(AssertionResult& other); - - // Stores result of the assertion predicate. - bool success_; - // Stores the message describing the condition in case the expectation - // construct is not satisfied with the predicate's outcome. - // Referenced via a pointer to avoid taking too much stack frame space - // with test assertions. - std::unique_ptr< ::std::string> message_; -}; - -// Makes a successful assertion result. -GTEST_API_ AssertionResult AssertionSuccess(); - -// Makes a failed assertion result. -GTEST_API_ AssertionResult AssertionFailure(); - -// Makes a failed assertion result with the given failure message. -// Deprecated; use AssertionFailure() << msg. -GTEST_API_ AssertionResult AssertionFailure(const Message& msg); - -} // namespace testing - -// Includes the auto-generated header that implements a family of generic -// predicate assertion macros. This include comes late because it relies on -// APIs declared above. -#include "gtest/gtest_pred_impl.h" - -namespace testing { - // The abstract class that all tests inherit from. // // In Google Test, a unit test program contains one or many TestSuites, and @@ -473,7 +301,7 @@ class GTEST_API_ Test { // SetUp/TearDown method of Environment objects registered with Google // Test) will be output as attributes of the element. static void RecordProperty(const std::string& key, const std::string& value); - static void RecordProperty(const std::string& key, int value); + static void RecordProperty(const std::string& key, int64_t value); protected: // Creates a Test object. @@ -527,7 +355,8 @@ class GTEST_API_ Test { virtual Setup_should_be_spelled_SetUp* Setup() { return nullptr; } // We disallow copying Tests. - GTEST_DISALLOW_COPY_AND_ASSIGN_(Test); + Test(const Test&) = delete; + Test& operator=(const Test&) = delete; }; typedef internal::TimeInMillis TimeInMillis; @@ -541,24 +370,17 @@ class TestProperty { // C'tor. TestProperty does NOT have a default constructor. // Always use this constructor (with parameters) to create a // TestProperty object. 
- TestProperty(const std::string& a_key, const std::string& a_value) : - key_(a_key), value_(a_value) { - } + TestProperty(const std::string& a_key, const std::string& a_value) + : key_(a_key), value_(a_value) {} // Gets the user supplied key. - const char* key() const { - return key_.c_str(); - } + const char* key() const { return key_.c_str(); } // Gets the user supplied value. - const char* value() const { - return value_.c_str(); - } + const char* value() const { return value_.c_str(); } // Sets a new value, overriding the one supplied in the constructor. - void SetValue(const std::string& new_value) { - value_ = new_value; - } + void SetValue(const std::string& new_value) { value_ = new_value; } private: // The key supplied by the user. @@ -692,7 +514,8 @@ class GTEST_API_ TestResult { TimeInMillis elapsed_time_; // We disallow copying TestResult. - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestResult); + TestResult(const TestResult&) = delete; + TestResult& operator=(const TestResult&) = delete; }; // class TestResult // A TestInfo object stores the following information about a test: @@ -816,8 +639,8 @@ class GTEST_API_ TestInfo { } // These fields are immutable properties of the test. - const std::string test_suite_name_; // test suite name - const std::string name_; // Test name + const std::string test_suite_name_; // test suite name + const std::string name_; // Test name // Name of the parameter type, or NULL if this is not a typed or a // type-parameterized test. const std::unique_ptr type_param_; @@ -838,7 +661,8 @@ class GTEST_API_ TestInfo { // test for the second time. TestResult result_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestInfo); + TestInfo(const TestInfo&) = delete; + TestInfo& operator=(const TestInfo&) = delete; }; // A test suite, which consists of a vector of TestInfos. @@ -946,7 +770,7 @@ class GTEST_API_ TestSuite { // Adds a TestInfo to this test suite. Will delete the TestInfo upon // destruction of the TestSuite object. - void AddTestInfo(TestInfo * test_info); + void AddTestInfo(TestInfo* test_info); // Clears the results of all tests in this test suite. void ClearResult(); @@ -1047,7 +871,8 @@ class GTEST_API_ TestSuite { TestResult ad_hoc_test_result_; // We disallow copying TestSuites. - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestSuite); + TestSuite(const TestSuite&) = delete; + TestSuite& operator=(const TestSuite&) = delete; }; // An Environment object is capable of setting up and tearing down an @@ -1074,6 +899,7 @@ class Environment { // Override this to define how to tear down the environment. virtual void TearDown() {} + private: // If you see an error about overriding the following function or // about it being private, you have mis-spelled SetUp() as Setup(). @@ -1125,6 +951,9 @@ class TestEventListener { // Fired before the test starts. virtual void OnTestStart(const TestInfo& test_info) = 0; + // Fired when a test is disabled + virtual void OnTestDisabled(const TestInfo& /*test_info*/) {} + // Fired after a failed assertion or a SUCCEED() invocation. // If you want to throw an exception from this function to skip to the next // TEST, it must be AssertionException defined above, or inherited from it. @@ -1148,8 +977,7 @@ class TestEventListener { virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test) = 0; // Fired after each iteration of tests finishes. 
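A sketch of a listener consuming the new OnTestDisabled hook declared above (hypothetical user code; EmptyTestEventListener, whose matching no-op override appears a few hunks below, stubs out the remaining callbacks):

#include <iostream>
#include "gtest/gtest.h"

class DisabledTestLogger : public ::testing::EmptyTestEventListener {
  void OnTestDisabled(const ::testing::TestInfo& info) override {
    std::cout << "[ DISABLED ] " << info.test_suite_name() << "."
              << info.name() << "\n";
  }
};

// Registration, typically in main() after InitGoogleTest; the listener
// list takes ownership of the pointer:
//   ::testing::UnitTest::GetInstance()->listeners().Append(
//       new DisabledTestLogger);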
- virtual void OnTestIterationEnd(const UnitTest& unit_test, - int iteration) = 0; + virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration) = 0; // Fired after all test activities have ended. virtual void OnTestProgramEnd(const UnitTest& unit_test) = 0; @@ -1174,6 +1002,7 @@ class EmptyTestEventListener : public TestEventListener { #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ void OnTestStart(const TestInfo& /*test_info*/) override {} + void OnTestDisabled(const TestInfo& /*test_info*/) override {} void OnTestPartResult(const TestPartResult& /*test_part_result*/) override {} void OnTestEnd(const TestInfo& /*test_info*/) override {} void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {} @@ -1263,7 +1092,8 @@ class GTEST_API_ TestEventListeners { TestEventListener* default_xml_generator_; // We disallow copying TestEventListeners. - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventListeners); + TestEventListeners(const TestEventListeners&) = delete; + TestEventListeners& operator=(const TestEventListeners&) = delete; }; // A UnitTest consists of a vector of TestSuites. @@ -1306,8 +1136,7 @@ class GTEST_API_ UnitTest { // Returns the TestInfo object for the test that's currently running, // or NULL if no test is running. - const TestInfo* current_test_info() const - GTEST_LOCK_EXCLUDED_(mutex_); + const TestInfo* current_test_info() const GTEST_LOCK_EXCLUDED_(mutex_); // Returns the random seed used at the start of the current test run. int random_seed() const; @@ -1413,8 +1242,7 @@ class GTEST_API_ UnitTest { // eventually call this to report their results. The user code // should use the assertion macros instead of calling this directly. void AddTestPartResult(TestPartResult::Type result_type, - const char* file_name, - int line_number, + const char* file_name, int line_number, const std::string& message, const std::string& os_stack_trace) GTEST_LOCK_EXCLUDED_(mutex_); @@ -1445,8 +1273,7 @@ class GTEST_API_ UnitTest { friend std::set* internal::GetIgnoredParameterizedTestSuites(); friend internal::UnitTestImpl* internal::GetUnitTestImpl(); friend void internal::ReportFailureInUnknownLocation( - TestPartResult::Type result_type, - const std::string& message); + TestPartResult::Type result_type, const std::string& message); // Creates an empty UnitTest. UnitTest(); @@ -1460,8 +1287,7 @@ class GTEST_API_ UnitTest { GTEST_LOCK_EXCLUDED_(mutex_); // Pops a trace from the per-thread Google Test trace stack. - void PopGTestTrace() - GTEST_LOCK_EXCLUDED_(mutex_); + void PopGTestTrace() GTEST_LOCK_EXCLUDED_(mutex_); // Protects mutable state in *impl_. This is mutable as some const // methods need to lock it too. @@ -1474,7 +1300,8 @@ class GTEST_API_ UnitTest { internal::UnitTestImpl* impl_; // We disallow copying UnitTest. - GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTest); + UnitTest(const UnitTest&) = delete; + UnitTest& operator=(const UnitTest&) = delete; }; // A convenient wrapper for adding an environment for the test @@ -1525,13 +1352,11 @@ namespace internal { // when calling EXPECT_* in a tight loop. 
template AssertionResult CmpHelperEQFailure(const char* lhs_expression, - const char* rhs_expression, - const T1& lhs, const T2& rhs) { - return EqFailure(lhs_expression, - rhs_expression, + const char* rhs_expression, const T1& lhs, + const T2& rhs) { + return EqFailure(lhs_expression, rhs_expression, FormatForComparisonFailureMessage(lhs, rhs), - FormatForComparisonFailureMessage(rhs, lhs), - false); + FormatForComparisonFailureMessage(rhs, lhs), false); } // This block of code defines operator==/!= @@ -1544,8 +1369,7 @@ inline bool operator!=(faketype, faketype) { return false; } // The helper function for {ASSERT|EXPECT}_EQ. template AssertionResult CmpHelperEQ(const char* lhs_expression, - const char* rhs_expression, - const T1& lhs, + const char* rhs_expression, const T1& lhs, const T2& rhs) { if (lhs == rhs) { return AssertionSuccess(); @@ -1576,8 +1400,7 @@ class EqHelper { // Even though its body looks the same as the above version, we // cannot merge the two, as it will make anonymous enums unhappy. static AssertionResult Compare(const char* lhs_expression, - const char* rhs_expression, - BiggestInt lhs, + const char* rhs_expression, BiggestInt lhs, BiggestInt rhs) { return CmpHelperEQ(lhs_expression, rhs_expression, lhs, rhs); } @@ -1612,16 +1435,16 @@ AssertionResult CmpHelperOpFailure(const char* expr1, const char* expr2, // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. -#define GTEST_IMPL_CMP_HELPER_(op_name, op)\ -template \ -AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \ - const T1& val1, const T2& val2) {\ - if (val1 op val2) {\ - return AssertionSuccess();\ - } else {\ - return CmpHelperOpFailure(expr1, expr2, val1, val2, #op);\ - }\ -} +#define GTEST_IMPL_CMP_HELPER_(op_name, op) \ + template \ + AssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \ + const T1& val1, const T2& val2) { \ + if (val1 op val2) { \ + return AssertionSuccess(); \ + } else { \ + return CmpHelperOpFailure(expr1, expr2, val1, val2, #op); \ + } \ + } // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. @@ -1643,49 +1466,42 @@ GTEST_IMPL_CMP_HELPER_(GT, >) // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. GTEST_API_ AssertionResult CmpHelperSTREQ(const char* s1_expression, const char* s2_expression, - const char* s1, - const char* s2); + const char* s1, const char* s2); // The helper function for {ASSERT|EXPECT}_STRCASEEQ. // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. GTEST_API_ AssertionResult CmpHelperSTRCASEEQ(const char* s1_expression, const char* s2_expression, - const char* s1, - const char* s2); + const char* s1, const char* s2); // The helper function for {ASSERT|EXPECT}_STRNE. // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. GTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression, const char* s2_expression, - const char* s1, - const char* s2); + const char* s1, const char* s2); // The helper function for {ASSERT|EXPECT}_STRCASENE. // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. GTEST_API_ AssertionResult CmpHelperSTRCASENE(const char* s1_expression, const char* s2_expression, - const char* s1, - const char* s2); - + const char* s1, const char* s2); // Helper function for *_STREQ on wide strings. // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. 
GTEST_API_ AssertionResult CmpHelperSTREQ(const char* s1_expression, const char* s2_expression, - const wchar_t* s1, - const wchar_t* s2); + const wchar_t* s1, const wchar_t* s2); // Helper function for *_STRNE on wide strings. // // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM. GTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression, const char* s2_expression, - const wchar_t* s1, - const wchar_t* s2); + const wchar_t* s1, const wchar_t* s2); } // namespace internal @@ -1697,32 +1513,40 @@ GTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression, // // The {needle,haystack}_expr arguments are the stringified // expressions that generated the two real arguments. -GTEST_API_ AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const char* needle, const char* haystack); -GTEST_API_ AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const wchar_t* needle, const wchar_t* haystack); -GTEST_API_ AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const char* needle, const char* haystack); -GTEST_API_ AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const wchar_t* needle, const wchar_t* haystack); -GTEST_API_ AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::string& needle, const ::std::string& haystack); -GTEST_API_ AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::string& needle, const ::std::string& haystack); +GTEST_API_ AssertionResult IsSubstring(const char* needle_expr, + const char* haystack_expr, + const char* needle, + const char* haystack); +GTEST_API_ AssertionResult IsSubstring(const char* needle_expr, + const char* haystack_expr, + const wchar_t* needle, + const wchar_t* haystack); +GTEST_API_ AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const char* needle, + const char* haystack); +GTEST_API_ AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const wchar_t* needle, + const wchar_t* haystack); +GTEST_API_ AssertionResult IsSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::string& needle, + const ::std::string& haystack); +GTEST_API_ AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::string& needle, + const ::std::string& haystack); #if GTEST_HAS_STD_WSTRING -GTEST_API_ AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::wstring& needle, const ::std::wstring& haystack); -GTEST_API_ AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::wstring& needle, const ::std::wstring& haystack); +GTEST_API_ AssertionResult IsSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::wstring& needle, + const ::std::wstring& haystack); +GTEST_API_ AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::wstring& needle, + const ::std::wstring& haystack); #endif // GTEST_HAS_STD_WSTRING namespace internal { @@ -1737,8 +1561,7 @@ namespace internal { template AssertionResult CmpHelperFloatingPointEQ(const char* lhs_expression, const char* rhs_expression, - RawType lhs_value, - RawType rhs_value) { + RawType lhs_value, RawType rhs_value) { const FloatingPoint lhs(lhs_value), rhs(rhs_value); if (lhs.AlmostEquals(rhs)) { @@ 
-1753,10 +1576,8 @@ AssertionResult CmpHelperFloatingPointEQ(const char* lhs_expression, rhs_ss << std::setprecision(std::numeric_limits::digits10 + 2) << rhs_value; - return EqFailure(lhs_expression, - rhs_expression, - StringStreamToString(&lhs_ss), - StringStreamToString(&rhs_ss), + return EqFailure(lhs_expression, rhs_expression, + StringStreamToString(&lhs_ss), StringStreamToString(&rhs_ss), false); } @@ -1766,8 +1587,7 @@ AssertionResult CmpHelperFloatingPointEQ(const char* lhs_expression, GTEST_API_ AssertionResult DoubleNearPredFormat(const char* expr1, const char* expr2, const char* abs_error_expr, - double val1, - double val2, + double val1, double val2, double abs_error); // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. @@ -1775,9 +1595,7 @@ GTEST_API_ AssertionResult DoubleNearPredFormat(const char* expr1, class GTEST_API_ AssertHelper { public: // Constructor. - AssertHelper(TestPartResult::Type type, - const char* file, - int line, + AssertHelper(TestPartResult::Type type, const char* file, int line, const char* message); ~AssertHelper(); @@ -1791,11 +1609,9 @@ class GTEST_API_ AssertHelper { // re-using stack space even for temporary variables, so every EXPECT_EQ // reserves stack space for another AssertHelper. struct AssertHelperData { - AssertHelperData(TestPartResult::Type t, - const char* srcfile, - int line_num, + AssertHelperData(TestPartResult::Type t, const char* srcfile, int line_num, const char* msg) - : type(t), file(srcfile), line(line_num), message(msg) { } + : type(t), file(srcfile), line(line_num), message(msg) {} TestPartResult::Type const type; const char* const file; @@ -1803,12 +1619,14 @@ class GTEST_API_ AssertHelper { std::string const message; private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelperData); + AssertHelperData(const AssertHelperData&) = delete; + AssertHelperData& operator=(const AssertHelperData&) = delete; }; AssertHelperData* const data_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelper); + AssertHelper(const AssertHelper&) = delete; + AssertHelper& operator=(const AssertHelper&) = delete; }; } // namespace internal @@ -1823,7 +1641,7 @@ class GTEST_API_ AssertHelper { // the GetParam() method. // // Use it with one of the parameter generator defining functions, like Range(), -// Values(), ValuesIn(), Bool(), and Combine(). +// Values(), ValuesIn(), Bool(), Combine(), and ConvertGenerator(). // // class FooTest : public ::testing::TestWithParam { // protected: @@ -1865,15 +1683,14 @@ class WithParamInterface { private: // Sets parameter value. The caller is responsible for making sure the value // remains alive and unchanged throughout the current test. - static void SetParam(const ParamType* parameter) { - parameter_ = parameter; - } + static void SetParam(const ParamType* parameter) { parameter_ = parameter; } // Static value used for accessing parameter during a test lifetime. static const ParamType* parameter_; // TestClass must be a subclass of WithParamInterface and Test. - template friend class internal::ParameterizedTestFactory; + template + friend class internal::ParameterizedTestFactory; }; template @@ -1883,8 +1700,7 @@ const T* WithParamInterface::parameter_ = nullptr; // WithParamInterface, and can just inherit from ::testing::TestWithParam. template -class TestWithParam : public Test, public WithParamInterface { -}; +class TestWithParam : public Test, public WithParamInterface {}; // Macros for indicating success/failure in test code. 
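The two comparison paths reformatted above differ in what they bound: CmpHelperFloatingPointEQ (behind EXPECT_FLOAT_EQ/EXPECT_DOUBLE_EQ) checks closeness in units in the last place, while DoubleNearPredFormat (behind EXPECT_NEAR) bounds the absolute error. A hypothetical test contrasting them:

TEST(FloatCompare, UlpsVersusAbsoluteError) {
  // 0.1 + 0.2 differs from 0.3 by roughly one ULP, so the ULP-based
  // comparison accepts it.
  EXPECT_DOUBLE_EQ(0.1 + 0.2, 0.3);
  // EXPECT_NEAR instead takes an explicit absolute error bound.
  EXPECT_NEAR(0.1 + 0.2, 0.3, 1e-15);
}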
@@ -1915,7 +1731,7 @@ class TestWithParam : public Test, public WithParamInterface { // Generates a nonfatal failure at the given source file location with // a generic message. -#define ADD_FAILURE_AT(file, line) \ +#define ADD_FAILURE_AT(file, line) \ GTEST_MESSAGE_AT_(file, line, "Failed", \ ::testing::TestPartResult::kNonFatalFailure) @@ -1923,14 +1739,14 @@ class TestWithParam : public Test, public WithParamInterface { #define GTEST_FAIL() GTEST_FATAL_FAILURE_("Failed") // Like GTEST_FAIL(), but at the given source file location. -#define GTEST_FAIL_AT(file, line) \ - GTEST_MESSAGE_AT_(file, line, "Failed", \ - ::testing::TestPartResult::kFatalFailure) +#define GTEST_FAIL_AT(file, line) \ + return GTEST_MESSAGE_AT_(file, line, "Failed", \ + ::testing::TestPartResult::kFatalFailure) // Define this macro to 1 to omit the definition of FAIL(), which is a // generic name and clashes with some other libraries. #if !GTEST_DONT_DEFINE_FAIL -# define FAIL() GTEST_FAIL() +#define FAIL() GTEST_FAIL() #endif // Generates a success with a generic message. @@ -1939,7 +1755,7 @@ class TestWithParam : public Test, public WithParamInterface { // Define this macro to 1 to omit the definition of SUCCEED(), which // is a generic name and clashes with some other libraries. #if !GTEST_DONT_DEFINE_SUCCEED -# define SUCCEED() GTEST_SUCCEED() +#define SUCCEED() GTEST_SUCCEED() #endif // Macros for testing exceptions. @@ -1967,16 +1783,15 @@ class TestWithParam : public Test, public WithParamInterface { // Boolean assertions. Condition can be either a Boolean expression or an // AssertionResult. For more information on how to use AssertionResult with // these macros see comments on that class. -#define GTEST_EXPECT_TRUE(condition) \ +#define GTEST_EXPECT_TRUE(condition) \ GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \ GTEST_NONFATAL_FAILURE_) -#define GTEST_EXPECT_FALSE(condition) \ +#define GTEST_EXPECT_FALSE(condition) \ GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \ GTEST_NONFATAL_FAILURE_) #define GTEST_ASSERT_TRUE(condition) \ - GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \ - GTEST_FATAL_FAILURE_) -#define GTEST_ASSERT_FALSE(condition) \ + GTEST_TEST_BOOLEAN_(condition, #condition, false, true, GTEST_FATAL_FAILURE_) +#define GTEST_ASSERT_FALSE(condition) \ GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \ GTEST_FATAL_FAILURE_) @@ -2075,27 +1890,27 @@ class TestWithParam : public Test, public WithParamInterface { // ASSERT_XY(), which clashes with some users' own code. #if !GTEST_DONT_DEFINE_ASSERT_EQ -# define ASSERT_EQ(val1, val2) GTEST_ASSERT_EQ(val1, val2) +#define ASSERT_EQ(val1, val2) GTEST_ASSERT_EQ(val1, val2) #endif #if !GTEST_DONT_DEFINE_ASSERT_NE -# define ASSERT_NE(val1, val2) GTEST_ASSERT_NE(val1, val2) +#define ASSERT_NE(val1, val2) GTEST_ASSERT_NE(val1, val2) #endif #if !GTEST_DONT_DEFINE_ASSERT_LE -# define ASSERT_LE(val1, val2) GTEST_ASSERT_LE(val1, val2) +#define ASSERT_LE(val1, val2) GTEST_ASSERT_LE(val1, val2) #endif #if !GTEST_DONT_DEFINE_ASSERT_LT -# define ASSERT_LT(val1, val2) GTEST_ASSERT_LT(val1, val2) +#define ASSERT_LT(val1, val2) GTEST_ASSERT_LT(val1, val2) #endif #if !GTEST_DONT_DEFINE_ASSERT_GE -# define ASSERT_GE(val1, val2) GTEST_ASSERT_GE(val1, val2) +#define ASSERT_GE(val1, val2) GTEST_ASSERT_GE(val1, val2) #endif #if !GTEST_DONT_DEFINE_ASSERT_GT -# define ASSERT_GT(val1, val2) GTEST_ASSERT_GT(val1, val2) +#define ASSERT_GT(val1, val2) GTEST_ASSERT_GT(val1, val2) #endif // C-string Comparisons. 
All tests treat NULL and any non-NULL string @@ -2120,7 +1935,7 @@ class TestWithParam : public Test, public WithParamInterface { EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2) #define EXPECT_STRCASEEQ(s1, s2) \ EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, s1, s2) -#define EXPECT_STRCASENE(s1, s2)\ +#define EXPECT_STRCASENE(s1, s2) \ EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2) #define ASSERT_STREQ(s1, s2) \ @@ -2129,7 +1944,7 @@ class TestWithParam : public Test, public WithParamInterface { ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2) #define ASSERT_STRCASEEQ(s1, s2) \ ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, s1, s2) -#define ASSERT_STRCASENE(s1, s2)\ +#define ASSERT_STRCASENE(s1, s2) \ ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2) // Macros for comparing floating-point numbers. @@ -2146,29 +1961,29 @@ class TestWithParam : public Test, public WithParamInterface { // FloatingPoint template class in gtest-internal.h if you are // interested in the implementation details. -#define EXPECT_FLOAT_EQ(val1, val2)\ +#define EXPECT_FLOAT_EQ(val1, val2) \ EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ, \ val1, val2) -#define EXPECT_DOUBLE_EQ(val1, val2)\ +#define EXPECT_DOUBLE_EQ(val1, val2) \ EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ, \ val1, val2) -#define ASSERT_FLOAT_EQ(val1, val2)\ +#define ASSERT_FLOAT_EQ(val1, val2) \ ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ, \ val1, val2) -#define ASSERT_DOUBLE_EQ(val1, val2)\ +#define ASSERT_DOUBLE_EQ(val1, val2) \ ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ, \ val1, val2) -#define EXPECT_NEAR(val1, val2, abs_error)\ - EXPECT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \ - val1, val2, abs_error) +#define EXPECT_NEAR(val1, val2, abs_error) \ + EXPECT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, val1, val2, \ + abs_error) -#define ASSERT_NEAR(val1, val2, abs_error)\ - ASSERT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \ - val1, val2, abs_error) +#define ASSERT_NEAR(val1, val2, abs_error) \ + ASSERT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, val1, val2, \ + abs_error) // These predicate format functions work on floating-point values, and // can be used in {ASSERT|EXPECT}_PRED_FORMAT2*(), e.g. @@ -2182,7 +1997,6 @@ GTEST_API_ AssertionResult FloatLE(const char* expr1, const char* expr2, GTEST_API_ AssertionResult DoubleLE(const char* expr1, const char* expr2, double val1, double val2); - #if GTEST_OS_WINDOWS // Macros that test for HRESULT failure and success, these are only useful @@ -2194,17 +2008,17 @@ GTEST_API_ AssertionResult DoubleLE(const char* expr1, const char* expr2, // expected result and the actual result with both a human-readable // string representation of the error, if available, as well as the // hex result code. 
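A Windows-only usage sketch for the HRESULT assertions whose reformatted definitions follow (hypothetical; assumes COM initialization via CoInitializeEx, which returns an HRESULT):

#if GTEST_OS_WINDOWS
#include <objbase.h>

TEST(ComDemo, InitializesCom) {
  // Succeeds for any non-failing HRESULT (S_OK, S_FALSE, ...); a failing
  // code is reported with its human-readable text and hex value, as
  // described above.
  EXPECT_HRESULT_SUCCEEDED(::CoInitializeEx(nullptr, COINIT_MULTITHREADED));
  ::CoUninitialize();
}
#endif  // GTEST_OS_WINDOWS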
-# define EXPECT_HRESULT_SUCCEEDED(expr) \ - EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr)) +#define EXPECT_HRESULT_SUCCEEDED(expr) \ + EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr)) -# define ASSERT_HRESULT_SUCCEEDED(expr) \ - ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr)) +#define ASSERT_HRESULT_SUCCEEDED(expr) \ + ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr)) -# define EXPECT_HRESULT_FAILED(expr) \ - EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr)) +#define EXPECT_HRESULT_FAILED(expr) \ + EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr)) -# define ASSERT_HRESULT_FAILED(expr) \ - ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr)) +#define ASSERT_HRESULT_FAILED(expr) \ + ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr)) #endif // GTEST_OS_WINDOWS @@ -2219,9 +2033,9 @@ GTEST_API_ AssertionResult DoubleLE(const char* expr1, const char* expr2, // ASSERT_NO_FATAL_FAILURE(Process()) << "Process() failed"; // #define ASSERT_NO_FATAL_FAILURE(statement) \ - GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_FATAL_FAILURE_) + GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_FATAL_FAILURE_) #define EXPECT_NO_FATAL_FAILURE(statement) \ - GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_NONFATAL_FAILURE_) + GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_NONFATAL_FAILURE_) // Causes a trace (including the given source file path and line number, // and the given message) to be included in every test failure message generated @@ -2263,10 +2077,9 @@ class GTEST_API_ ScopedTrace { private: void PushTrace(const char* file, int line, std::string message); - GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedTrace); -} GTEST_ATTRIBUTE_UNUSED_; // A ScopedTrace object does its job in its - // c'tor and d'tor. Therefore it doesn't - // need to be used otherwise. + ScopedTrace(const ScopedTrace&) = delete; + ScopedTrace& operator=(const ScopedTrace&) = delete; +}; // Causes a trace (including the source file path, the current line // number, and the given message) to be included in every test failure @@ -2283,9 +2096,9 @@ class GTEST_API_ ScopedTrace { // Assuming that each thread maintains its own stack of traces. // Therefore, a SCOPED_TRACE() would (correctly) only affect the // assertions in its own thread. -#define SCOPED_TRACE(message) \ - ::testing::ScopedTrace GTEST_CONCAT_TOKEN_(gtest_trace_, __LINE__)(\ - __FILE__, __LINE__, (message)) +#define SCOPED_TRACE(message) \ + ::testing::ScopedTrace GTEST_CONCAT_TOKEN_(gtest_trace_, __LINE__)( \ + __FILE__, __LINE__, (message)) // Compile-time assertion for type equality. // StaticAssertTypeEq() compiles if and only if type1 and type2 @@ -2383,20 +2196,26 @@ constexpr bool StaticAssertTypeEq() noexcept { // EXPECT_EQ(a_.size(), 0); // EXPECT_EQ(b_.size(), 1); // } -// -// GOOGLETEST_CM0011 DO NOT DELETE -#if !GTEST_DONT_DEFINE_TEST -#define TEST_F(test_fixture, test_name)\ +#define GTEST_TEST_F(test_fixture, test_name) \ GTEST_TEST_(test_fixture, test_name, test_fixture, \ ::testing::internal::GetTypeId()) -#endif // !GTEST_DONT_DEFINE_TEST +#if !GTEST_DONT_DEFINE_TEST_F +#define TEST_F(test_fixture, test_name) GTEST_TEST_F(test_fixture, test_name) +#endif -// Returns a path to temporary directory. -// Tries to determine an appropriate directory for the platform. +// Returns a path to a temporary directory, which should be writable. 
It is +// implementation-dependent whether or not the path is terminated by the +// directory-separator character. GTEST_API_ std::string TempDir(); +// Returns a path to a directory that contains ancillary data files that might +// be used by tests. It is implementation dependent whether or not the path is +// terminated by the directory-separator character. The directory and the files +// in it should be considered read-only. +GTEST_API_ std::string SrcDir(); + #ifdef _MSC_VER -# pragma warning(pop) +#pragma warning(pop) #endif // Dynamically registers a test with the framework. @@ -2450,6 +2269,7 @@ GTEST_API_ std::string TempDir(); // } // ... // int main(int argc, char** argv) { +// ::testing::InitGoogleTest(&argc, argv); // std::vector values_to_test = LoadValuesFromConfig(); // RegisterMyTests(values_to_test); // ... @@ -2491,9 +2311,7 @@ TestInfo* RegisterTest(const char* test_suite_name, const char* test_name, // namespace and has an all-caps name. int RUN_ALL_TESTS() GTEST_MUST_USE_RESULT_; -inline int RUN_ALL_TESTS() { - return ::testing::UnitTest::GetInstance()->Run(); -} +inline int RUN_ALL_TESTS() { return ::testing::UnitTest::GetInstance()->Run(); } GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 diff --git a/deps/googletest/include/gtest/gtest_pred_impl.h b/deps/googletest/include/gtest/gtest_pred_impl.h index 5029a9bb02ada7..47a24aa687af6e 100644 --- a/deps/googletest/include/gtest/gtest_pred_impl.h +++ b/deps/googletest/include/gtest/gtest_pred_impl.h @@ -26,17 +26,19 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// This file is AUTOMATICALLY GENERATED on 01/02/2019 by command -// 'gen_gtest_pred_impl.py 5'. DO NOT EDIT BY HAND! // // Implements a family of generic predicate assertion macros. -// GOOGLETEST_CM0001 DO NOT DELETE + +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_ -#include "gtest/gtest.h" +#include "gtest/gtest-assertion-result.h" +#include "gtest/internal/gtest-internal.h" +#include "gtest/internal/gtest-port.h" namespace testing { @@ -72,22 +74,18 @@ namespace testing { // GTEST_ASSERT_ is the basic statement to which all of the assertions // in this file reduce. Don't use this in your code. -#define GTEST_ASSERT_(expression, on_failure) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ +#define GTEST_ASSERT_(expression, on_failure) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ if (const ::testing::AssertionResult gtest_ar = (expression)) \ - ; \ - else \ + ; \ + else \ on_failure(gtest_ar.failure_message()) - // Helper function for implementing {EXPECT|ASSERT}_PRED1. Don't use // this in your code. -template -AssertionResult AssertPred1Helper(const char* pred_text, - const char* e1, - Pred pred, - const T1& v1) { +template +AssertionResult AssertPred1Helper(const char* pred_text, const char* e1, + Pred pred, const T1& v1) { if (pred(v1)) return AssertionSuccess(); return AssertionFailure() @@ -98,40 +96,27 @@ AssertionResult AssertPred1Helper(const char* pred_text, // Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT1. // Don't use this in your code. 
-#define GTEST_PRED_FORMAT1_(pred_format, v1, on_failure)\ - GTEST_ASSERT_(pred_format(#v1, v1), \ - on_failure) +#define GTEST_PRED_FORMAT1_(pred_format, v1, on_failure) \ + GTEST_ASSERT_(pred_format(#v1, v1), on_failure) // Internal macro for implementing {EXPECT|ASSERT}_PRED1. Don't use // this in your code. -#define GTEST_PRED1_(pred, v1, on_failure)\ - GTEST_ASSERT_(::testing::AssertPred1Helper(#pred, \ - #v1, \ - pred, \ - v1), on_failure) +#define GTEST_PRED1_(pred, v1, on_failure) \ + GTEST_ASSERT_(::testing::AssertPred1Helper(#pred, #v1, pred, v1), on_failure) // Unary predicate assertion macros. #define EXPECT_PRED_FORMAT1(pred_format, v1) \ GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_NONFATAL_FAILURE_) -#define EXPECT_PRED1(pred, v1) \ - GTEST_PRED1_(pred, v1, GTEST_NONFATAL_FAILURE_) +#define EXPECT_PRED1(pred, v1) GTEST_PRED1_(pred, v1, GTEST_NONFATAL_FAILURE_) #define ASSERT_PRED_FORMAT1(pred_format, v1) \ GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_FATAL_FAILURE_) -#define ASSERT_PRED1(pred, v1) \ - GTEST_PRED1_(pred, v1, GTEST_FATAL_FAILURE_) - - +#define ASSERT_PRED1(pred, v1) GTEST_PRED1_(pred, v1, GTEST_FATAL_FAILURE_) // Helper function for implementing {EXPECT|ASSERT}_PRED2. Don't use // this in your code. -template -AssertionResult AssertPred2Helper(const char* pred_text, - const char* e1, - const char* e2, - Pred pred, - const T1& v1, +template +AssertionResult AssertPred2Helper(const char* pred_text, const char* e1, + const char* e2, Pred pred, const T1& v1, const T2& v2) { if (pred(v1, v2)) return AssertionSuccess(); @@ -145,19 +130,14 @@ AssertionResult AssertPred2Helper(const char* pred_text, // Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT2. // Don't use this in your code. -#define GTEST_PRED_FORMAT2_(pred_format, v1, v2, on_failure)\ - GTEST_ASSERT_(pred_format(#v1, #v2, v1, v2), \ - on_failure) +#define GTEST_PRED_FORMAT2_(pred_format, v1, v2, on_failure) \ + GTEST_ASSERT_(pred_format(#v1, #v2, v1, v2), on_failure) // Internal macro for implementing {EXPECT|ASSERT}_PRED2. Don't use // this in your code. -#define GTEST_PRED2_(pred, v1, v2, on_failure)\ - GTEST_ASSERT_(::testing::AssertPred2Helper(#pred, \ - #v1, \ - #v2, \ - pred, \ - v1, \ - v2), on_failure) +#define GTEST_PRED2_(pred, v1, v2, on_failure) \ + GTEST_ASSERT_(::testing::AssertPred2Helper(#pred, #v1, #v2, pred, v1, v2), \ + on_failure) // Binary predicate assertion macros. #define EXPECT_PRED_FORMAT2(pred_format, v1, v2) \ @@ -169,22 +149,12 @@ AssertionResult AssertPred2Helper(const char* pred_text, #define ASSERT_PRED2(pred, v1, v2) \ GTEST_PRED2_(pred, v1, v2, GTEST_FATAL_FAILURE_) - - // Helper function for implementing {EXPECT|ASSERT}_PRED3. Don't use // this in your code. -template -AssertionResult AssertPred3Helper(const char* pred_text, - const char* e1, - const char* e2, - const char* e3, - Pred pred, - const T1& v1, - const T2& v2, - const T3& v3) { +template +AssertionResult AssertPred3Helper(const char* pred_text, const char* e1, + const char* e2, const char* e3, Pred pred, + const T1& v1, const T2& v2, const T3& v3) { if (pred(v1, v2, v3)) return AssertionSuccess(); return AssertionFailure() @@ -198,21 +168,15 @@ AssertionResult AssertPred3Helper(const char* pred_text, // Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT3. // Don't use this in your code. 
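
The `{EXPECT|ASSERT}_PRED*` hunks above are whitespace-only reflows: each macro still stringifies the predicate and every argument (`#pred`, `#v1`, ...), so a failure reports the expression text alongside the runtime values. A minimal usage sketch of the public unary and binary forms (the `IsEven` and `MutuallyPrime` helpers are illustrative, not part of the patch):

    #include "gtest/gtest.h"

    bool IsEven(int n) { return n % 2 == 0; }
    bool MutuallyPrime(int m, int n) {
      // True iff m and n share no divisor greater than 1.
      for (int d = 2; d <= m && d <= n; ++d)
        if (m % d == 0 && n % d == 0) return false;
      return true;
    }

    TEST(PredSketch, UnaryAndBinary) {
      EXPECT_PRED1(IsEven, 4);            // passes
      EXPECT_PRED2(MutuallyPrime, 8, 5);  // passes
      // On failure these macros print the stringified call and each argument,
      // e.g. "MutuallyPrime(m, n) evaluates to false, where m = 4, n = 6".
    }
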
-#define GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, on_failure)\ - GTEST_ASSERT_(pred_format(#v1, #v2, #v3, v1, v2, v3), \ - on_failure) +#define GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, on_failure) \ + GTEST_ASSERT_(pred_format(#v1, #v2, #v3, v1, v2, v3), on_failure) // Internal macro for implementing {EXPECT|ASSERT}_PRED3. Don't use // this in your code. -#define GTEST_PRED3_(pred, v1, v2, v3, on_failure)\ - GTEST_ASSERT_(::testing::AssertPred3Helper(#pred, \ - #v1, \ - #v2, \ - #v3, \ - pred, \ - v1, \ - v2, \ - v3), on_failure) +#define GTEST_PRED3_(pred, v1, v2, v3, on_failure) \ + GTEST_ASSERT_( \ + ::testing::AssertPred3Helper(#pred, #v1, #v2, #v3, pred, v1, v2, v3), \ + on_failure) // Ternary predicate assertion macros. #define EXPECT_PRED_FORMAT3(pred_format, v1, v2, v3) \ @@ -224,25 +188,13 @@ AssertionResult AssertPred3Helper(const char* pred_text, #define ASSERT_PRED3(pred, v1, v2, v3) \ GTEST_PRED3_(pred, v1, v2, v3, GTEST_FATAL_FAILURE_) - - // Helper function for implementing {EXPECT|ASSERT}_PRED4. Don't use // this in your code. -template -AssertionResult AssertPred4Helper(const char* pred_text, - const char* e1, - const char* e2, - const char* e3, - const char* e4, - Pred pred, - const T1& v1, - const T2& v2, - const T3& v3, - const T4& v4) { +template +AssertionResult AssertPred4Helper(const char* pred_text, const char* e1, + const char* e2, const char* e3, + const char* e4, Pred pred, const T1& v1, + const T2& v2, const T3& v3, const T4& v4) { if (pred(v1, v2, v3, v4)) return AssertionSuccess(); return AssertionFailure() @@ -257,23 +209,15 @@ AssertionResult AssertPred4Helper(const char* pred_text, // Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT4. // Don't use this in your code. -#define GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, on_failure)\ - GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, v1, v2, v3, v4), \ - on_failure) +#define GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, on_failure) \ + GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, v1, v2, v3, v4), on_failure) // Internal macro for implementing {EXPECT|ASSERT}_PRED4. Don't use // this in your code. -#define GTEST_PRED4_(pred, v1, v2, v3, v4, on_failure)\ - GTEST_ASSERT_(::testing::AssertPred4Helper(#pred, \ - #v1, \ - #v2, \ - #v3, \ - #v4, \ - pred, \ - v1, \ - v2, \ - v3, \ - v4), on_failure) +#define GTEST_PRED4_(pred, v1, v2, v3, v4, on_failure) \ + GTEST_ASSERT_(::testing::AssertPred4Helper(#pred, #v1, #v2, #v3, #v4, pred, \ + v1, v2, v3, v4), \ + on_failure) // 4-ary predicate assertion macros. #define EXPECT_PRED_FORMAT4(pred_format, v1, v2, v3, v4) \ @@ -285,28 +229,15 @@ AssertionResult AssertPred4Helper(const char* pred_text, #define ASSERT_PRED4(pred, v1, v2, v3, v4) \ GTEST_PRED4_(pred, v1, v2, v3, v4, GTEST_FATAL_FAILURE_) - - // Helper function for implementing {EXPECT|ASSERT}_PRED5. Don't use // this in your code. 
-template -AssertionResult AssertPred5Helper(const char* pred_text, - const char* e1, - const char* e2, - const char* e3, - const char* e4, - const char* e5, - Pred pred, - const T1& v1, - const T2& v2, - const T3& v3, - const T4& v4, - const T5& v5) { +AssertionResult AssertPred5Helper(const char* pred_text, const char* e1, + const char* e2, const char* e3, + const char* e4, const char* e5, Pred pred, + const T1& v1, const T2& v2, const T3& v3, + const T4& v4, const T5& v5) { if (pred(v1, v2, v3, v4, v5)) return AssertionSuccess(); return AssertionFailure() @@ -322,25 +253,16 @@ AssertionResult AssertPred5Helper(const char* pred_text, // Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT5. // Don't use this in your code. -#define GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, on_failure)\ +#define GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, on_failure) \ GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, #v5, v1, v2, v3, v4, v5), \ on_failure) // Internal macro for implementing {EXPECT|ASSERT}_PRED5. Don't use // this in your code. -#define GTEST_PRED5_(pred, v1, v2, v3, v4, v5, on_failure)\ - GTEST_ASSERT_(::testing::AssertPred5Helper(#pred, \ - #v1, \ - #v2, \ - #v3, \ - #v4, \ - #v5, \ - pred, \ - v1, \ - v2, \ - v3, \ - v4, \ - v5), on_failure) +#define GTEST_PRED5_(pred, v1, v2, v3, v4, v5, on_failure) \ + GTEST_ASSERT_(::testing::AssertPred5Helper(#pred, #v1, #v2, #v3, #v4, #v5, \ + pred, v1, v2, v3, v4, v5), \ + on_failure) // 5-ary predicate assertion macros. #define EXPECT_PRED_FORMAT5(pred_format, v1, v2, v3, v4, v5) \ @@ -352,8 +274,6 @@ AssertionResult AssertPred5Helper(const char* pred_text, #define ASSERT_PRED5(pred, v1, v2, v3, v4, v5) \ GTEST_PRED5_(pred, v1, v2, v3, v4, v5, GTEST_FATAL_FAILURE_) - - } // namespace testing #endif // GOOGLETEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_ diff --git a/deps/googletest/include/gtest/gtest_prod.h b/deps/googletest/include/gtest/gtest_prod.h index 38b9d85a51efb7..1f37dc31c34722 100644 --- a/deps/googletest/include/gtest/gtest_prod.h +++ b/deps/googletest/include/gtest/gtest_prod.h @@ -27,9 +27,8 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// -// Google C++ Testing and Mocking Framework definitions useful in production code. -// GOOGLETEST_CM0003 DO NOT DELETE +// Google C++ Testing and Mocking Framework definitions useful in production +// code. #ifndef GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ #define GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ @@ -55,7 +54,7 @@ // Note: The test class must be in the same namespace as the class being tested. // For example, putting MyClassTest in an anonymous namespace will not work. -#define FRIEND_TEST(test_case_name, test_name)\ -friend class test_case_name##_##test_name##_Test +#define FRIEND_TEST(test_case_name, test_name) \ + friend class test_case_name##_##test_name##_Test #endif // GOOGLETEST_INCLUDE_GTEST_GTEST_PROD_H_ diff --git a/deps/googletest/include/gtest/internal/custom/README.md b/deps/googletest/include/gtest/internal/custom/README.md index 0af3539abf11a9..cb49e2c754c250 100644 --- a/deps/googletest/include/gtest/internal/custom/README.md +++ b/deps/googletest/include/gtest/internal/custom/README.md @@ -15,20 +15,6 @@ The custom directory is an injection point for custom user configurations. 
The following macros can be defined: -### Flag related macros: - -* `GTEST_FLAG(flag_name)` -* `GTEST_USE_OWN_FLAGFILE_FLAG_` - Define to 0 when the system provides its - own flagfile flag parsing. -* `GTEST_DECLARE_bool_(name)` -* `GTEST_DECLARE_int32_(name)` -* `GTEST_DECLARE_string_(name)` -* `GTEST_DEFINE_bool_(name, default_val, doc)` -* `GTEST_DEFINE_int32_(name, default_val, doc)` -* `GTEST_DEFINE_string_(name, default_val, doc)` -* `GTEST_FLAG_GET(flag_name)` -* `GTEST_FLAG_SET(flag_name, value)` - ### Logging: * `GTEST_LOG_(severity)` diff --git a/deps/googletest/include/gtest/internal/gtest-death-test-internal.h b/deps/googletest/include/gtest/internal/gtest-death-test-internal.h index 44277c3869bca3..4687dae2b46af6 100644 --- a/deps/googletest/include/gtest/internal/gtest-death-test-internal.h +++ b/deps/googletest/include/gtest/internal/gtest-death-test-internal.h @@ -26,21 +26,26 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// + // The Google C++ Testing and Mocking Framework (Google Test) // // This header file defines internal utilities needed for implementing // death tests. They are subject to change without notice. -// GOOGLETEST_CM0001 DO NOT DELETE + +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_ -#include "gtest/gtest-matchers.h" -#include "gtest/internal/gtest-internal.h" - #include + #include +#include + +#include "gtest/gtest-matchers.h" +#include "gtest/internal/gtest-internal.h" GTEST_DECLARE_string_(internal_run_death_test); @@ -83,17 +88,19 @@ class GTEST_API_ DeathTest { static bool Create(const char* statement, Matcher matcher, const char* file, int line, DeathTest** test); DeathTest(); - virtual ~DeathTest() { } + virtual ~DeathTest() {} // A helper class that aborts a death test when it's deleted. class ReturnSentinel { public: - explicit ReturnSentinel(DeathTest* test) : test_(test) { } + explicit ReturnSentinel(DeathTest* test) : test_(test) {} ~ReturnSentinel() { test_->Abort(TEST_ENCOUNTERED_RETURN_STATEMENT); } + private: DeathTest* const test_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ReturnSentinel); - } GTEST_ATTRIBUTE_UNUSED_; + ReturnSentinel(const ReturnSentinel&) = delete; + ReturnSentinel& operator=(const ReturnSentinel&) = delete; + }; // An enumeration of possible roles that may be taken when a death // test is encountered. EXECUTE means that the death test logic should @@ -137,7 +144,8 @@ class GTEST_API_ DeathTest { // A string containing a description of the outcome of the last death test. static std::string last_death_test_message_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(DeathTest); + DeathTest(const DeathTest&) = delete; + DeathTest& operator=(const DeathTest&) = delete; }; GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 @@ -145,7 +153,7 @@ GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 // Factory interface for death tests. May be mocked out for testing. 
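
`DeathTest` and `DeathTestFactory` are the internals behind the public `EXPECT_DEATH`/`ASSERT_EXIT` macros: the guarded statement runs in a child process, and the parent matches the child's stderr output and termination status. A short sketch of that public surface (the `Crash` helper is illustrative; suites containing death tests are conventionally named `*DeathTest`):

    #include <cstdio>
    #include <cstdlib>
    #include "gtest/gtest.h"

    void Crash(const char* msg) {
      std::fprintf(stderr, "%s\n", msg);
      std::abort();
    }

    TEST(CrashDeathTest, AbortsWithMessage) {
      // The second argument is a regular expression matched against stderr.
      EXPECT_DEATH(Crash("invariant violated"), "invariant violated");
      // ASSERT_EXIT additionally checks how the child process terminated.
      ASSERT_EXIT(std::exit(3), testing::ExitedWithCode(3), "");
    }
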
class DeathTestFactory { public: - virtual ~DeathTestFactory() { } + virtual ~DeathTestFactory() {} virtual bool Create(const char* statement, Matcher matcher, const char* file, int line, DeathTest** test) = 0; @@ -186,28 +194,28 @@ inline Matcher MakeDeathTestMatcher( // Traps C++ exceptions escaping statement and reports them as test // failures. Note that trapping SEH exceptions is not implemented here. -# if GTEST_HAS_EXCEPTIONS -# define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \ - try { \ - GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ - } catch (const ::std::exception& gtest_exception) { \ - fprintf(\ - stderr, \ - "\n%s: Caught std::exception-derived exception escaping the " \ - "death test statement. Exception message: %s\n", \ +#if GTEST_HAS_EXCEPTIONS +#define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \ + try { \ + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ + } catch (const ::std::exception& gtest_exception) { \ + fprintf( \ + stderr, \ + "\n%s: Caught std::exception-derived exception escaping the " \ + "death test statement. Exception message: %s\n", \ ::testing::internal::FormatFileLocation(__FILE__, __LINE__).c_str(), \ - gtest_exception.what()); \ - fflush(stderr); \ + gtest_exception.what()); \ + fflush(stderr); \ death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \ - } catch (...) { \ + } catch (...) { \ death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \ } -# else -# define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \ +#else +#define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \ GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) -# endif +#endif // This macro is for implementing ASSERT_DEATH*, EXPECT_DEATH*, // ASSERT_EXIT*, and EXPECT_EXIT*. @@ -263,16 +271,12 @@ inline Matcher MakeDeathTestMatcher( // RUN_ALL_TESTS was called. class InternalRunDeathTestFlag { public: - InternalRunDeathTestFlag(const std::string& a_file, - int a_line, - int an_index, + InternalRunDeathTestFlag(const std::string& a_file, int a_line, int an_index, int a_write_fd) - : file_(a_file), line_(a_line), index_(an_index), - write_fd_(a_write_fd) {} + : file_(a_file), line_(a_line), index_(an_index), write_fd_(a_write_fd) {} ~InternalRunDeathTestFlag() { - if (write_fd_ >= 0) - posix::Close(write_fd_); + if (write_fd_ >= 0) posix::Close(write_fd_); } const std::string& file() const { return file_; } @@ -286,7 +290,8 @@ class InternalRunDeathTestFlag { int index_; int write_fd_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(InternalRunDeathTestFlag); + InternalRunDeathTestFlag(const InternalRunDeathTestFlag&) = delete; + InternalRunDeathTestFlag& operator=(const InternalRunDeathTestFlag&) = delete; }; // Returns a newly created InternalRunDeathTestFlag object with fields diff --git a/deps/googletest/include/gtest/internal/gtest-filepath.h b/deps/googletest/include/gtest/internal/gtest-filepath.h index 0c033abc34e003..5189c81dabfa66 100644 --- a/deps/googletest/include/gtest/internal/gtest-filepath.h +++ b/deps/googletest/include/gtest/internal/gtest-filepath.h @@ -26,7 +26,7 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-// + // Google Test filepath utilities // // This header file declares classes and functions used internally by @@ -35,16 +35,23 @@ // This file is #included in gtest/internal/gtest-internal.h. // Do not include this header file separately! -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_ +#include + +#include "gtest/internal/gtest-port.h" #include "gtest/internal/gtest-string.h" GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \ /* class A needs to have dll-interface to be used by clients of class B */) +#if GTEST_HAS_FILE_SYSTEM + namespace testing { namespace internal { @@ -61,8 +68,8 @@ namespace internal { class GTEST_API_ FilePath { public: - FilePath() : pathname_("") { } - FilePath(const FilePath& rhs) : pathname_(rhs.pathname_) { } + FilePath() : pathname_("") {} + FilePath(const FilePath& rhs) : pathname_(rhs.pathname_) {} explicit FilePath(const std::string& pathname) : pathname_(pathname) { Normalize(); @@ -73,9 +80,7 @@ class GTEST_API_ FilePath { return *this; } - void Set(const FilePath& rhs) { - pathname_ = rhs.pathname_; - } + void Set(const FilePath& rhs) { pathname_ = rhs.pathname_; } const std::string& string() const { return pathname_; } const char* c_str() const { return pathname_.c_str(); } @@ -88,8 +93,7 @@ class GTEST_API_ FilePath { // than zero (e.g., 12), returns "dir/test_12.xml". // On Windows platform, uses \ as the separator rather than /. static FilePath MakeFileName(const FilePath& directory, - const FilePath& base_name, - int number, + const FilePath& base_name, int number, const char* extension); // Given directory = "dir", relative_path = "test.xml", @@ -200,6 +204,16 @@ class GTEST_API_ FilePath { // separators. Returns NULL if no path separator was found. const char* FindLastPathSeparator() const; + // Returns the length of the path root, including the directory separator at + // the end of the prefix. Returns zero by definition if the path is relative. + // Examples: + // - [Windows] "..\Sibling" => 0 + // - [Windows] "\Windows" => 1 + // - [Windows] "C:/Windows\Notepad.exe" => 3 + // - [Windows] "\\Host\Share\C$/Windows" => 13 + // - [UNIX] "/bin" => 1 + size_t CalculateRootLength() const; + std::string pathname_; }; // class FilePath @@ -208,4 +222,6 @@ class GTEST_API_ FilePath { GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 +#endif // GTEST_HAS_FILE_SYSTEM + #endif // GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_ diff --git a/deps/googletest/include/gtest/internal/gtest-internal.h b/deps/googletest/include/gtest/internal/gtest-internal.h index f8cbdbd81d9a4c..3121d428b39844 100644 --- a/deps/googletest/include/gtest/internal/gtest-internal.h +++ b/deps/googletest/include/gtest/internal/gtest-internal.h @@ -26,13 +26,15 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// + // The Google C++ Testing and Mocking Framework (Google Test) // // This header file declares functions and macros used internally by // Google Test. They are subject to change without notice. 
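
The `gtest-filepath.h` hunks above wrap the whole `FilePath` class in the new `GTEST_HAS_FILE_SYSTEM` guard so googletest can build for targets without a file system. Test code can branch on the same macro; a small sketch, assuming only that the macro is visible once `gtest/gtest.h` is included (the later `gtest-port.h` hunks in this patch document it):

    #include "gtest/gtest.h"

    #if GTEST_HAS_FILE_SYSTEM
    TEST(FileSystemSketch, TempDirIsUsable) {
      // testing::TempDir() is only meaningful where a file system exists; it
      // may or may not end with a directory separator.
      EXPECT_FALSE(testing::TempDir().empty());
    }
    #endif
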
-// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_ @@ -40,26 +42,29 @@ #include "gtest/internal/gtest-port.h" #if GTEST_OS_LINUX -# include -# include -# include -# include +#include +#include +#include +#include #endif // GTEST_OS_LINUX #if GTEST_HAS_EXCEPTIONS -# include +#include #endif #include #include #include + #include +#include #include #include #include #include #include #include +#include #include #include "gtest/gtest-message.h" @@ -76,7 +81,7 @@ // the current line number. For more details, see // http://www.parashift.com/c++-faq-lite/misc-technical-issues.html#faq-39.6 #define GTEST_CONCAT_TOKEN_(foo, bar) GTEST_CONCAT_TOKEN_IMPL_(foo, bar) -#define GTEST_CONCAT_TOKEN_IMPL_(foo, bar) foo ## bar +#define GTEST_CONCAT_TOKEN_IMPL_(foo, bar) foo##bar // Stringifies its argument. // Work around a bug in visual studio which doesn't accept code like this: @@ -98,21 +103,21 @@ namespace testing { // Forward declarations. -class AssertionResult; // Result of an assertion. -class Message; // Represents a failure message. -class Test; // Represents a test. -class TestInfo; // Information about a test. -class TestPartResult; // Result of a test part. -class UnitTest; // A collection of test suites. +class AssertionResult; // Result of an assertion. +class Message; // Represents a failure message. +class Test; // Represents a test. +class TestInfo; // Information about a test. +class TestPartResult; // Result of a test part. +class UnitTest; // A collection of test suites. template ::std::string PrintToString(const T& value); namespace internal { -struct TraceInfo; // Information about a trace point. -class TestInfoImpl; // Opaque implementation of TestInfo -class UnitTestImpl; // Opaque implementation of UnitTest +struct TraceInfo; // Information about a trace point. +class TestInfoImpl; // Opaque implementation of TestInfo +class UnitTestImpl; // Opaque implementation of UnitTest // The text used in failure messages to indicate the start of the // stack trace. @@ -121,6 +126,7 @@ GTEST_API_ extern const char kStackTraceMarker[]; // An IgnoredValue object can be implicitly constructed from ANY value. class IgnoredValue { struct Sink {}; + public: // This constructor template allows any value to be implicitly // converted to IgnoredValue. The object has no data member and @@ -136,13 +142,13 @@ class IgnoredValue { }; // Appends the user-supplied message to the Google-Test-generated message. -GTEST_API_ std::string AppendUserMessage( - const std::string& gtest_msg, const Message& user_msg); +GTEST_API_ std::string AppendUserMessage(const std::string& gtest_msg, + const Message& user_msg); #if GTEST_HAS_EXCEPTIONS -GTEST_DISABLE_MSC_WARNINGS_PUSH_(4275 \ -/* an exported class was derived from a class that was not exported */) +GTEST_DISABLE_MSC_WARNINGS_PUSH_( + 4275 /* an exported class was derived from a class that was not exported */) // This exception is thrown by (and only by) a failed Google Test // assertion when GTEST_FLAG(throw_on_failure) is true (if exceptions @@ -181,14 +187,6 @@ GTEST_API_ std::string CreateUnifiedDiff(const std::vector& left, } // namespace edit_distance -// Calculate the diff between 'left' and 'right' and return it in unified diff -// format. -// If not null, stores in 'total_line_count' the total number of lines found -// in left + right. 
-GTEST_API_ std::string DiffStrings(const std::string& left, - const std::string& right, - size_t* total_line_count); - // Constructs and returns the message for an equality assertion // (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure. // @@ -212,10 +210,8 @@ GTEST_API_ AssertionResult EqFailure(const char* expected_expression, // Constructs a failure message for Boolean assertions such as EXPECT_TRUE. GTEST_API_ std::string GetBoolAssertionFailureMessage( - const AssertionResult& assertion_result, - const char* expression_text, - const char* actual_predicate_value, - const char* expected_predicate_value); + const AssertionResult& assertion_result, const char* expression_text, + const char* actual_predicate_value, const char* expected_predicate_value); // This template class represents an IEEE floating-point number // (either single-precision or double-precision, depending on the @@ -256,11 +252,11 @@ class FloatingPoint { // Constants. // # of bits in a number. - static const size_t kBitCount = 8*sizeof(RawType); + static const size_t kBitCount = 8 * sizeof(RawType); // # of fraction bits in a number. static const size_t kFractionBitCount = - std::numeric_limits::digits - 1; + std::numeric_limits::digits - 1; // # of exponent bits in a number. static const size_t kExponentBitCount = kBitCount - 1 - kFractionBitCount; @@ -269,8 +265,8 @@ class FloatingPoint { static const Bits kSignBitMask = static_cast(1) << (kBitCount - 1); // The mask for the fraction bits. - static const Bits kFractionBitMask = - ~static_cast(0) >> (kExponentBitCount + 1); + static const Bits kFractionBitMask = ~static_cast(0) >> + (kExponentBitCount + 1); // The mask for the exponent bits. static const Bits kExponentBitMask = ~(kSignBitMask | kFractionBitMask); @@ -309,9 +305,7 @@ class FloatingPoint { } // Returns the floating-point number that represent positive infinity. - static RawType Infinity() { - return ReinterpretBits(kExponentBitMask); - } + static RawType Infinity() { return ReinterpretBits(kExponentBitMask); } // Returns the maximum representable finite floating-point number. static RawType Max(); @@ -319,7 +313,7 @@ class FloatingPoint { // Non-static methods // Returns the bits that represents this number. - const Bits &bits() const { return u_.bits_; } + const Bits& bits() const { return u_.bits_; } // Returns the exponent bits of this number. Bits exponent_bits() const { return kExponentBitMask & u_.bits_; } @@ -348,8 +342,8 @@ class FloatingPoint { // a NAN must return false. if (is_nan() || rhs.is_nan()) return false; - return DistanceBetweenSignAndMagnitudeNumbers(u_.bits_, rhs.u_.bits_) - <= kMaxUlps; + return DistanceBetweenSignAndMagnitudeNumbers(u_.bits_, rhs.u_.bits_) <= + kMaxUlps; } private: @@ -374,7 +368,7 @@ class FloatingPoint { // // Read http://en.wikipedia.org/wiki/Signed_number_representations // for more details on signed number representations. - static Bits SignAndMagnitudeToBiased(const Bits &sam) { + static Bits SignAndMagnitudeToBiased(const Bits& sam) { if (kSignBitMask & sam) { // sam represents a negative number. return ~sam + 1; @@ -386,8 +380,8 @@ class FloatingPoint { // Given two numbers in the sign-and-magnitude representation, // returns the distance between them as an unsigned number. 
- static Bits DistanceBetweenSignAndMagnitudeNumbers(const Bits &sam1, - const Bits &sam2) { + static Bits DistanceBetweenSignAndMagnitudeNumbers(const Bits& sam1, + const Bits& sam2) { const Bits biased1 = SignAndMagnitudeToBiased(sam1); const Bits biased2 = SignAndMagnitudeToBiased(sam2); return (biased1 >= biased2) ? (biased1 - biased2) : (biased2 - biased1); @@ -399,9 +393,13 @@ class FloatingPoint { // We cannot use std::numeric_limits::max() as it clashes with the max() // macro defined by . template <> -inline float FloatingPoint::Max() { return FLT_MAX; } +inline float FloatingPoint::Max() { + return FLT_MAX; +} template <> -inline double FloatingPoint::Max() { return DBL_MAX; } +inline double FloatingPoint::Max() { + return DBL_MAX; +} // Typedefs the instances of the FloatingPoint template class that we // care to use. @@ -461,10 +459,11 @@ class TestFactoryBase { TestFactoryBase() {} private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestFactoryBase); + TestFactoryBase(const TestFactoryBase&) = delete; + TestFactoryBase& operator=(const TestFactoryBase&) = delete; }; -// This class provides implementation of TeastFactoryBase interface. +// This class provides implementation of TestFactoryBase interface. // It is used in TEST and TEST_F macros. template class TestFactoryImpl : public TestFactoryBase { @@ -510,11 +509,11 @@ inline SetUpTearDownSuiteFuncType GetNotDefaultOrNull( template // Note that SuiteApiResolver inherits from T because -// SetUpTestSuite()/TearDownTestSuite() could be protected. Ths way +// SetUpTestSuite()/TearDownTestSuite() could be protected. This way // SuiteApiResolver can access them. struct SuiteApiResolver : T { // testing::Test is only forward declared at this point. So we make it a - // dependend class for the compiler to be OK with it. + // dependent class for the compiler to be OK with it. using Test = typename std::conditional::type; @@ -634,7 +633,7 @@ class GTEST_API_ TypedTestSuitePState { const char* registered_tests); private: - typedef ::std::map RegisteredTestsMap; + typedef ::std::map> RegisteredTestsMap; bool registered_; RegisteredTestsMap registered_tests_; @@ -654,7 +653,8 @@ inline const char* SkipComma(const char* str) { if (comma == nullptr) { return nullptr; } - while (IsSpace(*(++comma))) {} + while (IsSpace(*(++comma))) { + } return comma; } @@ -668,7 +668,7 @@ inline std::string GetPrefixUntilComma(const char* str) { // Splits a given string on a given delimiter, populating a given // vector with the fields. void SplitString(const ::std::string& str, char delimiter, - ::std::vector< ::std::string>* dest); + ::std::vector<::std::string>* dest); // The default argument to the template below for the case when the user does // not provide a name generator. 
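
`FloatingPoint` compares numbers by ULP distance: `SignAndMagnitudeToBiased` maps the raw bits to a biased unsigned representation in which adjacent floats differ by exactly 1, and `AlmostEquals` accepts anything within `kMaxUlps` (4) units. A worked sketch; note it touches `testing::internal`, which the header itself says may change without notice:

    #include <cmath>
    #include "gtest/gtest.h"

    TEST(UlpSketch, AdjacentDoublesAreAlmostEqual) {
      using testing::internal::FloatingPoint;
      const double x = 1.0;
      const double y = std::nextafter(x, 2.0);  // exactly 1 ULP above x
      EXPECT_TRUE(FloatingPoint<double>(x).AlmostEquals(FloatingPoint<double>(y)));
      EXPECT_DOUBLE_EQ(x, y);  // the public wrapper around the same 4-ULP check
    }
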
@@ -781,13 +781,13 @@ class TypeParameterizedTestSuite { const std::vector& type_names = GenerateNames()) { RegisterTypeParameterizedTestSuiteInstantiation(case_name); - std::string test_name = StripTrailingSpaces( - GetPrefixUntilComma(test_names)); + std::string test_name = + StripTrailingSpaces(GetPrefixUntilComma(test_names)); if (!state->TestExists(test_name)) { fprintf(stderr, "Failed to get code location for test %s.%s at %s.", case_name, test_name.c_str(), - FormatFileLocation(code_location.file.c_str(), - code_location.line).c_str()); + FormatFileLocation(code_location.file.c_str(), code_location.line) + .c_str()); fflush(stderr); posix::Abort(); } @@ -831,8 +831,7 @@ class TypeParameterizedTestSuite { // For example, if Foo() calls Bar(), which in turn calls // GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in // the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't. -GTEST_API_ std::string GetCurrentOsStackTraceExceptTop( - UnitTest* unit_test, int skip_count); +GTEST_API_ std::string GetCurrentOsStackTraceExceptTop(int skip_count); // Helpers for suppressing warnings on unreachable code or constant // condition. @@ -881,7 +880,8 @@ class GTEST_API_ Random { private: uint32_t state_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(Random); + Random(const Random&) = delete; + Random& operator=(const Random&) = delete; }; // Turns const U&, U&, const U, and U all into U. @@ -954,7 +954,9 @@ IsContainer IsContainerTest(int /* dummy */) { typedef char IsNotContainer; template -IsNotContainer IsContainerTest(long /* dummy */) { return '\0'; } +IsNotContainer IsContainerTest(long /* dummy */) { + return '\0'; +} // Trait to detect whether a type T is a hash table. // The heuristic used is that the type contains an inner type `hasher` and does @@ -1017,11 +1019,13 @@ bool ArrayEq(const T* lhs, size_t size, const U* rhs); // This generic version is used when k is 0. template -inline bool ArrayEq(const T& lhs, const U& rhs) { return lhs == rhs; } +inline bool ArrayEq(const T& lhs, const U& rhs) { + return lhs == rhs; +} // This overload is used when k >= 1. template -inline bool ArrayEq(const T(&lhs)[N], const U(&rhs)[N]) { +inline bool ArrayEq(const T (&lhs)[N], const U (&rhs)[N]) { return internal::ArrayEq(lhs, N, rhs); } @@ -1031,8 +1035,7 @@ inline bool ArrayEq(const T(&lhs)[N], const U(&rhs)[N]) { template bool ArrayEq(const T* lhs, size_t size, const U* rhs) { for (size_t i = 0; i != size; i++) { - if (!internal::ArrayEq(lhs[i], rhs[i])) - return false; + if (!internal::ArrayEq(lhs[i], rhs[i])) return false; } return true; } @@ -1042,8 +1045,7 @@ bool ArrayEq(const T* lhs, size_t size, const U* rhs) { template Iter ArrayAwareFind(Iter begin, Iter end, const Element& elem) { for (Iter it = begin; it != end; ++it) { - if (internal::ArrayEq(*it, elem)) - return it; + if (internal::ArrayEq(*it, elem)) return it; } return end; } @@ -1057,11 +1059,13 @@ void CopyArray(const T* from, size_t size, U* to); // This generic version is used when k is 0. template -inline void CopyArray(const T& from, U* to) { *to = from; } +inline void CopyArray(const T& from, U* to) { + *to = from; +} // This overload is used when k >= 1. 
template -inline void CopyArray(const T(&from)[N], U(*to)[N]) { +inline void CopyArray(const T (&from)[N], U (*to)[N]) { internal::CopyArray(from, N, *to); } @@ -1114,8 +1118,7 @@ class NativeArray { } ~NativeArray() { - if (clone_ != &NativeArray::InitRef) - delete[] array_; + if (clone_ != &NativeArray::InitRef) delete[] array_; } // STL-style container methods. @@ -1123,8 +1126,7 @@ class NativeArray { const_iterator begin() const { return array_; } const_iterator end() const { return array_ + size_; } bool operator==(const NativeArray& rhs) const { - return size() == rhs.size() && - ArrayEq(begin(), size(), rhs.begin()); + return size() == rhs.size() && ArrayEq(begin(), size(), rhs.begin()); } private: @@ -1335,9 +1337,9 @@ struct tuple_size> #endif } // namespace std -#define GTEST_MESSAGE_AT_(file, line, message, result_type) \ - ::testing::internal::AssertHelper(result_type, file, line, message) \ - = ::testing::Message() +#define GTEST_MESSAGE_AT_(file, line, message, result_type) \ + ::testing::internal::AssertHelper(result_type, file, line, message) = \ + ::testing::Message() #define GTEST_MESSAGE_(message, result_type) \ GTEST_MESSAGE_AT_(__FILE__, __LINE__, message, result_type) @@ -1458,103 +1460,112 @@ class NeverThrown { #endif // GTEST_HAS_EXCEPTIONS -#define GTEST_TEST_NO_THROW_(statement, fail) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (::testing::internal::TrueWithString gtest_msg{}) { \ - try { \ - GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ - } \ - GTEST_TEST_NO_THROW_CATCH_STD_EXCEPTION_() \ - catch (...) { \ - gtest_msg.value = "it throws."; \ - goto GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__); \ - } \ - } else \ - GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__): \ - fail(("Expected: " #statement " doesn't throw an exception.\n" \ - " Actual: " + gtest_msg.value).c_str()) - -#define GTEST_TEST_ANY_THROW_(statement, fail) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (::testing::internal::AlwaysTrue()) { \ - bool gtest_caught_any = false; \ - try { \ - GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ - } \ - catch (...) { \ - gtest_caught_any = true; \ - } \ - if (!gtest_caught_any) { \ +#define GTEST_TEST_NO_THROW_(statement, fail) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (::testing::internal::TrueWithString gtest_msg{}) { \ + try { \ + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ + } \ + GTEST_TEST_NO_THROW_CATCH_STD_EXCEPTION_() \ + catch (...) { \ + gtest_msg.value = "it throws."; \ + goto GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__); \ + } \ + } else \ + GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__) \ + : fail(("Expected: " #statement " doesn't throw an exception.\n" \ + " Actual: " + \ + gtest_msg.value) \ + .c_str()) + +#define GTEST_TEST_ANY_THROW_(statement, fail) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (::testing::internal::AlwaysTrue()) { \ + bool gtest_caught_any = false; \ + try { \ + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ + } catch (...) 
{ \ + gtest_caught_any = true; \ + } \ + if (!gtest_caught_any) { \ goto GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__); \ - } \ - } else \ - GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__): \ - fail("Expected: " #statement " throws an exception.\n" \ - " Actual: it doesn't.") - + } \ + } else \ + GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__) \ + : fail("Expected: " #statement \ + " throws an exception.\n" \ + " Actual: it doesn't.") // Implements Boolean test assertions such as EXPECT_TRUE. expression can be // either a boolean expression or an AssertionResult. text is a textual // representation of expression as it was passed into the EXPECT_TRUE. #define GTEST_TEST_BOOLEAN_(expression, text, actual, expected, fail) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (const ::testing::AssertionResult gtest_ar_ = \ - ::testing::AssertionResult(expression)) \ - ; \ - else \ - fail(::testing::internal::GetBoolAssertionFailureMessage(\ - gtest_ar_, text, #actual, #expected).c_str()) - -#define GTEST_TEST_NO_FATAL_FAILURE_(statement, fail) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (::testing::internal::AlwaysTrue()) { \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (const ::testing::AssertionResult gtest_ar_ = \ + ::testing::AssertionResult(expression)) \ + ; \ + else \ + fail(::testing::internal::GetBoolAssertionFailureMessage( \ + gtest_ar_, text, #actual, #expected) \ + .c_str()) + +#define GTEST_TEST_NO_FATAL_FAILURE_(statement, fail) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (::testing::internal::AlwaysTrue()) { \ ::testing::internal::HasNewFatalFailureHelper gtest_fatal_failure_checker; \ - GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ - if (gtest_fatal_failure_checker.has_new_fatal_failure()) { \ - goto GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__); \ - } \ - } else \ - GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__): \ - fail("Expected: " #statement " doesn't generate new fatal " \ - "failures in the current thread.\n" \ - " Actual: it does.") + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \ + if (gtest_fatal_failure_checker.has_new_fatal_failure()) { \ + goto GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__); \ + } \ + } else \ + GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__) \ + : fail("Expected: " #statement \ + " doesn't generate new fatal " \ + "failures in the current thread.\n" \ + " Actual: it does.") // Expands to the name of the class that implements the given test. #define GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ test_suite_name##_##test_name##_Test // Helper macro for defining tests. 
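
`GTEST_TEST_NO_THROW_` and `GTEST_TEST_ANY_THROW_` above use the else-blocker/goto shape so each assertion behaves as a single statement even inside an unbraced `if`. The public wrappers they implement, in a minimal sketch (`ParsePositive` is illustrative):

    #include <stdexcept>
    #include <string>
    #include "gtest/gtest.h"

    int ParsePositive(const std::string& s) {
      int v = std::stoi(s);  // throws std::invalid_argument on non-numeric input
      if (v <= 0) throw std::out_of_range("not positive");
      return v;
    }

    TEST(ThrowSketch, PublicWrappers) {
      EXPECT_NO_THROW(ParsePositive("7"));
      EXPECT_ANY_THROW(ParsePositive("abc"));
      EXPECT_THROW(ParsePositive("-1"), std::out_of_range);
    }
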
-#define GTEST_TEST_(test_suite_name, test_name, parent_class, parent_id) \ - static_assert(sizeof(GTEST_STRINGIFY_(test_suite_name)) > 1, \ - "test_suite_name must not be empty"); \ - static_assert(sizeof(GTEST_STRINGIFY_(test_name)) > 1, \ - "test_name must not be empty"); \ - class GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ - : public parent_class { \ - public: \ - GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() = default; \ - ~GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() override = default; \ - GTEST_DISALLOW_COPY_AND_ASSIGN_(GTEST_TEST_CLASS_NAME_(test_suite_name, \ - test_name)); \ - GTEST_DISALLOW_MOVE_AND_ASSIGN_(GTEST_TEST_CLASS_NAME_(test_suite_name, \ - test_name)); \ - \ - private: \ - void TestBody() override; \ - static ::testing::TestInfo* const test_info_ GTEST_ATTRIBUTE_UNUSED_; \ - }; \ - \ - ::testing::TestInfo* const GTEST_TEST_CLASS_NAME_(test_suite_name, \ - test_name)::test_info_ = \ - ::testing::internal::MakeAndRegisterTestInfo( \ - #test_suite_name, #test_name, nullptr, nullptr, \ - ::testing::internal::CodeLocation(__FILE__, __LINE__), (parent_id), \ - ::testing::internal::SuiteApiResolver< \ - parent_class>::GetSetUpCaseOrSuite(__FILE__, __LINE__), \ - ::testing::internal::SuiteApiResolver< \ - parent_class>::GetTearDownCaseOrSuite(__FILE__, __LINE__), \ - new ::testing::internal::TestFactoryImpl); \ +#define GTEST_TEST_(test_suite_name, test_name, parent_class, parent_id) \ + static_assert(sizeof(GTEST_STRINGIFY_(test_suite_name)) > 1, \ + "test_suite_name must not be empty"); \ + static_assert(sizeof(GTEST_STRINGIFY_(test_name)) > 1, \ + "test_name must not be empty"); \ + class GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ + : public parent_class { \ + public: \ + GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() = default; \ + ~GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() override = default; \ + GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ + (const GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) &) = delete; \ + GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) & operator=( \ + const GTEST_TEST_CLASS_NAME_(test_suite_name, \ + test_name) &) = delete; /* NOLINT */ \ + GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \ + (GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) &&) noexcept = delete; \ + GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) & operator=( \ + GTEST_TEST_CLASS_NAME_(test_suite_name, \ + test_name) &&) noexcept = delete; /* NOLINT */ \ + \ + private: \ + void TestBody() override; \ + static ::testing::TestInfo* const test_info_ GTEST_ATTRIBUTE_UNUSED_; \ + }; \ + \ + ::testing::TestInfo* const GTEST_TEST_CLASS_NAME_(test_suite_name, \ + test_name)::test_info_ = \ + ::testing::internal::MakeAndRegisterTestInfo( \ + #test_suite_name, #test_name, nullptr, nullptr, \ + ::testing::internal::CodeLocation(__FILE__, __LINE__), (parent_id), \ + ::testing::internal::SuiteApiResolver< \ + parent_class>::GetSetUpCaseOrSuite(__FILE__, __LINE__), \ + ::testing::internal::SuiteApiResolver< \ + parent_class>::GetTearDownCaseOrSuite(__FILE__, __LINE__), \ + new ::testing::internal::TestFactoryImpl); \ void GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)::TestBody() #endif // GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_ diff --git a/deps/googletest/include/gtest/internal/gtest-param-util.h b/deps/googletest/include/gtest/internal/gtest-param-util.h index c2ef6e3124b05f..7092d10e677c7a 100644 --- a/deps/googletest/include/gtest/internal/gtest-param-util.h +++ 
b/deps/googletest/include/gtest/internal/gtest-param-util.h @@ -27,10 +27,11 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - // Type and function utilities for implementing parameterized tests. -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_ @@ -39,26 +40,28 @@ #include #include +#include #include +#include #include +#include #include #include #include #include -#include "gtest/internal/gtest-internal.h" -#include "gtest/internal/gtest-port.h" #include "gtest/gtest-printers.h" #include "gtest/gtest-test-part.h" +#include "gtest/internal/gtest-internal.h" +#include "gtest/internal/gtest-port.h" namespace testing { // Input to a parameterized test name generator, describing a test parameter. // Consists of the parameter value and the integer parameter index. template struct TestParamInfo { - TestParamInfo(const ParamType& a_param, size_t an_index) : - param(a_param), - index(an_index) {} + TestParamInfo(const ParamType& a_param, size_t an_index) + : param(a_param), index(an_index) {} ParamType param; size_t index; }; @@ -84,8 +87,10 @@ namespace internal { GTEST_API_ void ReportInvalidTestSuiteType(const char* test_suite_name, CodeLocation code_location); -template class ParamGeneratorInterface; -template class ParamGenerator; +template +class ParamGeneratorInterface; +template +class ParamGenerator; // Interface for iterating over elements provided by an implementation // of ParamGeneratorInterface. @@ -129,8 +134,7 @@ class ParamIterator { // ParamIterator assumes ownership of the impl_ pointer. ParamIterator(const ParamIterator& other) : impl_(other.impl_->Clone()) {} ParamIterator& operator=(const ParamIterator& other) { - if (this != &other) - impl_.reset(other.impl_->Clone()); + if (this != &other) impl_.reset(other.impl_->Clone()); return *this; } @@ -157,7 +161,7 @@ class ParamIterator { private: friend class ParamGenerator; explicit ParamIterator(ParamIteratorInterface* impl) : impl_(impl) {} - std::unique_ptr > impl_; + std::unique_ptr> impl_; }; // ParamGeneratorInterface is the binary interface to access generators @@ -179,7 +183,7 @@ class ParamGeneratorInterface { // This class implements copy initialization semantics and the contained // ParamGeneratorInterface instance is shared among all copies // of the original object. This is possible because that instance is immutable. -template +template class ParamGenerator { public: typedef ParamIterator iterator; @@ -196,7 +200,7 @@ class ParamGenerator { iterator end() const { return iterator(impl_->End()); } private: - std::shared_ptr > impl_; + std::shared_ptr> impl_; }; // Generates values from a range of two comparable values. 
Can be used to @@ -207,8 +211,10 @@ template class RangeGenerator : public ParamGeneratorInterface { public: RangeGenerator(T begin, T end, IncrementT step) - : begin_(begin), end_(end), - step_(step), end_index_(CalculateEndIndex(begin, end, step)) {} + : begin_(begin), + end_(end), + step_(step), + end_index_(CalculateEndIndex(begin, end, step)) {} ~RangeGenerator() override {} ParamIteratorInterface* Begin() const override { @@ -251,7 +257,9 @@ class RangeGenerator : public ParamGeneratorInterface { private: Iterator(const Iterator& other) : ParamIteratorInterface(), - base_(other.base_), value_(other.value_), index_(other.index_), + base_(other.base_), + value_(other.value_), + index_(other.index_), step_(other.step_) {} // No implementation - assignment is unsupported. @@ -263,12 +271,10 @@ class RangeGenerator : public ParamGeneratorInterface { const IncrementT step_; }; // class RangeGenerator::Iterator - static int CalculateEndIndex(const T& begin, - const T& end, + static int CalculateEndIndex(const T& begin, const T& end, const IncrementT& step) { int end_index = 0; - for (T i = begin; i < end; i = static_cast(i + step)) - end_index++; + for (T i = begin; i < end; i = static_cast(i + step)) end_index++; return end_index; } @@ -283,7 +289,6 @@ class RangeGenerator : public ParamGeneratorInterface { const int end_index_; }; // class RangeGenerator - // Generates values from a pair of STL-style iterators. Used in the // ValuesIn() function. The elements are copied from the source range // since the source can be located on the stack, and the generator @@ -341,13 +346,13 @@ class ValuesInIteratorRangeGenerator : public ParamGeneratorInterface { << "The program attempted to compare iterators " << "from different generators." << std::endl; return iterator_ == - CheckedDowncastToActualType(&other)->iterator_; + CheckedDowncastToActualType(&other)->iterator_; } private: Iterator(const Iterator& other) - // The explicit constructor call suppresses a false warning - // emitted by gcc when supplied with the -Wextra option. + // The explicit constructor call suppresses a false warning + // emitted by gcc when supplied with the -Wextra option. : ParamIteratorInterface(), base_(other.base_), iterator_(other.iterator_) {} @@ -394,8 +399,8 @@ template class ParameterizedTestFactory : public TestFactoryBase { public: typedef typename TestClass::ParamType ParamType; - explicit ParameterizedTestFactory(ParamType parameter) : - parameter_(parameter) {} + explicit ParameterizedTestFactory(ParamType parameter) + : parameter_(parameter) {} Test* CreateTest() override { TestClass::SetParam(¶meter_); return new TestClass(); @@ -404,7 +409,8 @@ class ParameterizedTestFactory : public TestFactoryBase { private: const ParamType parameter_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestFactory); + ParameterizedTestFactory(const ParameterizedTestFactory&) = delete; + ParameterizedTestFactory& operator=(const ParameterizedTestFactory&) = delete; }; // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. @@ -440,7 +446,8 @@ class TestMetaFactory } private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestMetaFactory); + TestMetaFactory(const TestMetaFactory&) = delete; + TestMetaFactory& operator=(const TestMetaFactory&) = delete; }; // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. 
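
`RangeGenerator` and `ValuesInIteratorRangeGenerator` are the engines behind `testing::Range()` and `testing::ValuesIn()`, and `ParameterizedTestFactory`/`TestMetaFactory` then stamp out one registered test per generated value. A minimal sketch of the public API these classes serve (suite and value names illustrative):

    #include <vector>
    #include "gtest/gtest.h"

    class OddTest : public testing::TestWithParam<int> {};

    TEST_P(OddTest, IsOdd) { EXPECT_EQ(GetParam() % 2, 1); }

    // Range(3, 9, 2) yields 3, 5, 7 (the end value is exclusive); ValuesIn
    // copies the elements, so the source container may live on the stack.
    INSTANTIATE_TEST_SUITE_P(FromRange, OddTest, testing::Range(3, 9, 2));

    static const std::vector<int> kOdds{1, 5, 9};
    INSTANTIATE_TEST_SUITE_P(FromVector, OddTest, testing::ValuesIn(kOdds));
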
@@ -471,7 +478,10 @@ class ParameterizedTestSuiteInfoBase { ParameterizedTestSuiteInfoBase() {} private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteInfoBase); + ParameterizedTestSuiteInfoBase(const ParameterizedTestSuiteInfoBase&) = + delete; + ParameterizedTestSuiteInfoBase& operator=( + const ParameterizedTestSuiteInfoBase&) = delete; }; // INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE. @@ -547,8 +557,8 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { test_it != tests_.end(); ++test_it) { std::shared_ptr test_info = *test_it; for (typename InstantiationContainer::iterator gen_it = - instantiations_.begin(); gen_it != instantiations_.end(); - ++gen_it) { + instantiations_.begin(); + gen_it != instantiations_.end(); ++gen_it) { const std::string& instantiation_name = gen_it->name; ParamGenerator generator((*gen_it->generator)()); ParamNameGeneratorFunc* name_func = gen_it->name_func; @@ -556,7 +566,7 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { int line = gen_it->line; std::string test_suite_name; - if ( !instantiation_name.empty() ) + if (!instantiation_name.empty()) test_suite_name = instantiation_name + "/"; test_suite_name += test_info->test_suite_base_name; @@ -569,17 +579,16 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { Message test_name_stream; - std::string param_name = name_func( - TestParamInfo(*param_it, i)); + std::string param_name = + name_func(TestParamInfo(*param_it, i)); GTEST_CHECK_(IsValidParamName(param_name)) << "Parameterized test name '" << param_name - << "' is invalid, in " << file - << " line " << line << std::endl; + << "' is invalid, in " << file << " line " << line << std::endl; GTEST_CHECK_(test_param_names.count(param_name) == 0) - << "Duplicate parameterized test name '" << param_name - << "', in " << file << " line " << line << std::endl; + << "Duplicate parameterized test name '" << param_name << "', in " + << file << " line " << line << std::endl; test_param_names.insert(param_name); @@ -596,15 +605,15 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { SuiteApiResolver::GetTearDownCaseOrSuite(file, line), test_info->test_meta_factory->CreateTestFactory(*param_it)); } // for param_it - } // for gen_it - } // for test_it + } // for gen_it + } // for test_it if (!generated_instantiations) { // There are no generaotrs, or they all generate nothing ... 
InsertSyntheticTestCase(GetTestSuiteName(), code_location_, !tests_.empty()); } - } // RegisterTests + } // RegisterTests private: // LocalTestInfo structure keeps information about a single test registered @@ -620,42 +629,39 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { const std::string test_suite_base_name; const std::string test_base_name; - const std::unique_ptr > test_meta_factory; + const std::unique_ptr> test_meta_factory; const CodeLocation code_location; }; - using TestInfoContainer = ::std::vector >; + using TestInfoContainer = ::std::vector>; // Records data received from INSTANTIATE_TEST_SUITE_P macros: // struct InstantiationInfo { - InstantiationInfo(const std::string &name_in, - GeneratorCreationFunc* generator_in, - ParamNameGeneratorFunc* name_func_in, - const char* file_in, - int line_in) - : name(name_in), - generator(generator_in), - name_func(name_func_in), - file(file_in), - line(line_in) {} - - std::string name; - GeneratorCreationFunc* generator; - ParamNameGeneratorFunc* name_func; - const char* file; - int line; + InstantiationInfo(const std::string& name_in, + GeneratorCreationFunc* generator_in, + ParamNameGeneratorFunc* name_func_in, const char* file_in, + int line_in) + : name(name_in), + generator(generator_in), + name_func(name_func_in), + file(file_in), + line(line_in) {} + + std::string name; + GeneratorCreationFunc* generator; + ParamNameGeneratorFunc* name_func; + const char* file; + int line; }; typedef ::std::vector InstantiationContainer; static bool IsValidParamName(const std::string& name) { // Check for empty string - if (name.empty()) - return false; + if (name.empty()) return false; // Check for invalid characters for (std::string::size_type index = 0; index < name.size(); ++index) { - if (!IsAlNum(name[index]) && name[index] != '_') - return false; + if (!IsAlNum(name[index]) && name[index] != '_') return false; } return true; @@ -666,7 +672,9 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase { TestInfoContainer tests_; InstantiationContainer instantiations_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteInfo); + ParameterizedTestSuiteInfo(const ParameterizedTestSuiteInfo&) = delete; + ParameterizedTestSuiteInfo& operator=(const ParameterizedTestSuiteInfo&) = + delete; }; // class ParameterizedTestSuiteInfo // Legacy API is deprecated but still available @@ -709,7 +717,7 @@ class ParameterizedTestSuiteRegistry { // type we are looking for, so we downcast it to that type // without further checks. 
typed_test_info = CheckedDowncastToActualType< - ParameterizedTestSuiteInfo >(test_suite_info); + ParameterizedTestSuiteInfo>(test_suite_info); } break; } @@ -741,7 +749,10 @@ class ParameterizedTestSuiteRegistry { TestSuiteInfoContainer test_suite_infos_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteRegistry); + ParameterizedTestSuiteRegistry(const ParameterizedTestSuiteRegistry&) = + delete; + ParameterizedTestSuiteRegistry& operator=( + const ParameterizedTestSuiteRegistry&) = delete; }; // Keep track of what type-parameterized test suite are defined and @@ -836,7 +847,8 @@ class CartesianProductGenerator : public ParamIteratorInterface { public: IteratorImpl(const ParamGeneratorInterface* base, - const std::tuple...>& generators, bool is_end) + const std::tuple...>& generators, + bool is_end) : base_(base), begin_(std::get(generators).begin()...), end_(std::get(generators).end()...), @@ -941,6 +953,78 @@ class CartesianProductHolder { std::tuple generators_; }; +template +class ParamGeneratorConverter : public ParamGeneratorInterface { + public: + ParamGeneratorConverter(ParamGenerator gen) // NOLINT + : generator_(std::move(gen)) {} + + ParamIteratorInterface* Begin() const override { + return new Iterator(this, generator_.begin(), generator_.end()); + } + ParamIteratorInterface* End() const override { + return new Iterator(this, generator_.end(), generator_.end()); + } + + private: + class Iterator : public ParamIteratorInterface { + public: + Iterator(const ParamGeneratorInterface* base, ParamIterator it, + ParamIterator end) + : base_(base), it_(it), end_(end) { + if (it_ != end_) value_ = std::make_shared(static_cast(*it_)); + } + ~Iterator() override {} + + const ParamGeneratorInterface* BaseGenerator() const override { + return base_; + } + void Advance() override { + ++it_; + if (it_ != end_) value_ = std::make_shared(static_cast(*it_)); + } + ParamIteratorInterface* Clone() const override { + return new Iterator(*this); + } + const To* Current() const override { return value_.get(); } + bool Equals(const ParamIteratorInterface& other) const override { + // Having the same base generator guarantees that the other + // iterator is of the same type and we can downcast. + GTEST_CHECK_(BaseGenerator() == other.BaseGenerator()) + << "The program attempted to compare iterators " + << "from different generators." 
<< std::endl; + const ParamIterator other_it = + CheckedDowncastToActualType(&other)->it_; + return it_ == other_it; + } + + private: + Iterator(const Iterator& other) = default; + + const ParamGeneratorInterface* const base_; + ParamIterator it_; + ParamIterator end_; + std::shared_ptr value_; + }; // class ParamGeneratorConverter::Iterator + + ParamGenerator generator_; +}; // class ParamGeneratorConverter + +template +class ParamConverterGenerator { + public: + ParamConverterGenerator(ParamGenerator g) // NOLINT + : generator_(std::move(g)) {} + + template + operator ParamGenerator() const { // NOLINT + return ParamGenerator(new ParamGeneratorConverter(generator_)); + } + + private: + ParamGenerator generator_; +}; + } // namespace internal } // namespace testing diff --git a/deps/googletest/include/gtest/internal/gtest-port-arch.h b/deps/googletest/include/gtest/internal/gtest-port-arch.h index 4dcdc89c859375..04064606f5d02b 100644 --- a/deps/googletest/include/gtest/internal/gtest-port-arch.h +++ b/deps/googletest/include/gtest/internal/gtest-port-arch.h @@ -26,7 +26,7 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// + // The Google C++ Testing and Mocking Framework (Google Test) // // This header file defines the GTEST_OS_* macro. @@ -37,72 +37,72 @@ // Determines the platform on which Google Test is compiled. #ifdef __CYGWIN__ -# define GTEST_OS_CYGWIN 1 -# elif defined(__MINGW__) || defined(__MINGW32__) || defined(__MINGW64__) -# define GTEST_OS_WINDOWS_MINGW 1 -# define GTEST_OS_WINDOWS 1 +#define GTEST_OS_CYGWIN 1 +#elif defined(__MINGW__) || defined(__MINGW32__) || defined(__MINGW64__) +#define GTEST_OS_WINDOWS_MINGW 1 +#define GTEST_OS_WINDOWS 1 #elif defined _WIN32 -# define GTEST_OS_WINDOWS 1 -# ifdef _WIN32_WCE -# define GTEST_OS_WINDOWS_MOBILE 1 -# elif defined(WINAPI_FAMILY) -# include -# if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) -# define GTEST_OS_WINDOWS_DESKTOP 1 -# elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP) -# define GTEST_OS_WINDOWS_PHONE 1 -# elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) -# define GTEST_OS_WINDOWS_RT 1 -# elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_TV_TITLE) -# define GTEST_OS_WINDOWS_PHONE 1 -# define GTEST_OS_WINDOWS_TV_TITLE 1 -# else - // WINAPI_FAMILY defined but no known partition matched. - // Default to desktop. -# define GTEST_OS_WINDOWS_DESKTOP 1 -# endif -# else -# define GTEST_OS_WINDOWS_DESKTOP 1 -# endif // _WIN32_WCE +#define GTEST_OS_WINDOWS 1 +#ifdef _WIN32_WCE +#define GTEST_OS_WINDOWS_MOBILE 1 +#elif defined(WINAPI_FAMILY) +#include +#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) +#define GTEST_OS_WINDOWS_DESKTOP 1 +#elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP) +#define GTEST_OS_WINDOWS_PHONE 1 +#elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP) +#define GTEST_OS_WINDOWS_RT 1 +#elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_TV_TITLE) +#define GTEST_OS_WINDOWS_PHONE 1 +#define GTEST_OS_WINDOWS_TV_TITLE 1 +#else +// WINAPI_FAMILY defined but no known partition matched. +// Default to desktop. 
+#define GTEST_OS_WINDOWS_DESKTOP 1 +#endif +#else +#define GTEST_OS_WINDOWS_DESKTOP 1 +#endif // _WIN32_WCE #elif defined __OS2__ -# define GTEST_OS_OS2 1 +#define GTEST_OS_OS2 1 #elif defined __APPLE__ -# define GTEST_OS_MAC 1 -# include -# if TARGET_OS_IPHONE -# define GTEST_OS_IOS 1 -# endif +#define GTEST_OS_MAC 1 +#include +#if TARGET_OS_IPHONE +#define GTEST_OS_IOS 1 +#endif #elif defined __DragonFly__ -# define GTEST_OS_DRAGONFLY 1 +#define GTEST_OS_DRAGONFLY 1 #elif defined __FreeBSD__ -# define GTEST_OS_FREEBSD 1 +#define GTEST_OS_FREEBSD 1 #elif defined __Fuchsia__ -# define GTEST_OS_FUCHSIA 1 +#define GTEST_OS_FUCHSIA 1 #elif defined(__GNU__) -# define GTEST_OS_GNU_HURD 1 +#define GTEST_OS_GNU_HURD 1 #elif defined(__GLIBC__) && defined(__FreeBSD_kernel__) -# define GTEST_OS_GNU_KFREEBSD 1 +#define GTEST_OS_GNU_KFREEBSD 1 #elif defined __linux__ -# define GTEST_OS_LINUX 1 -# if defined __ANDROID__ -# define GTEST_OS_LINUX_ANDROID 1 -# endif +#define GTEST_OS_LINUX 1 +#if defined __ANDROID__ +#define GTEST_OS_LINUX_ANDROID 1 +#endif #elif defined __MVS__ -# define GTEST_OS_ZOS 1 +#define GTEST_OS_ZOS 1 #elif defined(__sun) && defined(__SVR4) -# define GTEST_OS_SOLARIS 1 +#define GTEST_OS_SOLARIS 1 #elif defined(_AIX) -# define GTEST_OS_AIX 1 +#define GTEST_OS_AIX 1 #elif defined(__hpux) -# define GTEST_OS_HPUX 1 +#define GTEST_OS_HPUX 1 #elif defined __native_client__ -# define GTEST_OS_NACL 1 +#define GTEST_OS_NACL 1 #elif defined __NetBSD__ -# define GTEST_OS_NETBSD 1 +#define GTEST_OS_NETBSD 1 #elif defined __OpenBSD__ -# define GTEST_OS_OPENBSD 1 +#define GTEST_OS_OPENBSD 1 #elif defined __QNX__ -# define GTEST_OS_QNX 1 +#define GTEST_OS_QNX 1 #elif defined(__HAIKU__) #define GTEST_OS_HAIKU 1 #elif defined ESP8266 @@ -111,6 +111,8 @@ #define GTEST_OS_ESP32 1 #elif defined(__XTENSA__) #define GTEST_OS_XTENSA 1 +#elif defined(__hexagon__) +#define GTEST_OS_QURT 1 #endif // __CYGWIN__ #endif // GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_ARCH_H_ diff --git a/deps/googletest/include/gtest/internal/gtest-port.h b/deps/googletest/include/gtest/internal/gtest-port.h index 524bbeb011937e..6db191b7a06982 100644 --- a/deps/googletest/include/gtest/internal/gtest-port.h +++ b/deps/googletest/include/gtest/internal/gtest-port.h @@ -26,7 +26,7 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// + // Low-level types and utilities for porting Google Test to various // platforms. All macros ending with _ and symbols defined in an // internal namespace are subject to change without notice. Code @@ -38,7 +38,9 @@ // files are expected to #include this. Therefore, it cannot #include // any other Google Test header. -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_ @@ -81,6 +83,8 @@ // GTEST_HAS_STD_WSTRING - Define it to 1/0 to indicate that // std::wstring does/doesn't work (Google Test can // be used where std::wstring is unavailable). +// GTEST_HAS_FILE_SYSTEM - Define it to 1/0 to indicate whether or not a +// file system is/isn't available. // GTEST_HAS_SEH - Define it to 1/0 to indicate whether the // compiler supports Microsoft's "Structured // Exception Handling". 
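The hunks above flatten googletest's indented `# define` ladder into plain `#define`/`#elif` lines and add Hexagon/QuRT (`__hexagon__` -> GTEST_OS_QURT) to the set of recognized targets. For orientation, a minimal sketch of how embedder code consumes these macros; the helper name and paths are illustrative, not googletest API:

    #include "gtest/internal/gtest-port-arch.h"

    // Sketch: GTEST_OS_* macros are either defined to 1 or left
    // undefined, so plain #if tests work (an undefined macro
    // evaluates to 0 in a preprocessor condition).
    const char* ScratchDir() {  // hypothetical helper
    #if GTEST_OS_WINDOWS
      return "C:\\Temp";
    #elif GTEST_OS_QURT
      // QuRT has no usable file system; see the GTEST_HAS_FILE_SYSTEM
      // plumbing introduced later in this patch.
      return nullptr;
    #else
      return "/tmp";
    #endif
    }
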
@@ -168,7 +172,7 @@ // GTEST_HAS_TYPED_TEST - typed tests // GTEST_HAS_TYPED_TEST_P - type-parameterized tests // GTEST_IS_THREADSAFE - Google Test is thread-safe. -// GOOGLETEST_CM0007 DO NOT DELETE +// GTEST_USES_RE2 - the RE2 regular expression library is used // GTEST_USES_POSIX_RE - enhanced POSIX regex is used. Do not confuse with // GTEST_HAS_POSIX_RE (see above) which users can // define themselves. @@ -191,10 +195,6 @@ // GTEST_AMBIGUOUS_ELSE_BLOCKER_ - for disabling a gcc warning. // GTEST_ATTRIBUTE_UNUSED_ - declares that a class' instances or a // variable don't have to be used. -// GTEST_DISALLOW_ASSIGN_ - disables copy operator=. -// GTEST_DISALLOW_COPY_AND_ASSIGN_ - disables copy ctor and operator=. -// GTEST_DISALLOW_MOVE_ASSIGN_ - disables move operator=. -// GTEST_DISALLOW_MOVE_AND_ASSIGN_ - disables move ctor and operator=. // GTEST_MUST_USE_RESULT_ - declares that a function's result must be used. // GTEST_INTENTIONAL_CONST_COND_PUSH_ - start code section where MSVC C4127 is // suppressed (constant conditional). @@ -218,11 +218,13 @@ // - synchronization primitives. // // Regular expressions: -// RE - a simple regular expression class using the POSIX -// Extended Regular Expression syntax on UNIX-like platforms -// GOOGLETEST_CM0008 DO NOT DELETE -// or a reduced regular exception syntax on other -// platforms, including Windows. +// RE - a simple regular expression class using +// 1) the RE2 syntax on all platforms when built with RE2 +// and Abseil as dependencies +// 2) the POSIX Extended Regular Expression syntax on +// UNIX-like platforms, +// 3) A reduced regular exception syntax on other platforms, +// including Windows. // Logging: // GTEST_LOG_() - logs messages at the specified severity level. // LogToStderr() - directs all log messages to stderr. @@ -242,8 +244,6 @@ // BiggestInt - the biggest signed integer type. // // Command-line utilities: -// GTEST_DECLARE_*() - declares a flag. -// GTEST_DEFINE_*() - defines a flag. // GetInjectableArgvs() - returns the command line as a vector of strings. // // Environment variable utilities: @@ -257,6 +257,19 @@ // deprecated; calling a marked function // should generate a compiler warning +// The definition of GTEST_INTERNAL_CPLUSPLUS_LANG comes first because it can +// potentially be used as an #include guard. +#if defined(_MSVC_LANG) +#define GTEST_INTERNAL_CPLUSPLUS_LANG _MSVC_LANG +#elif defined(__cplusplus) +#define GTEST_INTERNAL_CPLUSPLUS_LANG __cplusplus +#endif + +#if !defined(GTEST_INTERNAL_CPLUSPLUS_LANG) || \ + GTEST_INTERNAL_CPLUSPLUS_LANG < 201402L +#error C++ versions less than C++14 are not supported. 
+#endif + #include // for isspace, etc #include // for ptrdiff_t #include @@ -264,48 +277,56 @@ #include #include +// #include // Guarded by GTEST_IS_THREADSAFE below #include +#include #include +#include +#include +#include +#include +// #include // Guarded by GTEST_IS_THREADSAFE below +#include #include +#include #ifndef _WIN32_WCE -# include -# include +#include +#include #endif // !_WIN32_WCE #if defined __APPLE__ -# include -# include +#include +#include #endif -#include // NOLINT -#include -#include -#include // NOLINT -#include -#include // NOLINT - #include "gtest/internal/custom/gtest-port.h" #include "gtest/internal/gtest-port-arch.h" +#if GTEST_HAS_ABSL +#include "absl/flags/declare.h" +#include "absl/flags/flag.h" +#include "absl/flags/reflection.h" +#endif + #if !defined(GTEST_DEV_EMAIL_) -# define GTEST_DEV_EMAIL_ "googletestframework@@googlegroups.com" -# define GTEST_FLAG_PREFIX_ "gtest_" -# define GTEST_FLAG_PREFIX_DASH_ "gtest-" -# define GTEST_FLAG_PREFIX_UPPER_ "GTEST_" -# define GTEST_NAME_ "Google Test" -# define GTEST_PROJECT_URL_ "https://github.com/google/googletest/" +#define GTEST_DEV_EMAIL_ "googletestframework@@googlegroups.com" +#define GTEST_FLAG_PREFIX_ "gtest_" +#define GTEST_FLAG_PREFIX_DASH_ "gtest-" +#define GTEST_FLAG_PREFIX_UPPER_ "GTEST_" +#define GTEST_NAME_ "Google Test" +#define GTEST_PROJECT_URL_ "https://github.com/google/googletest/" #endif // !defined(GTEST_DEV_EMAIL_) #if !defined(GTEST_INIT_GOOGLE_TEST_NAME_) -# define GTEST_INIT_GOOGLE_TEST_NAME_ "testing::InitGoogleTest" +#define GTEST_INIT_GOOGLE_TEST_NAME_ "testing::InitGoogleTest" #endif // !defined(GTEST_INIT_GOOGLE_TEST_NAME_) // Determines the version of gcc that is used to compile this. #ifdef __GNUC__ // 40302 means version 4.3.2. -# define GTEST_GCC_VER_ \ - (__GNUC__*10000 + __GNUC_MINOR__*100 + __GNUC_PATCHLEVEL__) +#define GTEST_GCC_VER_ \ + (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) #endif // __GNUC__ // Macros for disabling Microsoft Visual C++ warnings. @@ -314,41 +335,37 @@ // /* code that triggers warnings C4800 and C4385 */ // GTEST_DISABLE_MSC_WARNINGS_POP_() #if defined(_MSC_VER) -# define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) \ - __pragma(warning(push)) \ - __pragma(warning(disable: warnings)) -# define GTEST_DISABLE_MSC_WARNINGS_POP_() \ - __pragma(warning(pop)) +#define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) \ + __pragma(warning(push)) __pragma(warning(disable : warnings)) +#define GTEST_DISABLE_MSC_WARNINGS_POP_() __pragma(warning(pop)) #else // Not all compilers are MSVC -# define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) -# define GTEST_DISABLE_MSC_WARNINGS_POP_() +#define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) +#define GTEST_DISABLE_MSC_WARNINGS_POP_() #endif // Clang on Windows does not understand MSVC's pragma warning. // We need clang-specific way to disable function deprecation warning. 
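Before the clang-specific branch below, a usage sketch for these paired macros; warning C4996 (deprecation) is assumed here purely as an example of something worth scoping:

    // Sketch: silence one MSVC warning for one region only. On
    // non-MSVC compilers both macros expand to nothing, so the
    // wrapped code stays portable.
    GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996)
    void CallDeprecatedApi() {
      // ... code that would otherwise trigger C4996 ...
    }
    GTEST_DISABLE_MSC_WARNINGS_POP_()
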
#ifdef __clang__ -# define GTEST_DISABLE_MSC_DEPRECATED_PUSH_() \ - _Pragma("clang diagnostic push") \ - _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") \ - _Pragma("clang diagnostic ignored \"-Wdeprecated-implementations\"") -#define GTEST_DISABLE_MSC_DEPRECATED_POP_() \ - _Pragma("clang diagnostic pop") +#define GTEST_DISABLE_MSC_DEPRECATED_PUSH_() \ + _Pragma("clang diagnostic push") \ + _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") \ + _Pragma("clang diagnostic ignored \"-Wdeprecated-implementations\"") +#define GTEST_DISABLE_MSC_DEPRECATED_POP_() _Pragma("clang diagnostic pop") #else -# define GTEST_DISABLE_MSC_DEPRECATED_PUSH_() \ - GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996) -# define GTEST_DISABLE_MSC_DEPRECATED_POP_() \ - GTEST_DISABLE_MSC_WARNINGS_POP_() +#define GTEST_DISABLE_MSC_DEPRECATED_PUSH_() \ + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996) +#define GTEST_DISABLE_MSC_DEPRECATED_POP_() GTEST_DISABLE_MSC_WARNINGS_POP_() #endif // Brings in definitions for functions used in the testing::internal::posix // namespace (read, write, close, chdir, isatty, stat). We do not currently // use them on Windows Mobile. #if GTEST_OS_WINDOWS -# if !GTEST_OS_WINDOWS_MOBILE -# include -# include -# endif +#if !GTEST_OS_WINDOWS_MOBILE +#include +#include +#endif // In order to avoid having to include , use forward declaration #if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR) // MinGW defined _CRITICAL_SECTION and _RTL_CRITICAL_SECTION as two @@ -368,68 +385,56 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // This assumes that non-Windows OSes provide unistd.h. For OSes where this // is not the case, we need to include headers that provide the functions // mentioned above. -# include -# include +#include +#include #endif // GTEST_OS_WINDOWS #if GTEST_OS_LINUX_ANDROID // Used to define __ANDROID_API__ matching the target NDK API level. -# include // NOLINT +#include // NOLINT #endif // Defines this to true if and only if Google Test can use POSIX regular // expressions. #ifndef GTEST_HAS_POSIX_RE -# if GTEST_OS_LINUX_ANDROID +#if GTEST_OS_LINUX_ANDROID // On Android, is only available starting with Gingerbread. -# define GTEST_HAS_POSIX_RE (__ANDROID_API__ >= 9) -# else -#define GTEST_HAS_POSIX_RE (!GTEST_OS_WINDOWS && !GTEST_OS_XTENSA) -# endif +#define GTEST_HAS_POSIX_RE (__ANDROID_API__ >= 9) +#else +#define GTEST_HAS_POSIX_RE \ + !(GTEST_OS_WINDOWS || GTEST_OS_XTENSA || GTEST_OS_QURT) +#endif #endif -#if GTEST_USES_PCRE -// The appropriate headers have already been included. - +// Select the regular expression implementation. +#if GTEST_HAS_ABSL +// When using Abseil, RE2 is required. +#include "absl/strings/string_view.h" +#include "re2/re2.h" +#define GTEST_USES_RE2 1 #elif GTEST_HAS_POSIX_RE - -// On some platforms, needs someone to define size_t, and -// won't compile otherwise. We can #include it here as we already -// included , which is guaranteed to define size_t through -// . -# include // NOLINT - -# define GTEST_USES_POSIX_RE 1 - -#elif GTEST_OS_WINDOWS - -// is not available on Windows. Use our own simple regex -// implementation instead. -# define GTEST_USES_SIMPLE_RE 1 - +#include // NOLINT +#define GTEST_USES_POSIX_RE 1 #else - -// may not be available on this platform. Use our own -// simple regex implementation instead. -# define GTEST_USES_SIMPLE_RE 1 - -#endif // GTEST_USES_PCRE +// Use our own simple regex implementation. 
+#define GTEST_USES_SIMPLE_RE 1 +#endif #ifndef GTEST_HAS_EXCEPTIONS // The user didn't tell us whether exceptions are enabled, so we need // to figure it out. -# if defined(_MSC_VER) && defined(_CPPUNWIND) +#if defined(_MSC_VER) && defined(_CPPUNWIND) // MSVC defines _CPPUNWIND to 1 if and only if exceptions are enabled. -# define GTEST_HAS_EXCEPTIONS 1 -# elif defined(__BORLANDC__) +#define GTEST_HAS_EXCEPTIONS 1 +#elif defined(__BORLANDC__) // C++Builder's implementation of the STL uses the _HAS_EXCEPTIONS // macro to enable exceptions, so we'll do the same. // Assumes that exceptions are enabled by default. -# ifndef _HAS_EXCEPTIONS -# define _HAS_EXCEPTIONS 1 -# endif // _HAS_EXCEPTIONS -# define GTEST_HAS_EXCEPTIONS _HAS_EXCEPTIONS -# elif defined(__clang__) +#ifndef _HAS_EXCEPTIONS +#define _HAS_EXCEPTIONS 1 +#endif // _HAS_EXCEPTIONS +#define GTEST_HAS_EXCEPTIONS _HAS_EXCEPTIONS +#elif defined(__clang__) // clang defines __EXCEPTIONS if and only if exceptions are enabled before clang // 220714, but if and only if cleanups are enabled after that. In Obj-C++ files, // there can be cleanups for ObjC exceptions which also need cleanups, even if @@ -438,27 +443,27 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // cleanups prior to that. To reliably check for C++ exception availability with // clang, check for // __EXCEPTIONS && __has_feature(cxx_exceptions). -# define GTEST_HAS_EXCEPTIONS (__EXCEPTIONS && __has_feature(cxx_exceptions)) -# elif defined(__GNUC__) && __EXCEPTIONS +#define GTEST_HAS_EXCEPTIONS (__EXCEPTIONS && __has_feature(cxx_exceptions)) +#elif defined(__GNUC__) && __EXCEPTIONS // gcc defines __EXCEPTIONS to 1 if and only if exceptions are enabled. -# define GTEST_HAS_EXCEPTIONS 1 -# elif defined(__SUNPRO_CC) +#define GTEST_HAS_EXCEPTIONS 1 +#elif defined(__SUNPRO_CC) // Sun Pro CC supports exceptions. However, there is no compile-time way of // detecting whether they are enabled or not. Therefore, we assume that // they are enabled unless the user tells us otherwise. -# define GTEST_HAS_EXCEPTIONS 1 -# elif defined(__IBMCPP__) && __EXCEPTIONS +#define GTEST_HAS_EXCEPTIONS 1 +#elif defined(__IBMCPP__) && __EXCEPTIONS // xlC defines __EXCEPTIONS to 1 if and only if exceptions are enabled. -# define GTEST_HAS_EXCEPTIONS 1 -# elif defined(__HP_aCC) +#define GTEST_HAS_EXCEPTIONS 1 +#elif defined(__HP_aCC) // Exception handling is in effect by default in HP aCC compiler. It has to // be turned of by +noeh compiler option if desired. -# define GTEST_HAS_EXCEPTIONS 1 -# else +#define GTEST_HAS_EXCEPTIONS 1 +#else // For other compilers, we assume exceptions are disabled to be // conservative. -# define GTEST_HAS_EXCEPTIONS 0 -# endif // defined(_MSC_VER) || defined(__BORLANDC__) +#define GTEST_HAS_EXCEPTIONS 0 +#endif // defined(_MSC_VER) || defined(__BORLANDC__) #endif // GTEST_HAS_EXCEPTIONS #ifndef GTEST_HAS_STD_WSTRING @@ -469,72 +474,77 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // no support for it at least as recent as Froyo (2.2). #define GTEST_HAS_STD_WSTRING \ (!(GTEST_OS_LINUX_ANDROID || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS || \ - GTEST_OS_HAIKU || GTEST_OS_ESP32 || GTEST_OS_ESP8266 || GTEST_OS_XTENSA)) + GTEST_OS_HAIKU || GTEST_OS_ESP32 || GTEST_OS_ESP8266 || \ + GTEST_OS_XTENSA || GTEST_OS_QURT)) #endif // GTEST_HAS_STD_WSTRING +#ifndef GTEST_HAS_FILE_SYSTEM +// Most platforms support a file system. +#define GTEST_HAS_FILE_SYSTEM 1 +#endif // GTEST_HAS_FILE_SYSTEM + // Determines whether RTTI is available. 
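A reading aid for the detection ladder that follows: whichever branch wins, GTEST_HAS_RTTI ends up defined to 1 or 0, and consumers branch on it at preprocessing time. A minimal consumption sketch; the helper is illustrative, not a googletest API:

    #include <string>
    #if GTEST_HAS_RTTI
    #include <typeinfo>
    #endif

    // Sketch: degrade gracefully when RTTI is compiled out.
    template <typename T>
    std::string NameOfType() {  // hypothetical helper
    #if GTEST_HAS_RTTI
      return typeid(T).name();  // mangled on gcc/clang, readable on MSVC
    #else
      return "<unknown type: built without RTTI>";
    #endif
    }
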
#ifndef GTEST_HAS_RTTI // The user didn't tell us whether RTTI is enabled, so we need to // figure it out. -# ifdef _MSC_VER +#ifdef _MSC_VER #ifdef _CPPRTTI // MSVC defines this macro if and only if RTTI is enabled. -# define GTEST_HAS_RTTI 1 -# else -# define GTEST_HAS_RTTI 0 -# endif +#define GTEST_HAS_RTTI 1 +#else +#define GTEST_HAS_RTTI 0 +#endif // Starting with version 4.3.2, gcc defines __GXX_RTTI if and only if RTTI is // enabled. -# elif defined(__GNUC__) +#elif defined(__GNUC__) -# ifdef __GXX_RTTI +#ifdef __GXX_RTTI // When building against STLport with the Android NDK and with // -frtti -fno-exceptions, the build fails at link time with undefined // references to __cxa_bad_typeid. Note sure if STL or toolchain bug, // so disable RTTI when detected. -# if GTEST_OS_LINUX_ANDROID && defined(_STLPORT_MAJOR) && \ - !defined(__EXCEPTIONS) -# define GTEST_HAS_RTTI 0 -# else -# define GTEST_HAS_RTTI 1 -# endif // GTEST_OS_LINUX_ANDROID && __STLPORT_MAJOR && !__EXCEPTIONS -# else -# define GTEST_HAS_RTTI 0 -# endif // __GXX_RTTI +#if GTEST_OS_LINUX_ANDROID && defined(_STLPORT_MAJOR) && !defined(__EXCEPTIONS) +#define GTEST_HAS_RTTI 0 +#else +#define GTEST_HAS_RTTI 1 +#endif // GTEST_OS_LINUX_ANDROID && __STLPORT_MAJOR && !__EXCEPTIONS +#else +#define GTEST_HAS_RTTI 0 +#endif // __GXX_RTTI // Clang defines __GXX_RTTI starting with version 3.0, but its manual recommends // using has_feature instead. has_feature(cxx_rtti) is supported since 2.7, the // first version with C++ support. -# elif defined(__clang__) +#elif defined(__clang__) -# define GTEST_HAS_RTTI __has_feature(cxx_rtti) +#define GTEST_HAS_RTTI __has_feature(cxx_rtti) // Starting with version 9.0 IBM Visual Age defines __RTTI_ALL__ to 1 if // both the typeid and dynamic_cast features are present. -# elif defined(__IBMCPP__) && (__IBMCPP__ >= 900) +#elif defined(__IBMCPP__) && (__IBMCPP__ >= 900) -# ifdef __RTTI_ALL__ -# define GTEST_HAS_RTTI 1 -# else -# define GTEST_HAS_RTTI 0 -# endif +#ifdef __RTTI_ALL__ +#define GTEST_HAS_RTTI 1 +#else +#define GTEST_HAS_RTTI 0 +#endif -# else +#else // For all other compilers, we assume RTTI is enabled. -# define GTEST_HAS_RTTI 1 +#define GTEST_HAS_RTTI 1 -# endif // _MSC_VER +#endif // _MSC_VER #endif // GTEST_HAS_RTTI // It's this header's responsibility to #include when RTTI // is enabled. #if GTEST_HAS_RTTI -# include +#include #endif // Determines whether Google Test can use the pthreads library. @@ -554,10 +564,10 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; #if GTEST_HAS_PTHREAD // gtest-port.h guarantees to #include when GTEST_HAS_PTHREAD is // true. -# include // NOLINT +#include // NOLINT // For timespec and nanosleep, used below. -# include // NOLINT +#include // NOLINT #endif // Determines whether clone(2) is supported. @@ -567,24 +577,23 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; #ifndef GTEST_HAS_CLONE // The user didn't tell us, so we need to figure it out. -# if GTEST_OS_LINUX && !defined(__ia64__) -# if GTEST_OS_LINUX_ANDROID +#if GTEST_OS_LINUX && !defined(__ia64__) +#if GTEST_OS_LINUX_ANDROID // On Android, clone() became available at different API levels for each 32-bit // architecture. 
-# if defined(__LP64__) || \ - (defined(__arm__) && __ANDROID_API__ >= 9) || \ - (defined(__mips__) && __ANDROID_API__ >= 12) || \ - (defined(__i386__) && __ANDROID_API__ >= 17) -# define GTEST_HAS_CLONE 1 -# else -# define GTEST_HAS_CLONE 0 -# endif -# else -# define GTEST_HAS_CLONE 1 -# endif -# else -# define GTEST_HAS_CLONE 0 -# endif // GTEST_OS_LINUX && !defined(__ia64__) +#if defined(__LP64__) || (defined(__arm__) && __ANDROID_API__ >= 9) || \ + (defined(__mips__) && __ANDROID_API__ >= 12) || \ + (defined(__i386__) && __ANDROID_API__ >= 17) +#define GTEST_HAS_CLONE 1 +#else +#define GTEST_HAS_CLONE 0 +#endif +#else +#define GTEST_HAS_CLONE 1 +#endif +#else +#define GTEST_HAS_CLONE 0 +#endif // GTEST_OS_LINUX && !defined(__ia64__) #endif // GTEST_HAS_CLONE @@ -592,13 +601,15 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // output correctness and to implement death tests. #ifndef GTEST_HAS_STREAM_REDIRECTION // By default, we assume that stream redirection is supported on all -// platforms except known mobile ones. -#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \ - GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_XTENSA -# define GTEST_HAS_STREAM_REDIRECTION 0 -# else -# define GTEST_HAS_STREAM_REDIRECTION 1 -# endif // !GTEST_OS_WINDOWS_MOBILE +// platforms except known mobile / embedded ones. Also, if the port doesn't have +// a file system, stream redirection is not supported. +#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \ + GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_XTENSA || \ + GTEST_OS_QURT || !GTEST_HAS_FILE_SYSTEM +#define GTEST_HAS_STREAM_REDIRECTION 0 +#else +#define GTEST_HAS_STREAM_REDIRECTION 1 +#endif // !GTEST_OS_WINDOWS_MOBILE #endif // GTEST_HAS_STREAM_REDIRECTION // Determines whether to support death tests. @@ -610,7 +621,10 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; GTEST_OS_FREEBSD || GTEST_OS_NETBSD || GTEST_OS_FUCHSIA || \ GTEST_OS_DRAGONFLY || GTEST_OS_GNU_KFREEBSD || GTEST_OS_HAIKU || \ GTEST_OS_GNU_HURD) -# define GTEST_HAS_DEATH_TEST 1 +// Death tests require a file system to work properly. +#if GTEST_HAS_FILE_SYSTEM +#define GTEST_HAS_DEATH_TEST 1 +#endif // GTEST_HAS_FILE_SYSTEM #endif // Determines whether to support type-driven tests. @@ -619,8 +633,8 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // Sun Pro CC, IBM Visual Age, and HP aCC support. #if defined(__GNUC__) || defined(_MSC_VER) || defined(__SUNPRO_CC) || \ defined(__IBMCPP__) || defined(__HP_aCC) -# define GTEST_HAS_TYPED_TEST 1 -# define GTEST_HAS_TYPED_TEST_P 1 +#define GTEST_HAS_TYPED_TEST 1 +#define GTEST_HAS_TYPED_TEST_P 1 #endif // Determines whether the system compiler uses UTF-16 for encoding wide strings. @@ -631,7 +645,7 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; #if GTEST_OS_LINUX || GTEST_OS_GNU_KFREEBSD || GTEST_OS_DRAGONFLY || \ GTEST_OS_FREEBSD || GTEST_OS_NETBSD || GTEST_OS_OPENBSD || \ GTEST_OS_GNU_HURD -# define GTEST_CAN_STREAM_RESULTS_ 1 +#define GTEST_CAN_STREAM_RESULTS_ 1 #endif // Defines some utility macros. @@ -645,83 +659,75 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // // The "switch (0) case 0:" idiom is used to suppress this. 
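To make the hazard concrete, here is a self-contained sketch of the idiom (simplified; MY_CHECK stands in for gtest's assertion macros):

    #include <cstdio>

    // The blocker turns the macro expansion into a complete switch
    // statement, so a user's trailing `else` can never bind to the
    // macro's hidden `if`.
    #define MY_BLOCKER_ switch (0) case 0: default:  // NOLINT
    #define MY_CHECK(cond)                           \
      MY_BLOCKER_                                    \
      if (cond)                                      \
        ;                                            \
      else                                           \
        std::printf("check failed: %s\n", #cond)

    int main() {
      bool ready = true;
      if (ready)
        MY_CHECK(2 + 2 == 4);  // expands to one full statement...
      else                     // ...so this else still pairs with if (ready)
        std::printf("not ready\n");
    }
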
#ifdef __INTEL_COMPILER -# define GTEST_AMBIGUOUS_ELSE_BLOCKER_ +#define GTEST_AMBIGUOUS_ELSE_BLOCKER_ #else -# define GTEST_AMBIGUOUS_ELSE_BLOCKER_ switch (0) case 0: default: // NOLINT +#define GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + switch (0) \ + case 0: \ + default: // NOLINT #endif -// Use this annotation at the end of a struct/class definition to -// prevent the compiler from optimizing away instances that are never -// used. This is useful when all interesting logic happens inside the -// c'tor and / or d'tor. Example: +// GTEST_HAVE_ATTRIBUTE_ // -// struct Foo { -// Foo() { ... } -// } GTEST_ATTRIBUTE_UNUSED_; +// A function-like feature checking macro that is a wrapper around +// `__has_attribute`, which is defined by GCC 5+ and Clang and evaluates to a +// nonzero constant integer if the attribute is supported or 0 if not. // -// Also use it after a variable or parameter declaration to tell the -// compiler the variable/parameter does not have to be used. -#if defined(__GNUC__) && !defined(COMPILER_ICC) -# define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused)) -#elif defined(__clang__) -# if __has_attribute(unused) -# define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused)) -# endif +// It evaluates to zero if `__has_attribute` is not defined by the compiler. +// +// GCC: https://gcc.gnu.org/gcc-5/changes.html +// Clang: https://clang.llvm.org/docs/LanguageExtensions.html +#ifdef __has_attribute +#define GTEST_HAVE_ATTRIBUTE_(x) __has_attribute(x) +#else +#define GTEST_HAVE_ATTRIBUTE_(x) 0 #endif -#ifndef GTEST_ATTRIBUTE_UNUSED_ -# define GTEST_ATTRIBUTE_UNUSED_ + +// GTEST_HAVE_FEATURE_ +// +// A function-like feature checking macro that is a wrapper around +// `__has_feature`. +#ifdef __has_feature +#define GTEST_HAVE_FEATURE_(x) __has_feature(x) +#else +#define GTEST_HAVE_FEATURE_(x) 0 +#endif + +// Use this annotation after a variable or parameter declaration to tell the +// compiler the variable/parameter does not have to be used. +// Example: +// +// GTEST_ATTRIBUTE_UNUSED_ int foo = bar(); +#if GTEST_HAVE_ATTRIBUTE_(unused) +#define GTEST_ATTRIBUTE_UNUSED_ __attribute__((unused)) +#else +#define GTEST_ATTRIBUTE_UNUSED_ #endif // Use this annotation before a function that takes a printf format string. -#if (defined(__GNUC__) || defined(__clang__)) && !defined(COMPILER_ICC) -# if defined(__MINGW_PRINTF_FORMAT) +#if GTEST_HAVE_ATTRIBUTE_(format) && defined(__MINGW_PRINTF_FORMAT) // MinGW has two different printf implementations. Ensure the format macro // matches the selected implementation. See // https://sourceforge.net/p/mingw-w64/wiki2/gnu%20printf/. -# define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \ - __attribute__((__format__(__MINGW_PRINTF_FORMAT, string_index, \ - first_to_check))) -# else -# define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \ - __attribute__((__format__(__printf__, string_index, first_to_check))) -# endif +#define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \ + __attribute__((format(__MINGW_PRINTF_FORMAT, string_index, first_to_check))) +#elif GTEST_HAVE_ATTRIBUTE_(format) +#define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \ + __attribute__((format(printf, string_index, first_to_check))) #else -# define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) +#define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) #endif - -// A macro to disallow copy operator= -// This should be used in the private: declarations for a class. 
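The deletion that follows retires the GTEST_DISALLOW_* family; every call site in this patch is rewritten to spell the deleted special members directly. A before/after sketch of the pattern:

    // Before: copyability suppressed via macro in the private section:
    //   private:
    //    GTEST_DISALLOW_COPY_AND_ASSIGN_(MyType);
    //
    // After: the same intent as explicitly deleted members, which reads
    // without macro indirection and diagnoses misuse at the call site.
    class MyType {
     public:
      MyType() = default;
      MyType(const MyType&) = delete;
      MyType& operator=(const MyType&) = delete;
    };
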
-#define GTEST_DISALLOW_ASSIGN_(type) \ - type& operator=(type const &) = delete - -// A macro to disallow copy constructor and operator= -// This should be used in the private: declarations for a class. -#define GTEST_DISALLOW_COPY_AND_ASSIGN_(type) \ - type(type const&) = delete; \ - type& operator=(type const&) = delete - -// A macro to disallow move operator= -// This should be used in the private: declarations for a class. -#define GTEST_DISALLOW_MOVE_ASSIGN_(type) \ - type& operator=(type &&) noexcept = delete - -// A macro to disallow move constructor and operator= -// This should be used in the private: declarations for a class. -#define GTEST_DISALLOW_MOVE_AND_ASSIGN_(type) \ - type(type&&) noexcept = delete; \ - type& operator=(type&&) noexcept = delete - // Tell the compiler to warn about unused return values for functions declared // with this macro. The macro should be used on function declarations // following the argument list: // // Sprocket* AllocateSprocket() GTEST_MUST_USE_RESULT_; -#if defined(__GNUC__) && !defined(COMPILER_ICC) -# define GTEST_MUST_USE_RESULT_ __attribute__ ((warn_unused_result)) +#if GTEST_HAVE_ATTRIBUTE_(warn_unused_result) +#define GTEST_MUST_USE_RESULT_ __attribute__((warn_unused_result)) #else -# define GTEST_MUST_USE_RESULT_ -#endif // __GNUC__ && !COMPILER_ICC +#define GTEST_MUST_USE_RESULT_ +#endif // MS C++ compiler emits warning when a conditional expression is compile time // constant. In some contexts this warning is false positive and needs to be @@ -731,10 +737,9 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; // while (true) { // GTEST_INTENTIONAL_CONST_COND_POP_() // } -# define GTEST_INTENTIONAL_CONST_COND_PUSH_() \ - GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127) -# define GTEST_INTENTIONAL_CONST_COND_POP_() \ - GTEST_DISABLE_MSC_WARNINGS_POP_() +#define GTEST_INTENTIONAL_CONST_COND_PUSH_() \ + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127) +#define GTEST_INTENTIONAL_CONST_COND_POP_() GTEST_DISABLE_MSC_WARNINGS_POP_() // Determine whether the compiler supports Microsoft's Structured Exception // Handling. This is supported by several Windows compilers but generally @@ -742,13 +747,13 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; #ifndef GTEST_HAS_SEH // The user didn't tell us, so we need to figure it out. -# if defined(_MSC_VER) || defined(__BORLANDC__) +#if defined(_MSC_VER) || defined(__BORLANDC__) // These two compilers are known to support SEH. -# define GTEST_HAS_SEH 1 -# else +#define GTEST_HAS_SEH 1 +#else // Assume no SEH. -# define GTEST_HAS_SEH 0 -# endif +#define GTEST_HAS_SEH 0 +#endif #endif // GTEST_HAS_SEH @@ -761,95 +766,95 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION; #endif // GTEST_IS_THREADSAFE +#if GTEST_IS_THREADSAFE +// Some platforms don't support including these threading related headers. +#include // NOLINT +#include // NOLINT +#endif // GTEST_IS_THREADSAFE + // GTEST_API_ qualifies all symbols that must be exported. 
The definitions below // are guarded by #ifndef to give embedders a chance to define GTEST_API_ in // gtest/internal/custom/gtest-port.h #ifndef GTEST_API_ #ifdef _MSC_VER -# if GTEST_LINKED_AS_SHARED_LIBRARY -# define GTEST_API_ __declspec(dllimport) -# elif GTEST_CREATE_SHARED_LIBRARY -# define GTEST_API_ __declspec(dllexport) -# endif -#elif __GNUC__ >= 4 || defined(__clang__) -# define GTEST_API_ __attribute__((visibility ("default"))) +#if GTEST_LINKED_AS_SHARED_LIBRARY +#define GTEST_API_ __declspec(dllimport) +#elif GTEST_CREATE_SHARED_LIBRARY +#define GTEST_API_ __declspec(dllexport) +#endif +#elif GTEST_HAVE_ATTRIBUTE_(visibility) +#define GTEST_API_ __attribute__((visibility("default"))) #endif // _MSC_VER #endif // GTEST_API_ #ifndef GTEST_API_ -# define GTEST_API_ +#define GTEST_API_ #endif // GTEST_API_ #ifndef GTEST_DEFAULT_DEATH_TEST_STYLE -# define GTEST_DEFAULT_DEATH_TEST_STYLE "fast" +#define GTEST_DEFAULT_DEATH_TEST_STYLE "fast" #endif // GTEST_DEFAULT_DEATH_TEST_STYLE -#ifdef __GNUC__ +#if GTEST_HAVE_ATTRIBUTE_(noinline) // Ask the compiler to never inline a given function. -# define GTEST_NO_INLINE_ __attribute__((noinline)) +#define GTEST_NO_INLINE_ __attribute__((noinline)) +#else +#define GTEST_NO_INLINE_ +#endif + +#if GTEST_HAVE_ATTRIBUTE_(disable_tail_calls) +// Ask the compiler not to perform tail call optimization inside +// the marked function. +#define GTEST_NO_TAIL_CALL_ __attribute__((disable_tail_calls)) +#elif __GNUC__ +#define GTEST_NO_TAIL_CALL_ \ + __attribute__((optimize("no-optimize-sibling-calls"))) #else -# define GTEST_NO_INLINE_ +#define GTEST_NO_TAIL_CALL_ #endif // _LIBCPP_VERSION is defined by the libc++ library from the LLVM project. #if !defined(GTEST_HAS_CXXABI_H_) -# if defined(__GLIBCXX__) || (defined(_LIBCPP_VERSION) && !defined(_MSC_VER)) -# define GTEST_HAS_CXXABI_H_ 1 -# else -# define GTEST_HAS_CXXABI_H_ 0 -# endif +#if defined(__GLIBCXX__) || (defined(_LIBCPP_VERSION) && !defined(_MSC_VER)) +#define GTEST_HAS_CXXABI_H_ 1 +#else +#define GTEST_HAS_CXXABI_H_ 0 +#endif #endif // A function level attribute to disable checking for use of uninitialized // memory when built with MemorySanitizer. -#if defined(__clang__) -# if __has_feature(memory_sanitizer) -# define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ \ - __attribute__((no_sanitize_memory)) -# else -# define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ -# endif // __has_feature(memory_sanitizer) +#if GTEST_HAVE_ATTRIBUTE_(no_sanitize_memory) +#define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ __attribute__((no_sanitize_memory)) #else -# define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ -#endif // __clang__ +#define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ +#endif // A function level attribute to disable AddressSanitizer instrumentation. -#if defined(__clang__) -# if __has_feature(address_sanitizer) -# define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ \ - __attribute__((no_sanitize_address)) -# else -# define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ -# endif // __has_feature(address_sanitizer) +#if GTEST_HAVE_ATTRIBUTE_(no_sanitize_address) +#define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ \ + __attribute__((no_sanitize_address)) #else -# define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ -#endif // __clang__ +#define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ +#endif // A function level attribute to disable HWAddressSanitizer instrumentation. 
-#if defined(__clang__) -# if __has_feature(hwaddress_sanitizer) -# define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ \ - __attribute__((no_sanitize("hwaddress"))) -# else -# define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ -# endif // __has_feature(hwaddress_sanitizer) +#if GTEST_HAVE_FEATURE_(hwaddress_sanitizer) && \ + GTEST_HAVE_ATTRIBUTE_(no_sanitize) +#define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ \ + __attribute__((no_sanitize("hwaddress"))) #else -# define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ -#endif // __clang__ +#define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ +#endif // A function level attribute to disable ThreadSanitizer instrumentation. -#if defined(__clang__) -# if __has_feature(thread_sanitizer) -# define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ \ - __attribute__((no_sanitize_thread)) -# else -# define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ -# endif // __has_feature(thread_sanitizer) +#if GTEST_HAVE_ATTRIBUTE_(no_sanitize_thread) +#define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ __attribute((no_sanitize_thread)) #else -# define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ -#endif // __clang__ +#define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ +#endif namespace testing { @@ -870,25 +875,37 @@ namespace internal { // Secret object, which is what we want. class Secret; -// The GTEST_COMPILE_ASSERT_ is a legacy macro used to verify that a compile -// time expression is true (in new code, use static_assert instead). For -// example, you could use it to verify the size of a static array: -// -// GTEST_COMPILE_ASSERT_(GTEST_ARRAY_SIZE_(names) == NUM_NAMES, -// names_incorrect_size); -// -// The second argument to the macro must be a valid C++ identifier. If the -// expression is false, compiler will issue an error containing this identifier. -#define GTEST_COMPILE_ASSERT_(expr, msg) static_assert(expr, #msg) - // A helper for suppressing warnings on constant condition. It just // returns 'condition'. GTEST_API_ bool IsTrue(bool condition); // Defines RE. -#if GTEST_USES_PCRE -// if used, PCRE is injected by custom/gtest-port.h +#if GTEST_USES_RE2 + +// This is almost `using RE = ::RE2`, except it is copy-constructible, and it +// needs to disambiguate the `std::string`, `absl::string_view`, and `const +// char*` constructors. +class GTEST_API_ RE { + public: + RE(absl::string_view regex) : regex_(regex) {} // NOLINT + RE(const char* regex) : RE(absl::string_view(regex)) {} // NOLINT + RE(const std::string& regex) : RE(absl::string_view(regex)) {} // NOLINT + RE(const RE& other) : RE(other.pattern()) {} + + const std::string& pattern() const { return regex_.pattern(); } + + static bool FullMatch(absl::string_view str, const RE& re) { + return RE2::FullMatch(str, re.regex_); + } + static bool PartialMatch(absl::string_view str, const RE& re) { + return RE2::PartialMatch(str, re.regex_); + } + + private: + RE2 regex_; +}; + #elif GTEST_USES_POSIX_RE || GTEST_USES_SIMPLE_RE // A simple C++ wrapper for . It uses the POSIX Extended @@ -927,19 +944,19 @@ class GTEST_API_ RE { const char* pattern_; bool is_valid_; -# if GTEST_USES_POSIX_RE +#if GTEST_USES_POSIX_RE regex_t full_regex_; // For FullMatch(). regex_t partial_regex_; // For PartialMatch(). -# else // GTEST_USES_SIMPLE_RE +#else // GTEST_USES_SIMPLE_RE const char* full_pattern_; // For FullMatch(); -# endif +#endif }; -#endif // GTEST_USES_PCRE +#endif // ::testing::internal::RE implementation // Formats a source file path and a line number as they would appear // in an error message from the compiler used to compile this code. 
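Stepping back to the regex change above: the RE2-backed RE keeps the same two-predicate surface as the POSIX and simple-regex builds, so call sites do not care which backend was selected. A usage sketch, with the syntax caveat from the comment block in mind (RE2 syntax when built with Abseil/RE2, POSIX ERE on UNIX-like platforms, a reduced syntax elsewhere):

    #include "gtest/internal/gtest-port.h"

    // Sketch: FullMatch anchors at both ends; PartialMatch matches
    // anywhere in the string. Keep patterns conservative so they mean
    // the same thing under every backend.
    bool Demo() {
      const testing::internal::RE re("a.*z");
      const bool full = testing::internal::RE::FullMatch("abcz", re);
      const bool part = testing::internal::RE::PartialMatch("xx_az_yy", re);
      return full && part;  // true under any of the three backends
    }
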
@@ -957,12 +974,7 @@ GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(const char* file, // LogToStderr() - directs all log messages to stderr. // FlushInfoLog() - flushes informational log messages. -enum GTestLogSeverity { - GTEST_INFO, - GTEST_WARNING, - GTEST_ERROR, - GTEST_FATAL -}; +enum GTestLogSeverity { GTEST_INFO, GTEST_WARNING, GTEST_ERROR, GTEST_FATAL }; // Formats log entry severity, provides a stream object for streaming the // log message, and terminates the message with a newline when going out of @@ -979,14 +991,16 @@ class GTEST_API_ GTestLog { private: const GTestLogSeverity severity_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestLog); + GTestLog(const GTestLog&) = delete; + GTestLog& operator=(const GTestLog&) = delete; }; #if !defined(GTEST_LOG_) -# define GTEST_LOG_(severity) \ - ::testing::internal::GTestLog(::testing::internal::GTEST_##severity, \ - __FILE__, __LINE__).GetStream() +#define GTEST_LOG_(severity) \ + ::testing::internal::GTestLog(::testing::internal::GTEST_##severity, \ + __FILE__, __LINE__) \ + .GetStream() inline void LogToStderr() {} inline void FlushInfoLog() { fflush(nullptr); } @@ -998,7 +1012,7 @@ inline void FlushInfoLog() { fflush(nullptr); } // // GTEST_CHECK_ is an all-mode assert. It aborts the program if the condition // is not satisfied. -// Synopsys: +// Synopsis: // GTEST_CHECK_(boolean_condition); // or // GTEST_CHECK_(boolean_condition) << "Additional message"; @@ -1008,12 +1022,12 @@ inline void FlushInfoLog() { fflush(nullptr); } // condition itself, plus additional message streamed into it, if any, // and then it aborts the program. It aborts the program irrespective of // whether it is built in the debug mode or not. -# define GTEST_CHECK_(condition) \ - GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ - if (::testing::internal::IsTrue(condition)) \ - ; \ - else \ - GTEST_LOG_(FATAL) << "Condition " #condition " failed. " +#define GTEST_CHECK_(condition) \ + GTEST_AMBIGUOUS_ELSE_BLOCKER_ \ + if (::testing::internal::IsTrue(condition)) \ + ; \ + else \ + GTEST_LOG_(FATAL) << "Condition " #condition " failed. " #endif // !defined(GTEST_CHECK_) // An all-mode assert to verify that the given POSIX-style function @@ -1022,9 +1036,8 @@ inline void FlushInfoLog() { fflush(nullptr); } // in {} if you need to use it as the only statement in an 'if' // branch. #define GTEST_CHECK_POSIX_SUCCESS_(posix_call) \ - if (const int gtest_error = (posix_call)) \ - GTEST_LOG_(FATAL) << #posix_call << "failed with error " \ - << gtest_error + if (const int gtest_error = (posix_call)) \ + GTEST_LOG_(FATAL) << #posix_call << "failed with error " << gtest_error // Transforms "T" into "const T&" according to standard reference collapsing // rules (this is only needed as a backport for C++98 compilers that do not @@ -1038,9 +1051,13 @@ inline void FlushInfoLog() { fflush(nullptr); } // Note that the non-const reference will not have "const" added. This is // standard, and necessary so that "T" can always bind to "const T&". template -struct ConstRef { typedef const T& type; }; +struct ConstRef { + typedef const T& type; +}; template -struct ConstRef { typedef T& type; }; +struct ConstRef { + typedef T& type; +}; // The argument T must depend on some template parameters. #define GTEST_REFERENCE_TO_CONST_(T) \ @@ -1053,7 +1070,7 @@ struct ConstRef { typedef T& type; }; // const Foo*). When you use ImplicitCast_, the compiler checks that // the cast is safe. 
Such explicit ImplicitCast_s are necessary in // surprisingly many situations where C++ demands an exact type match -// instead of an argument type convertable to a target type. +// instead of an argument type convertible to a target type. // // The syntax for using ImplicitCast_ is the same as for static_cast: // @@ -1066,8 +1083,10 @@ struct ConstRef { typedef T& type; }; // This relatively ugly name is intentional. It prevents clashes with // similar functions users may have (e.g., implicit_cast). The internal // namespace alone is not enough because the function can be found by ADL. -template -inline To ImplicitCast_(To x) { return x; } +template +inline To ImplicitCast_(To x) { + return x; +} // When you upcast (that is, cast a pointer from type Foo to type // SuperclassOfFoo), it's fine to use ImplicitCast_<>, since upcasts @@ -1090,17 +1109,17 @@ inline To ImplicitCast_(To x) { return x; } // This relatively ugly name is intentional. It prevents clashes with // similar functions users may have (e.g., down_cast). The internal // namespace alone is not enough because the function can be found by ADL. -template // use like this: DownCast_(foo); -inline To DownCast_(From* f) { // so we only accept pointers +template // use like this: DownCast_(foo); +inline To DownCast_(From* f) { // so we only accept pointers // Ensures that To is a sub-type of From *. This test is here only // for compile-time type checking, and has no overhead in an // optimized build at run-time, as it will be optimized away // completely. GTEST_INTENTIONAL_CONST_COND_PUSH_() if (false) { - GTEST_INTENTIONAL_CONST_COND_POP_() - const To to = nullptr; - ::testing::internal::ImplicitCast_(to); + GTEST_INTENTIONAL_CONST_COND_POP_() + const To to = nullptr; + ::testing::internal::ImplicitCast_(to); } #if GTEST_HAS_RTTI @@ -1165,71 +1184,8 @@ void ClearInjectableArgvs(); // Defines synchronization primitives. #if GTEST_IS_THREADSAFE -# if GTEST_HAS_PTHREAD -// Sleeps for (roughly) n milliseconds. This function is only for testing -// Google Test's own constructs. Don't use it in user tests, either -// directly or indirectly. -inline void SleepMilliseconds(int n) { - const timespec time = { - 0, // 0 seconds. - n * 1000L * 1000L, // And n ms. - }; - nanosleep(&time, nullptr); -} -# endif // GTEST_HAS_PTHREAD - -# if GTEST_HAS_NOTIFICATION_ -// Notification has already been imported into the namespace. -// Nothing to do here. - -# elif GTEST_HAS_PTHREAD -// Allows a controller thread to pause execution of newly created -// threads until notified. Instances of this class must be created -// and destroyed in the controller thread. -// -// This class is only for testing Google Test's own constructs. Do not -// use it in user tests, either directly or indirectly. -class Notification { - public: - Notification() : notified_(false) { - GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, nullptr)); - } - ~Notification() { - pthread_mutex_destroy(&mutex_); - } - - // Notifies all threads created with this notification to start. Must - // be called from the controller thread. - void Notify() { - pthread_mutex_lock(&mutex_); - notified_ = true; - pthread_mutex_unlock(&mutex_); - } - - // Blocks until the controller thread notifies. Must be called from a test - // thread. 
- void WaitForNotification() { - for (;;) { - pthread_mutex_lock(&mutex_); - const bool notified = notified_; - pthread_mutex_unlock(&mutex_); - if (notified) - break; - SleepMilliseconds(10); - } - } - - private: - pthread_mutex_t mutex_; - bool notified_; - - GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification); -}; - -# elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT - -GTEST_API_ void SleepMilliseconds(int n); +#if GTEST_OS_WINDOWS // Provides leak-safe Windows kernel handle ownership. // Used in death tests and in threading support. class GTEST_API_ AutoHandle { @@ -1256,8 +1212,18 @@ class GTEST_API_ AutoHandle { Handle handle_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(AutoHandle); + AutoHandle(const AutoHandle&) = delete; + AutoHandle& operator=(const AutoHandle&) = delete; }; +#endif + +#if GTEST_HAS_NOTIFICATION_ +// Notification has already been imported into the namespace. +// Nothing to do here. + +#else +GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \ +/* class A needs to have dll-interface to be used by clients of class B */) // Allows a controller thread to pause execution of newly created // threads until notified. Instances of this class must be created @@ -1265,23 +1231,40 @@ class GTEST_API_ AutoHandle { // // This class is only for testing Google Test's own constructs. Do not // use it in user tests, either directly or indirectly. +// TODO(b/203539622): Replace unconditionally with absl::Notification. class GTEST_API_ Notification { public: - Notification(); - void Notify(); - void WaitForNotification(); + Notification() : notified_(false) {} + Notification(const Notification&) = delete; + Notification& operator=(const Notification&) = delete; - private: - AutoHandle event_; + // Notifies all threads created with this notification to start. Must + // be called from the controller thread. + void Notify() { + std::lock_guard lock(mu_); + notified_ = true; + cv_.notify_all(); + } - GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification); + // Blocks until the controller thread notifies. Must be called from a test + // thread. + void WaitForNotification() { + std::unique_lock lock(mu_); + cv_.wait(lock, [this]() { return notified_; }); + } + + private: + std::mutex mu_; + std::condition_variable cv_; + bool notified_; }; -# endif // GTEST_HAS_NOTIFICATION_ +GTEST_DISABLE_MSC_WARNINGS_POP_() // 4251 +#endif // GTEST_HAS_NOTIFICATION_ // On MinGW, we can have both GTEST_OS_WINDOWS and GTEST_HAS_PTHREAD // defined, but we don't want to use MinGW's pthreads implementation, which // has conformance problems with some versions of the POSIX standard. -# if GTEST_HAS_PTHREAD && !GTEST_OS_WINDOWS_MINGW +#if GTEST_HAS_PTHREAD && !GTEST_OS_WINDOWS_MINGW // As a C-function, ThreadFuncWithCLinkage cannot be templated itself. // Consequently, it cannot select a correct instantiation of ThreadWithParam @@ -1357,16 +1340,17 @@ class ThreadWithParam : public ThreadWithParamBase { // finished. pthread_t thread_; // The native thread object. - GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam); + ThreadWithParam(const ThreadWithParam&) = delete; + ThreadWithParam& operator=(const ThreadWithParam&) = delete; }; -# endif // !GTEST_OS_WINDOWS && GTEST_HAS_PTHREAD || - // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ +#endif // !GTEST_OS_WINDOWS && GTEST_HAS_PTHREAD || + // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ -# if GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ +#if GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ // Mutex and ThreadLocal have already been imported into the namespace. // Nothing to do here. 
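One note on the Notification rewrite above: a single std::mutex + std::condition_variable implementation now replaces both the pthread poll loop and the Windows event handle, so WaitForNotification() blocks instead of sleeping in 10 ms increments. A sketch of the documented controller/test-thread handshake (function and thread names illustrative):

    #include <thread>

    // Sketch: the controller creates the Notification, starts the
    // worker, finishes setup, then releases the worker exactly once.
    void RunHandshake() {
      testing::internal::Notification start;
      std::thread worker([&start] {
        start.WaitForNotification();  // blocks until Notify() below
        // ... work that must not race with the setup ...
      });
      // ... setup the worker must wait for ...
      start.Notify();
      worker.join();
    }
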
-# elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT +#elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT // Mutex implements mutex on Windows platforms. It is used in conjunction // with class MutexLock: @@ -1420,14 +1404,15 @@ class GTEST_API_ Mutex { long critical_section_init_phase_; // NOLINT GTEST_CRITICAL_SECTION* critical_section_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex); + Mutex(const Mutex&) = delete; + Mutex& operator=(const Mutex&) = delete; }; -# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ - extern ::testing::internal::Mutex mutex +#define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ + extern ::testing::internal::Mutex mutex -# define GTEST_DEFINE_STATIC_MUTEX_(mutex) \ - ::testing::internal::Mutex mutex(::testing::internal::Mutex::kStaticMutex) +#define GTEST_DEFINE_STATIC_MUTEX_(mutex) \ + ::testing::internal::Mutex mutex(::testing::internal::Mutex::kStaticMutex) // We cannot name this class MutexLock because the ctor declaration would // conflict with a macro named MutexLock, which is defined on some @@ -1436,15 +1421,15 @@ class GTEST_API_ Mutex { // "MutexLock l(&mu)". Hence the typedef trick below. class GTestMutexLock { public: - explicit GTestMutexLock(Mutex* mutex) - : mutex_(mutex) { mutex_->Lock(); } + explicit GTestMutexLock(Mutex* mutex) : mutex_(mutex) { mutex_->Lock(); } ~GTestMutexLock() { mutex_->Unlock(); } private: Mutex* const mutex_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock); + GTestMutexLock(const GTestMutexLock&) = delete; + GTestMutexLock& operator=(const GTestMutexLock&) = delete; }; typedef GTestMutexLock MutexLock; @@ -1471,7 +1456,8 @@ class ThreadLocalBase { virtual ~ThreadLocalBase() {} private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocalBase); + ThreadLocalBase(const ThreadLocalBase&) = delete; + ThreadLocalBase& operator=(const ThreadLocalBase&) = delete; }; // Maps a thread to a set of ThreadLocals that have values instantiated on that @@ -1500,7 +1486,7 @@ class GTEST_API_ ThreadWithParamBase { virtual void Run() = 0; }; - ThreadWithParamBase(Runnable *runnable, Notification* thread_can_start); + ThreadWithParamBase(Runnable* runnable, Notification* thread_can_start); virtual ~ThreadWithParamBase(); private: @@ -1514,30 +1500,26 @@ class ThreadWithParam : public ThreadWithParamBase { typedef void UserThreadFunc(T); ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start) - : ThreadWithParamBase(new RunnableImpl(func, param), thread_can_start) { - } + : ThreadWithParamBase(new RunnableImpl(func, param), thread_can_start) {} virtual ~ThreadWithParam() {} private: class RunnableImpl : public Runnable { public: - RunnableImpl(UserThreadFunc* func, T param) - : func_(func), - param_(param) { - } + RunnableImpl(UserThreadFunc* func, T param) : func_(func), param_(param) {} virtual ~RunnableImpl() {} - virtual void Run() { - func_(param_); - } + virtual void Run() { func_(param_); } private: UserThreadFunc* const func_; const T param_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(RunnableImpl); + RunnableImpl(const RunnableImpl&) = delete; + RunnableImpl& operator=(const RunnableImpl&) = delete; }; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam); + ThreadWithParam(const ThreadWithParam&) = delete; + ThreadWithParam& operator=(const ThreadWithParam&) = delete; }; // Implements thread-local storage on Windows systems. 
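Before the ThreadLocal hunks below, a quick consumption sketch: each thread that touches the object gets its own lazily constructed T, seeded either by T's default constructor or by a prototype value, and the factory plumbing edited below is what decides between the two:

    // Sketch: one counter per thread; increments made in one thread
    // are invisible to every other thread.
    testing::internal::ThreadLocal<int> counter(0);  // prototype value 0

    void BumpThisThreadsCounter() {
      counter.set(counter.get() + 1);  // touches this thread's copy only
    }
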
@@ -1574,7 +1556,7 @@ class ThreadLocal : public ThreadLocalBase { explicit ThreadLocal(const T& value) : default_factory_(new InstanceValueHolderFactory(value)) {} - ~ThreadLocal() { ThreadLocalRegistry::OnThreadLocalDestroyed(this); } + ~ThreadLocal() override { ThreadLocalRegistry::OnThreadLocalDestroyed(this); } T* pointer() { return GetOrCreateValue(); } const T* pointer() const { return GetOrCreateValue(); } @@ -1593,16 +1575,17 @@ class ThreadLocal : public ThreadLocalBase { private: T value_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder); + ValueHolder(const ValueHolder&) = delete; + ValueHolder& operator=(const ValueHolder&) = delete; }; - T* GetOrCreateValue() const { return static_cast( - ThreadLocalRegistry::GetValueOnCurrentThread(this))->pointer(); + ThreadLocalRegistry::GetValueOnCurrentThread(this)) + ->pointer(); } - virtual ThreadLocalValueHolderBase* NewValueForCurrentThread() const { + ThreadLocalValueHolderBase* NewValueForCurrentThread() const override { return default_factory_->MakeNewHolder(); } @@ -1613,7 +1596,8 @@ class ThreadLocal : public ThreadLocalBase { virtual ValueHolder* MakeNewHolder() const = 0; private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory); + ValueHolderFactory(const ValueHolderFactory&) = delete; + ValueHolderFactory& operator=(const ValueHolderFactory&) = delete; }; class DefaultValueHolderFactory : public ValueHolderFactory { @@ -1622,7 +1606,9 @@ class ThreadLocal : public ThreadLocalBase { ValueHolder* MakeNewHolder() const override { return new ValueHolder(); } private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory); + DefaultValueHolderFactory(const DefaultValueHolderFactory&) = delete; + DefaultValueHolderFactory& operator=(const DefaultValueHolderFactory&) = + delete; }; class InstanceValueHolderFactory : public ValueHolderFactory { @@ -1635,15 +1621,18 @@ class ThreadLocal : public ThreadLocalBase { private: const T value_; // The value for each thread. - GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory); + InstanceValueHolderFactory(const InstanceValueHolderFactory&) = delete; + InstanceValueHolderFactory& operator=(const InstanceValueHolderFactory&) = + delete; }; std::unique_ptr default_factory_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal); + ThreadLocal(const ThreadLocal&) = delete; + ThreadLocal& operator=(const ThreadLocal&) = delete; }; -# elif GTEST_HAS_PTHREAD +#elif GTEST_HAS_PTHREAD // MutexBase and Mutex implement mutex on pthreads-based platforms. class MutexBase { @@ -1690,8 +1679,8 @@ class MutexBase { }; // Forward-declares a static mutex. -# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ - extern ::testing::internal::MutexBase mutex +#define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ + extern ::testing::internal::MutexBase mutex // Defines and statically (i.e. at link time) initializes a static mutex. // The initialization list here does not explicitly initialize each field, @@ -1710,12 +1699,11 @@ class Mutex : public MutexBase { GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, nullptr)); has_owner_ = false; } - ~Mutex() { - GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&mutex_)); - } + ~Mutex() { GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&mutex_)); } private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex); + Mutex(const Mutex&) = delete; + Mutex& operator=(const Mutex&) = delete; }; // We cannot name this class MutexLock because the ctor declaration would @@ -1725,15 +1713,15 @@ class Mutex : public MutexBase { // "MutexLock l(&mu)". Hence the typedef trick below. 
class GTestMutexLock { public: - explicit GTestMutexLock(MutexBase* mutex) - : mutex_(mutex) { mutex_->Lock(); } + explicit GTestMutexLock(MutexBase* mutex) : mutex_(mutex) { mutex_->Lock(); } ~GTestMutexLock() { mutex_->Unlock(); } private: MutexBase* const mutex_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock); + GTestMutexLock(const GTestMutexLock&) = delete; + GTestMutexLock& operator=(const GTestMutexLock&) = delete; }; typedef GTestMutexLock MutexLock; @@ -1744,7 +1732,7 @@ typedef GTestMutexLock MutexLock; // C-linkage. Therefore it cannot be templatized to access // ThreadLocal. Hence the need for class // ThreadLocalValueHolderBase. -class ThreadLocalValueHolderBase { +class GTEST_API_ ThreadLocalValueHolderBase { public: virtual ~ThreadLocalValueHolderBase() {} }; @@ -1790,7 +1778,8 @@ class GTEST_API_ ThreadLocal { private: T value_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder); + ValueHolder(const ValueHolder&) = delete; + ValueHolder& operator=(const ValueHolder&) = delete; }; static pthread_key_t CreateKey() { @@ -1822,7 +1811,8 @@ class GTEST_API_ ThreadLocal { virtual ValueHolder* MakeNewHolder() const = 0; private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory); + ValueHolderFactory(const ValueHolderFactory&) = delete; + ValueHolderFactory& operator=(const ValueHolderFactory&) = delete; }; class DefaultValueHolderFactory : public ValueHolderFactory { @@ -1831,7 +1821,9 @@ class GTEST_API_ ThreadLocal { ValueHolder* MakeNewHolder() const override { return new ValueHolder(); } private: - GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory); + DefaultValueHolderFactory(const DefaultValueHolderFactory&) = delete; + DefaultValueHolderFactory& operator=(const DefaultValueHolderFactory&) = + delete; }; class InstanceValueHolderFactory : public ValueHolderFactory { @@ -1844,17 +1836,20 @@ class GTEST_API_ ThreadLocal { private: const T value_; // The value for each thread. - GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory); + InstanceValueHolderFactory(const InstanceValueHolderFactory&) = delete; + InstanceValueHolderFactory& operator=(const InstanceValueHolderFactory&) = + delete; }; // A key pthreads uses for looking up per-thread values. 
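That key is the key_ member declared just below; for readers new to the pthreads TLS API this branch builds on, the raw pattern looks like this (a standalone sketch, not gtest code):

    #include <pthread.h>

    // Sketch of the raw pthreads pattern ThreadLocal wraps: one
    // process-wide key, a per-thread slot reached through it, and a
    // destructor pthreads invokes for each thread's value at thread
    // exit (CreateKey() above does the gtest-side registration).
    static pthread_key_t g_key;

    static void DestroyValue(void* value) {
      delete static_cast<int*>(value);
    }

    void InitKeyOnce() { pthread_key_create(&g_key, &DestroyValue); }

    int* MyThreadsValue() {
      void* p = pthread_getspecific(g_key);  // null on first use per thread
      if (p == nullptr) {
        p = new int(0);
        pthread_setspecific(g_key, p);  // install this thread's instance
      }
      return static_cast<int*>(p);
    }
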
const pthread_key_t key_; std::unique_ptr default_factory_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal); + ThreadLocal(const ThreadLocal&) = delete; + ThreadLocal& operator=(const ThreadLocal&) = delete; }; -# endif // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ +#endif // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ #else // GTEST_IS_THREADSAFE @@ -1871,10 +1866,10 @@ class Mutex { void AssertHeld() const {} }; -# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ +#define GTEST_DECLARE_STATIC_MUTEX_(mutex) \ extern ::testing::internal::Mutex mutex -# define GTEST_DEFINE_STATIC_MUTEX_(mutex) ::testing::internal::Mutex mutex +#define GTEST_DEFINE_STATIC_MUTEX_(mutex) ::testing::internal::Mutex mutex // We cannot name this class MutexLock because the ctor declaration would // conflict with a macro named MutexLock, which is defined on some @@ -1897,6 +1892,7 @@ class GTEST_API_ ThreadLocal { const T* pointer() const { return &value_; } const T& get() const { return value_; } void set(const T& value) { value_ = value; } + private: T value_; }; @@ -1908,11 +1904,11 @@ class GTEST_API_ ThreadLocal { GTEST_API_ size_t GetThreadCount(); #if GTEST_OS_WINDOWS -# define GTEST_PATH_SEP_ "\\" -# define GTEST_HAS_ALT_PATH_SEP_ 1 +#define GTEST_PATH_SEP_ "\\" +#define GTEST_HAS_ALT_PATH_SEP_ 1 #else -# define GTEST_PATH_SEP_ "/" -# define GTEST_HAS_ALT_PATH_SEP_ 0 +#define GTEST_PATH_SEP_ "/" +#define GTEST_HAS_ALT_PATH_SEP_ 0 #endif // GTEST_OS_WINDOWS // Utilities for char. @@ -1970,8 +1966,7 @@ inline char ToUpper(char ch) { inline std::string StripTrailingSpaces(std::string str) { std::string::iterator it = str.end(); - while (it != str.begin() && IsSpace(*--it)) - it = str.erase(it); + while (it != str.begin() && IsSpace(*--it)) it = str.erase(it); return str; } @@ -1983,56 +1978,31 @@ inline std::string StripTrailingSpaces(std::string str) { namespace posix { -// Functions with a different name on Windows. - +// File system porting. +#if GTEST_HAS_FILE_SYSTEM #if GTEST_OS_WINDOWS typedef struct _stat StatStruct; -# ifdef __BORLANDC__ -inline int DoIsATTY(int fd) { return isatty(fd); } -inline int StrCaseCmp(const char* s1, const char* s2) { - return stricmp(s1, s2); -} -inline char* StrDup(const char* src) { return strdup(src); } -# else // !__BORLANDC__ -# if GTEST_OS_WINDOWS_MOBILE -inline int DoIsATTY(int /* fd */) { return 0; } -# else -inline int DoIsATTY(int fd) { return _isatty(fd); } -# endif // GTEST_OS_WINDOWS_MOBILE -inline int StrCaseCmp(const char* s1, const char* s2) { - return _stricmp(s1, s2); -} -inline char* StrDup(const char* src) { return _strdup(src); } -# endif // __BORLANDC__ - -# if GTEST_OS_WINDOWS_MOBILE +#if GTEST_OS_WINDOWS_MOBILE inline int FileNo(FILE* file) { return reinterpret_cast(_fileno(file)); } // Stat(), RmDir(), and IsDir() are not needed on Windows CE at this // time and thus not defined there. 
-# else +#else inline int FileNo(FILE* file) { return _fileno(file); } inline int Stat(const char* path, StatStruct* buf) { return _stat(path, buf); } inline int RmDir(const char* dir) { return _rmdir(dir); } -inline bool IsDir(const StatStruct& st) { - return (_S_IFDIR & st.st_mode) != 0; -} -# endif // GTEST_OS_WINDOWS_MOBILE +inline bool IsDir(const StatStruct& st) { return (_S_IFDIR & st.st_mode) != 0; } +#endif // GTEST_OS_WINDOWS_MOBILE #elif GTEST_OS_ESP8266 typedef struct stat StatStruct; inline int FileNo(FILE* file) { return fileno(file); } -inline int DoIsATTY(int fd) { return isatty(fd); } inline int Stat(const char* path, StatStruct* buf) { // stat function not implemented on ESP8266 return 0; } -inline int StrCaseCmp(const char* s1, const char* s2) { - return strcasecmp(s1, s2); -} -inline char* StrDup(const char* src) { return strdup(src); } inline int RmDir(const char* dir) { return rmdir(dir); } inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); } @@ -2041,14 +2011,56 @@ inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); } typedef struct stat StatStruct; inline int FileNo(FILE* file) { return fileno(file); } -inline int DoIsATTY(int fd) { return isatty(fd); } inline int Stat(const char* path, StatStruct* buf) { return stat(path, buf); } +#if GTEST_OS_QURT +// QuRT doesn't support any directory functions, including rmdir +inline int RmDir(const char*) { return 0; } +#else +inline int RmDir(const char* dir) { return rmdir(dir); } +#endif +inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); } + +#endif // GTEST_OS_WINDOWS +#endif // GTEST_HAS_FILE_SYSTEM + +// Other functions with a different name on Windows. + +#if GTEST_OS_WINDOWS + +#ifdef __BORLANDC__ +inline int DoIsATTY(int fd) { return isatty(fd); } +inline int StrCaseCmp(const char* s1, const char* s2) { + return stricmp(s1, s2); +} +inline char* StrDup(const char* src) { return strdup(src); } +#else // !__BORLANDC__ +#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \ + GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT || defined(ESP_PLATFORM) +inline int DoIsATTY(int /* fd */) { return 0; } +#else +inline int DoIsATTY(int fd) { return _isatty(fd); } +#endif // GTEST_OS_WINDOWS_MOBILE +inline int StrCaseCmp(const char* s1, const char* s2) { + return _stricmp(s1, s2); +} +inline char* StrDup(const char* src) { return _strdup(src); } +#endif // __BORLANDC__ + +#elif GTEST_OS_ESP8266 + +inline int DoIsATTY(int fd) { return isatty(fd); } +inline int StrCaseCmp(const char* s1, const char* s2) { + return strcasecmp(s1, s2); +} +inline char* StrDup(const char* src) { return strdup(src); } + +#else + +inline int DoIsATTY(int fd) { return isatty(fd); } inline int StrCaseCmp(const char* s1, const char* s2) { return strcasecmp(s1, s2); } inline char* StrDup(const char* src) { return strdup(src); } -inline int RmDir(const char* dir) { return rmdir(dir); } -inline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); } #endif // GTEST_OS_WINDOWS @@ -2070,9 +2082,10 @@ GTEST_DISABLE_MSC_DEPRECATED_PUSH_() // ChDir(), FReopen(), FDOpen(), Read(), Write(), Close(), and // StrError() aren't needed on Windows CE at this time and thus not // defined there. 
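For orientation, the posix::* shims collected above are what let the rest of googletest call file and string routines without per-platform #ifdefs at each call site. A minimal usage sketch (illustrative only, not part of the upstream diff; it simply exercises the StatStruct/Stat/IsDir and StrCaseCmp wrappers defined here, in a translation unit that includes gtest-port.h and, on this branch, has GTEST_HAS_FILE_SYSTEM set):

    namespace posix = ::testing::internal::posix;
    posix::StatStruct st{};  // _stat on Windows, stat elsewhere
    const bool is_dir = posix::Stat("deps", &st) == 0 && posix::IsDir(st);
    const bool same = posix::StrCaseCmp("ReadMe", "README") == 0;  // _stricmp vs strcasecmp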
- -#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && \ - !GTEST_OS_WINDOWS_RT && !GTEST_OS_ESP8266 && !GTEST_OS_XTENSA +#if GTEST_HAS_FILE_SYSTEM +#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && \ + !GTEST_OS_WINDOWS_RT && !GTEST_OS_ESP8266 && !GTEST_OS_XTENSA && \ + !GTEST_OS_QURT inline int ChDir(const char* dir) { return chdir(dir); } #endif inline FILE* FOpen(const char* path, const char* mode) { @@ -2082,18 +2095,18 @@ inline FILE* FOpen(const char* path, const char* mode) { std::wstring wide_path = converter.from_bytes(path); std::wstring wide_mode = converter.from_bytes(mode); return _wfopen(wide_path.c_str(), wide_mode.c_str()); -#else // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW +#else // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW return fopen(path, mode); #endif // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW } -#if !GTEST_OS_WINDOWS_MOBILE -inline FILE *FReopen(const char* path, const char* mode, FILE* stream) { +#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT +inline FILE* FReopen(const char* path, const char* mode, FILE* stream) { return freopen(path, mode, stream); } inline FILE* FDOpen(int fd, const char* mode) { return fdopen(fd, mode); } -#endif +#endif // !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT inline int FClose(FILE* fp) { return fclose(fp); } -#if !GTEST_OS_WINDOWS_MOBILE +#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT inline int Read(int fd, void* buf, unsigned int count) { return static_cast<int>(read(fd, buf, count)); } @@ -2101,11 +2114,17 @@ inline int Write(int fd, const void* buf, unsigned int count) { return static_cast<int>(write(fd, buf, count)); } inline int Close(int fd) { return close(fd); } +#endif // !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT +#endif // GTEST_HAS_FILE_SYSTEM + +#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT inline const char* StrError(int errnum) { return strerror(errnum); } -#endif +#endif // !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_QURT + inline const char* GetEnv(const char* name) { -#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \ - GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_XTENSA +#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \ + GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_XTENSA || \ + GTEST_OS_QURT // We are on an embedded platform, which has no environment variables. static_cast<void>(name); // To prevent 'unused argument' warning. return nullptr; @@ -2137,15 +2156,15 @@ GTEST_DISABLE_MSC_DEPRECATED_POP_() // MSVC-based platforms. We map the GTEST_SNPRINTF_ macro to the appropriate // function in order to achieve that. We use macro definition here because // snprintf is a variadic function. -#if _MSC_VER && !GTEST_OS_WINDOWS_MOBILE +#if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE // MSVC 2005 and above support variadic macros. -# define GTEST_SNPRINTF_(buffer, size, format, ...) \ - _snprintf_s(buffer, size, size, format, __VA_ARGS__) +#define GTEST_SNPRINTF_(buffer, size, format, ...) \ + _snprintf_s(buffer, size, size, format, __VA_ARGS__) #elif defined(_MSC_VER) // Windows CE does not define _snprintf_s -# define GTEST_SNPRINTF_ _snprintf +#define GTEST_SNPRINTF_ _snprintf #else -# define GTEST_SNPRINTF_ snprintf +#define GTEST_SNPRINTF_ snprintf #endif // The biggest signed integer type the compiler supports. @@ -2205,55 +2224,84 @@ using TimeInMillis = int64_t; // Represents time in milliseconds. // Macro for referencing flags.
#if !defined(GTEST_FLAG) -# define GTEST_FLAG(name) FLAGS_gtest_##name +#define GTEST_FLAG_NAME_(name) gtest_##name +#define GTEST_FLAG(name) FLAGS_gtest_##name #endif // !defined(GTEST_FLAG) -#if !defined(GTEST_USE_OWN_FLAGFILE_FLAG_) -# define GTEST_USE_OWN_FLAGFILE_FLAG_ 1 -#endif // !defined(GTEST_USE_OWN_FLAGFILE_FLAG_) +// Pick a command line flags implementation. +#if GTEST_HAS_ABSL -#if !defined(GTEST_DECLARE_bool_) -# define GTEST_FLAG_SAVER_ ::testing::internal::GTestFlagSaver +// Macros for defining flags. +#define GTEST_DEFINE_bool_(name, default_val, doc) \ + ABSL_FLAG(bool, GTEST_FLAG_NAME_(name), default_val, doc) +#define GTEST_DEFINE_int32_(name, default_val, doc) \ + ABSL_FLAG(int32_t, GTEST_FLAG_NAME_(name), default_val, doc) +#define GTEST_DEFINE_string_(name, default_val, doc) \ + ABSL_FLAG(std::string, GTEST_FLAG_NAME_(name), default_val, doc) // Macros for declaring flags. -#define GTEST_DECLARE_bool_(name) \ - namespace testing { \ - GTEST_API_ extern bool GTEST_FLAG(name); \ - } -#define GTEST_DECLARE_int32_(name) \ - namespace testing { \ - GTEST_API_ extern std::int32_t GTEST_FLAG(name); \ - } -#define GTEST_DECLARE_string_(name) \ - namespace testing { \ - GTEST_API_ extern ::std::string GTEST_FLAG(name); \ - } +#define GTEST_DECLARE_bool_(name) \ + ABSL_DECLARE_FLAG(bool, GTEST_FLAG_NAME_(name)) +#define GTEST_DECLARE_int32_(name) \ + ABSL_DECLARE_FLAG(int32_t, GTEST_FLAG_NAME_(name)) +#define GTEST_DECLARE_string_(name) \ + ABSL_DECLARE_FLAG(std::string, GTEST_FLAG_NAME_(name)) + +#define GTEST_FLAG_SAVER_ ::absl::FlagSaver + +#define GTEST_FLAG_GET(name) ::absl::GetFlag(GTEST_FLAG(name)) +#define GTEST_FLAG_SET(name, value) \ + (void)(::absl::SetFlag(&GTEST_FLAG(name), value)) +#define GTEST_USE_OWN_FLAGFILE_FLAG_ 0 + +#else // GTEST_HAS_ABSL // Macros for defining flags. #define GTEST_DEFINE_bool_(name, default_val, doc) \ namespace testing { \ GTEST_API_ bool GTEST_FLAG(name) = (default_val); \ - } + } \ + static_assert(true, "no-op to require trailing semicolon") #define GTEST_DEFINE_int32_(name, default_val, doc) \ namespace testing { \ GTEST_API_ std::int32_t GTEST_FLAG(name) = (default_val); \ - } + } \ + static_assert(true, "no-op to require trailing semicolon") #define GTEST_DEFINE_string_(name, default_val, doc) \ namespace testing { \ GTEST_API_ ::std::string GTEST_FLAG(name) = (default_val); \ - } + } \ + static_assert(true, "no-op to require trailing semicolon") -#endif // !defined(GTEST_DECLARE_bool_) +// Macros for declaring flags.
+#define GTEST_DECLARE_bool_(name) \ + namespace testing { \ + GTEST_API_ extern bool GTEST_FLAG(name); \ + } \ + static_assert(true, "no-op to require trailing semicolon") +#define GTEST_DECLARE_int32_(name) \ + namespace testing { \ + GTEST_API_ extern std::int32_t GTEST_FLAG(name); \ + } \ + static_assert(true, "no-op to require trailing semicolon") +#define GTEST_DECLARE_string_(name) \ + namespace testing { \ + GTEST_API_ extern ::std::string GTEST_FLAG(name); \ + } \ + static_assert(true, "no-op to require trailing semicolon") + +#define GTEST_FLAG_SAVER_ ::testing::internal::GTestFlagSaver -#if !defined(GTEST_FLAG_GET) #define GTEST_FLAG_GET(name) ::testing::GTEST_FLAG(name) #define GTEST_FLAG_SET(name, value) (void)(::testing::GTEST_FLAG(name) = value) -#endif // !defined(GTEST_FLAG_GET) +#define GTEST_USE_OWN_FLAGFILE_FLAG_ 1 + +#endif // GTEST_HAS_ABSL // Thread annotations #if !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_) -# define GTEST_EXCLUSIVE_LOCK_REQUIRED_(locks) -# define GTEST_LOCK_EXCLUDED_(locks) +#define GTEST_EXCLUSIVE_LOCK_REQUIRED_(locks) +#define GTEST_LOCK_EXCLUDED_(locks) #endif // !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_) // Parses 'str' for a 32-bit signed integer. If successful, writes the result @@ -2329,6 +2377,7 @@ namespace testing { namespace internal { template <typename T> using Optional = ::absl::optional<T>; +inline ::absl::nullopt_t Nullopt() { return ::absl::nullopt; } } // namespace internal } // namespace testing #else @@ -2342,6 +2391,7 @@ namespace testing { namespace internal { template <typename T> using Optional = ::std::optional<T>; +inline ::std::nullopt_t Nullopt() { return ::std::nullopt; } } // namespace internal } // namespace testing // The case where absl is configured NOT to alias std::optional is not @@ -2353,7 +2403,7 @@ using Optional = ::std::optional<T>; #if GTEST_HAS_ABSL // Always use absl::string_view for Matcher<> specializations if googletest // is built with absl support. -# define GTEST_INTERNAL_HAS_STRING_VIEW 1 +#define GTEST_INTERNAL_HAS_STRING_VIEW 1 #include "absl/strings/string_view.h" namespace testing { namespace internal { using StringView = ::absl::string_view; } // namespace internal } // namespace testing #else -# ifdef __has_include -# if __has_include(<string_view>) && __cplusplus >= 201703L +#ifdef __has_include +#if __has_include(<string_view>) && __cplusplus >= 201703L // Otherwise for C++17 and higher use std::string_view for Matcher<> // specializations. -# define GTEST_INTERNAL_HAS_STRING_VIEW 1 +#define GTEST_INTERNAL_HAS_STRING_VIEW 1 #include <string_view> namespace testing { namespace internal { using StringView = ::std::string_view; } // namespace internal } // namespace testing // The case where absl is configured NOT to alias std::string_view is not // supported. -# endif // __has_include(<string_view>) && __cplusplus >= 201703L -# endif // __has_include +#endif // __has_include(<string_view>) && __cplusplus >= 201703L +#endif // __has_include #endif // GTEST_HAS_ABSL #if GTEST_HAS_ABSL diff --git a/deps/googletest/include/gtest/internal/gtest-string.h b/deps/googletest/include/gtest/internal/gtest-string.h index 10f774f96606e5..cc0dd7529c1654 --- a/deps/googletest/include/gtest/internal/gtest-string.h +++ b/deps/googletest/include/gtest/internal/gtest-string.h @@ -26,7 +26,7 @@ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// + // The Google C++ Testing and Mocking Framework (Google Test) // // This header file declares the String class and functions used internally by // @@ -36,18 +36,22 @@ // This header file is #included by gtest-internal.h. // It should not be #included by other files. -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_ #ifdef __BORLANDC__ // string.h is not guaranteed to provide strcpy on C++ Builder. -# include <mem.h> +#include <mem.h> #endif #include <string.h> + #include <cstdint> +#include <sstream> #include <string> #include "gtest/internal/gtest-port.h" @@ -123,8 +127,7 @@ class GTEST_API_ String { // Unlike strcasecmp(), this function can handle NULL argument(s). // A NULL C string is considered different to any non-NULL C string, // including the empty string. - static bool CaseInsensitiveCStringEquals(const char* lhs, - const char* rhs); + static bool CaseInsensitiveCStringEquals(const char* lhs, const char* rhs); // Compares two wide C strings, ignoring case. Returns true if and only if // they have the same content. @@ -143,8 +146,8 @@ class GTEST_API_ String { // Returns true if and only if the given string ends with the given suffix, // ignoring case. Any string is considered to end with an empty suffix. - static bool EndsWithCaseInsensitive( - const std::string& str, const std::string& suffix); + static bool EndsWithCaseInsensitive(const std::string& str, + const std::string& suffix); // Formats an int value as "%02d". static std::string FormatIntWidth2(int value); // "%02d" for width == 2 @@ -163,7 +166,7 @@ class GTEST_API_ String { private: String(); // Not meant to be instantiated. -}; // class String +}; // class String // Gets the content of the stringstream's buffer as an std::string. Each '\0' // character in the buffer is replaced with "\\0". diff --git a/deps/googletest/include/gtest/internal/gtest-type-util.h b/deps/googletest/include/gtest/internal/gtest-type-util.h index b87a2e2cace33b..17a470b62658a4 --- a/deps/googletest/include/gtest/internal/gtest-type-util.h +++ b/deps/googletest/include/gtest/internal/gtest-type-util.h @@ -30,20 +30,26 @@ // Type utilities needed for implementing typed and type-parameterized // tests. -// GOOGLETEST_CM0001 DO NOT DELETE +// IWYU pragma: private, include "gtest/gtest.h" +// IWYU pragma: friend gtest/.* +// IWYU pragma: friend gmock/.* #ifndef GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_ #define GOOGLETEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_ +#include <string> +#include <type_traits> +#include <typeinfo> + #include "gtest/internal/gtest-port.h" // #ifdef __GNUC__ is too general here. It is possible to use gcc without using // libstdc++ (which is where cxxabi.h comes from).
-# if GTEST_HAS_CXXABI_H_ -# include <cxxabi.h> -# elif defined(__HP_aCC) -# include <acxx_demangle.h> -# endif // GTEST_HASH_CXXABI_H_ +#if GTEST_HAS_CXXABI_H_ +#include <cxxabi.h> +#elif defined(__HP_aCC) +#include <acxx_demangle.h> +#endif // GTEST_HASH_CXXABI_H_ namespace testing { namespace internal { @@ -101,7 +107,9 @@ std::string GetTypeName() { // A unique type indicating an empty node struct None {}; -# define GTEST_TEMPLATE_ template <typename T> class +#define GTEST_TEMPLATE_ \ + template <typename T> \ + class // The template "selector" struct TemplateSel<Tmpl> is used to // represent Tmpl, which must be a class template with one type @@ -119,8 +127,7 @@ struct TemplateSel { }; }; -# define GTEST_BIND_(TmplSel, T) \ - TmplSel::template Bind<T>::type +#define GTEST_BIND_(TmplSel, T) TmplSel::template Bind<T>::type template <GTEST_TEMPLATE_ Head_, GTEST_TEMPLATE_... Tail_> struct Templates { diff --git a/deps/googletest/src/gtest-all.cc b/deps/googletest/src/gtest-all.cc index ad292905cf381d..2a70ed88c7841a --- a/deps/googletest/src/gtest-all.cc +++ b/deps/googletest/src/gtest-all.cc @@ -38,7 +38,7 @@ #include "gtest/gtest.h" // The following lines pull in the real gtest *.cc files. -#include "src/gtest.cc" +#include "src/gtest-assertion-result.cc" #include "src/gtest-death-test.cc" #include "src/gtest-filepath.cc" #include "src/gtest-matchers.cc" @@ -46,3 +46,4 @@ #include "src/gtest-printers.cc" #include "src/gtest-test-part.cc" #include "src/gtest-typed-test.cc" +#include "src/gtest.cc" diff --git a/deps/googletest/src/gtest-assertion-result.cc b/deps/googletest/src/gtest-assertion-result.cc new file mode 100644 index 00000000000000..f1c0b10dc9e501 --- /dev/null +++ b/deps/googletest/src/gtest-assertion-result.cc @@ -0,0 +1,77 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// The Google C++ Testing and Mocking Framework (Google Test) +// +// This file defines the AssertionResult type. + +#include "gtest/gtest-assertion-result.h" + +#include <string> +#include <utility> + +#include "gtest/gtest-message.h" + +namespace testing { + +// AssertionResult constructors. +// Used in EXPECT_TRUE/FALSE(assertion_result).
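The usage pattern these constructors serve, sketched for orientation (standard googletest idiom for the factories declared in this new file, not itself part of the diff):

    testing::AssertionResult IsEven(int n) {
      if ((n % 2) == 0) return testing::AssertionSuccess();
      return testing::AssertionFailure() << n << " is odd";
    }
    // EXPECT_TRUE(IsEven(3)) then reports "3 is odd" rather than a bare boolean failure.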
+AssertionResult::AssertionResult(const AssertionResult& other) + : success_(other.success_), + message_(other.message_.get() != nullptr + ? new ::std::string(*other.message_) + : static_cast< ::std::string*>(nullptr)) {} + +// Swaps two AssertionResults. +void AssertionResult::swap(AssertionResult& other) { + using std::swap; + swap(success_, other.success_); + swap(message_, other.message_); +} + +// Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE. +AssertionResult AssertionResult::operator!() const { + AssertionResult negation(!success_); + if (message_.get() != nullptr) negation << *message_; + return negation; +} + +// Makes a successful assertion result. +AssertionResult AssertionSuccess() { return AssertionResult(true); } + +// Makes a failed assertion result. +AssertionResult AssertionFailure() { return AssertionResult(false); } + +// Makes a failed assertion result with the given failure message. +// Deprecated; use AssertionFailure() << message. +AssertionResult AssertionFailure(const Message& message) { + return AssertionFailure() << message; +} + +} // namespace testing diff --git a/deps/googletest/src/gtest-death-test.cc b/deps/googletest/src/gtest-death-test.cc index 52af2c795e3e63..b6968a9c949db5 --- a/deps/googletest/src/gtest-death-test.cc +++ b/deps/googletest/src/gtest-death-test.cc @@ -35,49 +35,49 @@ #include <functional> #include <utility> -#include "gtest/internal/gtest-port.h" #include "gtest/internal/custom/gtest.h" +#include "gtest/internal/gtest-port.h" #if GTEST_HAS_DEATH_TEST -# if GTEST_OS_MAC -# include <crt_externs.h> -# endif // GTEST_OS_MAC - -# include <errno.h> -# include <fcntl.h> -# include <limits.h> - -# if GTEST_OS_LINUX -# include <signal.h> -# endif // GTEST_OS_LINUX - -# include <stdarg.h> - -# if GTEST_OS_WINDOWS -# include <windows.h> -# else -# include <sys/mman.h> -# include <sys/wait.h> -# endif // GTEST_OS_WINDOWS - -# if GTEST_OS_QNX -# include <spawn.h> -# endif // GTEST_OS_QNX - -# if GTEST_OS_FUCHSIA -# include <lib/fdio/fd.h> -# include <lib/fdio/io.h> -# include <lib/fdio/spawn.h> -# include <lib/zx/channel.h> -# include <lib/zx/port.h> -# include <lib/zx/process.h> -# include <lib/zx/socket.h> -# include <zircon/processargs.h> -# include <zircon/syscalls.h> -# include <zircon/syscalls/policy.h> -# include <zircon/syscalls/port.h> -# endif // GTEST_OS_FUCHSIA +#if GTEST_OS_MAC +#include <crt_externs.h> +#endif // GTEST_OS_MAC + +#include <errno.h> +#include <fcntl.h> +#include <limits.h> + +#if GTEST_OS_LINUX +#include <signal.h> +#endif // GTEST_OS_LINUX + +#include <stdarg.h> + +#if GTEST_OS_WINDOWS +#include <windows.h> +#else +#include <sys/mman.h> +#include <sys/wait.h> +#endif // GTEST_OS_WINDOWS + +#if GTEST_OS_QNX +#include <spawn.h> +#endif // GTEST_OS_QNX + +#if GTEST_OS_FUCHSIA +#include <lib/fdio/fd.h> +#include <lib/fdio/io.h> +#include <lib/fdio/spawn.h> +#include <lib/zx/channel.h> +#include <lib/zx/port.h> +#include <lib/zx/process.h> +#include <lib/zx/socket.h> +#include <zircon/processargs.h> +#include <zircon/syscalls.h> +#include <zircon/syscalls/policy.h> +#include <zircon/syscalls/port.h> +#endif // GTEST_OS_FUCHSIA #endif // GTEST_HAS_DEATH_TEST @@ -137,9 +137,9 @@ namespace internal { // Valid only for fast death tests. Indicates the code is running in the // child process of a fast style death test. -# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA static bool g_in_fast_death_test_child = false; -# endif +#endif // Returns a Boolean value indicating whether the caller is currently // executing in the context of the death test child process. Tools such as // tests. IMPORTANT: This is an internal utility. Using it may break the // implementation of death tests. User code MUST NOT use it. bool InDeathTestChild() { -# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA +#if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA // On Windows and Fuchsia, death tests are thread-safe regardless of the value // of the death_test_style flag.
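For context, the ExitedWithCode and KilledBySignal predicates touched below are consumed by the death-test assertions; a typical illustrative use (assuming a POSIX target for the signal form, and not part of this patch):

    EXPECT_EXIT(_exit(2), testing::ExitedWithCode(2), "");
    EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), "");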
return !GTEST_FLAG_GET(internal_run_death_test).empty(); -# else +#else if (GTEST_FLAG_GET(death_test_style) == "threadsafe") return !GTEST_FLAG_GET(internal_run_death_test).empty(); @@ -165,40 +165,38 @@ bool InDeathTestChild() { } // namespace internal // ExitedWithCode constructor. -ExitedWithCode::ExitedWithCode(int exit_code) : exit_code_(exit_code) { -} +ExitedWithCode::ExitedWithCode(int exit_code) : exit_code_(exit_code) {} // ExitedWithCode function-call operator. bool ExitedWithCode::operator()(int exit_status) const { -# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA +#if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA return exit_status == exit_code_; -# else +#else return WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == exit_code_; -# endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA +#endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA } -# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA // KilledBySignal constructor. -KilledBySignal::KilledBySignal(int signum) : signum_(signum) { -} +KilledBySignal::KilledBySignal(int signum) : signum_(signum) {} // KilledBySignal function-call operator. bool KilledBySignal::operator()(int exit_status) const { -# if defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_) +#if defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_) { bool result; if (GTEST_KILLED_BY_SIGNAL_OVERRIDE_(signum_, exit_status, &result)) { return result; } } -# endif // defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_) +#endif // defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_) return WIFSIGNALED(exit_status) && WTERMSIG(exit_status) == signum_; } -# endif // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#endif // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA namespace internal { @@ -209,23 +207,23 @@ namespace internal { static std::string ExitSummary(int exit_code) { Message m; -# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA +#if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA m << "Exited with exit status " << exit_code; -# else +#else if (WIFEXITED(exit_code)) { m << "Exited with exit status " << WEXITSTATUS(exit_code); } else if (WIFSIGNALED(exit_code)) { m << "Terminated by signal " << WTERMSIG(exit_code); } -# ifdef WCOREDUMP +#ifdef WCOREDUMP if (WCOREDUMP(exit_code)) { m << " (core dumped)"; } -# endif -# endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA +#endif +#endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA return m.GetString(); } @@ -236,7 +234,7 @@ bool ExitedUnsuccessfully(int exit_status) { return !ExitedWithCode(0)(exit_status); } -# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA // Generates a textual failure message when a death test finds more than // one thread running, or cannot determine the number of threads, prior // to executing the given statement. It is the responsibility of the @@ -251,13 +249,13 @@ static std::string DeathTestThreadWarning(size_t thread_count) { msg << "detected " << thread_count << " threads."; } msg << " See " - "https://github.com/google/googletest/blob/master/docs/" + "https://github.com/google/googletest/blob/main/docs/" "advanced.md#death-tests-and-threads" << " for more explanation and suggested solutions, especially if" << " this is the last message you see before your test times out."; return msg.GetString(); } -# endif // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA +#endif // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA // Flag characters for reporting a death test that did not die. 
static const char kDeathTestLived = 'L'; @@ -286,7 +284,7 @@ enum DeathTestOutcome { IN_PROGRESS, DIED, LIVED, RETURNED, THREW }; // message is propagated back to the parent process. Otherwise, the // message is simply printed to stderr. In either case, the program // then exits with status 1. -static void DeathTestAbort(const std::string& message) { +[[noreturn]] static void DeathTestAbort(const std::string& message) { // On a POSIX system, this function may be called from a threadsafe-style // death test child process, which operates on a very small stack. Use // the heap for any additional non-minuscule memory requirements. @@ -307,14 +305,14 @@ static void DeathTestAbort(const std::string& message) { // A replacement for CHECK that calls DeathTestAbort if the assertion // fails. -# define GTEST_DEATH_TEST_CHECK_(expression) \ - do { \ - if (!::testing::internal::IsTrue(expression)) { \ - DeathTestAbort( \ - ::std::string("CHECK failed: File ") + __FILE__ + ", line " \ - + ::testing::internal::StreamableToString(__LINE__) + ": " \ - + #expression); \ - } \ +#define GTEST_DEATH_TEST_CHECK_(expression) \ + do { \ + if (!::testing::internal::IsTrue(expression)) { \ + DeathTestAbort(::std::string("CHECK failed: File ") + __FILE__ + \ + ", line " + \ + ::testing::internal::StreamableToString(__LINE__) + \ + ": " + #expression); \ + } \ } while (::testing::internal::AlwaysFalse()) // This macro is similar to GTEST_DEATH_TEST_CHECK_, but it is meant for @@ -324,23 +322,23 @@ static void DeathTestAbort(const std::string& message) { // evaluates the expression as long as it evaluates to -1 and sets // errno to EINTR. If the expression evaluates to -1 but errno is // something other than EINTR, DeathTestAbort is called. -# define GTEST_DEATH_TEST_CHECK_SYSCALL_(expression) \ - do { \ - int gtest_retval; \ - do { \ - gtest_retval = (expression); \ - } while (gtest_retval == -1 && errno == EINTR); \ - if (gtest_retval == -1) { \ - DeathTestAbort( \ - ::std::string("CHECK failed: File ") + __FILE__ + ", line " \ - + ::testing::internal::StreamableToString(__LINE__) + ": " \ - + #expression + " != -1"); \ - } \ +#define GTEST_DEATH_TEST_CHECK_SYSCALL_(expression) \ + do { \ + int gtest_retval; \ + do { \ + gtest_retval = (expression); \ + } while (gtest_retval == -1 && errno == EINTR); \ + if (gtest_retval == -1) { \ + DeathTestAbort(::std::string("CHECK failed: File ") + __FILE__ + \ + ", line " + \ + ::testing::internal::StreamableToString(__LINE__) + \ + ": " + #expression + " != -1"); \ + } \ } while (::testing::internal::AlwaysFalse()) // Returns the message describing the last system error in errno. std::string GetLastErrnoDescription() { - return errno == 0 ? "" : posix::StrError(errno); + return errno == 0 ? "" : posix::StrError(errno); } // This is called from a death test parent process to read a failure @@ -373,8 +371,9 @@ static void FailFromInternalError(int fd) { DeathTest::DeathTest() { TestInfo* const info = GetUnitTestImpl()->current_test_info(); if (info == nullptr) { - DeathTestAbort("Cannot run a death test outside of a TEST or " - "TEST_F construct"); + DeathTestAbort( + "Cannot run a death test outside of a TEST or " + "TEST_F construct"); } } @@ -503,9 +502,7 @@ void DeathTestImpl::ReadAndInterpretStatusByte() { set_read_fd(-1); } -std::string DeathTestImpl::GetErrorLogs() { - return GetCapturedStderr(); -} +std::string DeathTestImpl::GetErrorLogs() { return GetCapturedStderr(); } // Signals that the death test code which should have exited, didn't. 
// Should be called only in a death test child process. @@ -515,9 +512,9 @@ void DeathTestImpl::Abort(AbortReason reason) { // The parent process considers the death test to be a failure if // it finds any data in our pipe. So, here we write a single flag byte // to the pipe, then exit. - const char status_ch = - reason == TEST_DID_NOT_DIE ? kDeathTestLived : - reason == TEST_THREW_EXCEPTION ? kDeathTestThrew : kDeathTestReturned; + const char status_ch = reason == TEST_DID_NOT_DIE ? kDeathTestLived + : reason == TEST_THREW_EXCEPTION ? kDeathTestThrew + : kDeathTestReturned; GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Write(write_fd(), &status_ch, 1)); // We are leaking the descriptor here because on some platforms (i.e., @@ -536,7 +533,7 @@ void DeathTestImpl::Abort(AbortReason reason) { // much easier. static ::std::string FormatDeathTestOutput(const ::std::string& output) { ::std::string ret; - for (size_t at = 0; ; ) { + for (size_t at = 0;;) { const size_t line_end = output.find('\n', at); ret += "[ DEATH ] "; if (line_end == ::std::string::npos) { @@ -571,8 +568,7 @@ static ::std::string FormatDeathTestOutput(const ::std::string& output) { // the first failing condition, in the order given above, is the one that is // reported. Also sets the last death test message string. bool DeathTestImpl::Passed(bool status_ok) { - if (!spawned()) - return false; + if (!spawned()) return false; const std::string error_message = GetErrorLogs(); @@ -583,15 +579,18 @@ bool DeathTestImpl::Passed(bool status_ok) { switch (outcome()) { case LIVED: buffer << " Result: failed to die.\n" - << " Error msg:\n" << FormatDeathTestOutput(error_message); + << " Error msg:\n" + << FormatDeathTestOutput(error_message); break; case THREW: buffer << " Result: threw an exception.\n" - << " Error msg:\n" << FormatDeathTestOutput(error_message); + << " Error msg:\n" + << FormatDeathTestOutput(error_message); break; case RETURNED: buffer << " Result: illegal return in test statement.\n" - << " Error msg:\n" << FormatDeathTestOutput(error_message); + << " Error msg:\n" + << FormatDeathTestOutput(error_message); break; case DIED: if (status_ok) { @@ -608,7 +607,8 @@ bool DeathTestImpl::Passed(bool status_ok) { } else { buffer << " Result: died but not with expected exit code:\n" << " " << ExitSummary(status()) << "\n" - << "Actual msg:\n" << FormatDeathTestOutput(error_message); + << "Actual msg:\n" + << FormatDeathTestOutput(error_message); } break; case IN_PROGRESS: @@ -621,7 +621,7 @@ bool DeathTestImpl::Passed(bool status_ok) { return success; } -# if GTEST_OS_WINDOWS +#if GTEST_OS_WINDOWS // WindowsDeathTest implements death tests on Windows. Due to the // specifics of starting new processes on Windows, death tests there are // always threadsafe, and Google Test considers the @@ -682,14 +682,12 @@ class WindowsDeathTest : public DeathTestImpl { // status, or 0 if no child process exists. As a side effect, sets the // outcome data member. int WindowsDeathTest::Wait() { - if (!spawned()) - return 0; + if (!spawned()) return 0; // Wait until the child either signals that it has acquired the write end // of the pipe or it dies. - const HANDLE wait_handles[2] = { child_handle_.Get(), event_handle_.Get() }; - switch (::WaitForMultipleObjects(2, - wait_handles, + const HANDLE wait_handles[2] = {child_handle_.Get(), event_handle_.Get()}; + switch (::WaitForMultipleObjects(2, wait_handles, FALSE, // Waits for any of the handles. 
INFINITE)) { case WAIT_OBJECT_0: @@ -710,9 +708,8 @@ int WindowsDeathTest::Wait() { // returns immediately if the child has already exited, regardless of // whether previous calls to WaitForMultipleObjects synchronized on this // handle or not. - GTEST_DEATH_TEST_CHECK_( - WAIT_OBJECT_0 == ::WaitForSingleObject(child_handle_.Get(), - INFINITE)); + GTEST_DEATH_TEST_CHECK_(WAIT_OBJECT_0 == + ::WaitForSingleObject(child_handle_.Get(), INFINITE)); DWORD status_code; GTEST_DEATH_TEST_CHECK_( ::GetExitCodeProcess(child_handle_.Get(), &status_code) != FALSE); @@ -745,12 +742,12 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() { SECURITY_ATTRIBUTES handles_are_inheritable = {sizeof(SECURITY_ATTRIBUTES), nullptr, TRUE}; HANDLE read_handle, write_handle; - GTEST_DEATH_TEST_CHECK_( - ::CreatePipe(&read_handle, &write_handle, &handles_are_inheritable, - 0) // Default buffer size. - != FALSE); - set_read_fd(::_open_osfhandle(reinterpret_cast<intptr_t>(read_handle), - O_RDONLY)); + GTEST_DEATH_TEST_CHECK_(::CreatePipe(&read_handle, &write_handle, + &handles_are_inheritable, + 0) // Default buffer size. + != FALSE); + set_read_fd( + ::_open_osfhandle(reinterpret_cast<intptr_t>(read_handle), O_RDONLY)); write_handle_.Reset(write_handle); event_handle_.Reset(::CreateEvent( &handles_are_inheritable, @@ -777,9 +774,8 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() { executable_path, _MAX_PATH)); - std::string command_line = - std::string(::GetCommandLineA()) + " " + filter_flag + " \"" + - internal_flag + "\""; + std::string command_line = std::string(::GetCommandLineA()) + " " + + filter_flag + " \"" + internal_flag + "\""; DeathTest::set_last_death_test_message(""); @@ -799,8 +795,8 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() { GTEST_DEATH_TEST_CHECK_( ::CreateProcessA( executable_path, const_cast<char*>(command_line.c_str()), - nullptr, // Retuned process handle is not inheritable. - nullptr, // Retuned thread handle is not inheritable. + nullptr, // Returned process handle is not inheritable. + nullptr, // Returned thread handle is not inheritable. TRUE, // Child inherits all inheritable handles (for write_handle_). 0x0, // Default creation flags. nullptr, // Inherit the parent's environment. @@ -812,7 +808,7 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() { return OVERSEE_TEST; } -# elif GTEST_OS_FUCHSIA +#elif GTEST_OS_FUCHSIA class FuchsiaDeathTest : public DeathTestImpl { public: @@ -858,18 +854,13 @@ class Arguments { template <typename Str> void AddArguments(const ::std::vector<Str>& arguments) { for (typename ::std::vector<Str>::const_iterator i = arguments.begin(); - i != arguments.end(); - ++i) { + i != arguments.end(); ++i) { args_.insert(args_.end() - 1, posix::StrDup(i->c_str())); } } - char* const* Argv() { - return &args_[0]; - } + char* const* Argv() { return &args_[0]; } - int size() { - return static_cast<int>(args_.size()) - 1; - } + int size() { return static_cast<int>(args_.size()) - 1; } private: std::vector<char*> args_; @@ -883,8 +874,7 @@ int FuchsiaDeathTest::Wait() { const int kSocketKey = 1; const int kExceptionKey = 2; - if (!spawned()) - return 0; + if (!spawned()) return 0; // Create a port to wait for socket/task/exception events. zx_status_t status_zx; @@ -893,8 +883,8 @@ int FuchsiaDeathTest::Wait() { GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK); // Register to wait for the child process to terminate.
- status_zx = child_process_.wait_async( - port, kProcessKey, ZX_PROCESS_TERMINATED, 0); + status_zx = + child_process_.wait_async(port, kProcessKey, ZX_PROCESS_TERMINATED, 0); GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK); // Register to wait for the socket to be readable or closed. @@ -903,8 +893,8 @@ int FuchsiaDeathTest::Wait() { GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK); // Register to wait for an exception. - status_zx = exception_channel_.wait_async( - port, kExceptionKey, ZX_CHANNEL_READABLE, 0); + status_zx = exception_channel_.wait_async(port, kExceptionKey, + ZX_CHANNEL_READABLE, 0); GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK); bool process_terminated = false; @@ -934,9 +924,9 @@ int FuchsiaDeathTest::Wait() { size_t old_length = captured_stderr_.length(); size_t bytes_read = 0; captured_stderr_.resize(old_length + kBufferSize); - status_zx = stderr_socket_.read( - 0, &captured_stderr_.front() + old_length, kBufferSize, - &bytes_read); + status_zx = + stderr_socket_.read(0, &captured_stderr_.front() + old_length, + kBufferSize, &bytes_read); captured_stderr_.resize(old_length + bytes_read); } while (status_zx == ZX_OK); if (status_zx == ZX_ERR_PEER_CLOSED) { @@ -992,11 +982,10 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() { const std::string filter_flag = std::string("--") + GTEST_FLAG_PREFIX_ + "filter=" + info->test_suite_name() + "." + info->name(); - const std::string internal_flag = - std::string("--") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag + "=" - + file_ + "|" - + StreamableToString(line_) + "|" - + StreamableToString(death_test_index); + const std::string internal_flag = std::string("--") + GTEST_FLAG_PREFIX_ + + kInternalRunDeathTestFlag + "=" + file_ + + "|" + StreamableToString(line_) + "|" + + StreamableToString(death_test_index); Arguments args; args.AddArguments(GetInjectableArgvs()); args.AddArgument(filter_flag.c_str()); @@ -1019,8 +1008,7 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() { // Create a socket pair will be used to receive the child process' stderr. zx::socket stderr_producer_socket; - status = - zx::socket::create(0, &stderr_producer_socket, &stderr_socket_); + status = zx::socket::create(0, &stderr_producer_socket, &stderr_socket_); GTEST_DEATH_TEST_CHECK_(status >= 0); int stderr_producer_fd = -1; status = @@ -1037,35 +1025,32 @@ DeathTest::TestRole FuchsiaDeathTest::AssumeRole() { // Create a child job. zx_handle_t child_job = ZX_HANDLE_INVALID; - status = zx_job_create(zx_job_default(), 0, & child_job); + status = zx_job_create(zx_job_default(), 0, &child_job); GTEST_DEATH_TEST_CHECK_(status == ZX_OK); zx_policy_basic_t policy; policy.condition = ZX_POL_NEW_ANY; policy.policy = ZX_POL_ACTION_ALLOW; - status = zx_job_set_policy( - child_job, ZX_JOB_POL_RELATIVE, ZX_JOB_POL_BASIC, &policy, 1); + status = zx_job_set_policy(child_job, ZX_JOB_POL_RELATIVE, ZX_JOB_POL_BASIC, + &policy, 1); GTEST_DEATH_TEST_CHECK_(status == ZX_OK); // Create an exception channel attached to the |child_job|, to allow // us to suppress the system default exception handler from firing. - status = - zx_task_create_exception_channel( - child_job, 0, exception_channel_.reset_and_get_address()); + status = zx_task_create_exception_channel( + child_job, 0, exception_channel_.reset_and_get_address()); GTEST_DEATH_TEST_CHECK_(status == ZX_OK); // Spawn the child process. 
- status = fdio_spawn_etc( - child_job, FDIO_SPAWN_CLONE_ALL, args.Argv()[0], args.Argv(), nullptr, - 2, spawn_actions, child_process_.reset_and_get_address(), nullptr); + status = fdio_spawn_etc(child_job, FDIO_SPAWN_CLONE_ALL, args.Argv()[0], + args.Argv(), nullptr, 2, spawn_actions, + child_process_.reset_and_get_address(), nullptr); GTEST_DEATH_TEST_CHECK_(status == ZX_OK); set_spawned(true); return OVERSEE_TEST; } -std::string FuchsiaDeathTest::GetErrorLogs() { - return captured_stderr_; -} +std::string FuchsiaDeathTest::GetErrorLogs() { return captured_stderr_; } #else // We are neither on Windows, nor on Fuchsia. @@ -1096,8 +1081,7 @@ ForkingDeathTest::ForkingDeathTest(const char* a_statement, // status, or 0 if no child process exists. As a side effect, sets the // outcome data member. int ForkingDeathTest::Wait() { - if (!spawned()) - return 0; + if (!spawned()) return 0; ReadAndInterpretStatusByte(); @@ -1176,11 +1160,11 @@ class ExecDeathTest : public ForkingDeathTest { private: static ::std::vector<std::string> GetArgvsForDeathTestChildProcess() { ::std::vector<std::string> args = GetInjectableArgvs(); -# if defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_) +#if defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_) ::std::vector<std::string> extra_args = GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_(); args.insert(args.end(), extra_args.begin(), extra_args.end()); -# endif // defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_) +#endif // defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_) return args; } // The name of the file in which the death test is located. @@ -1207,14 +1191,11 @@ class Arguments { template <typename Str> void AddArguments(const ::std::vector<Str>& arguments) { for (typename ::std::vector<Str>::const_iterator i = arguments.begin(); - i != arguments.end(); - ++i) { + i != arguments.end(); ++i) { args_.insert(args_.end() - 1, posix::StrDup(i->c_str())); } } - char* const* Argv() { - return &args_[0]; - } + char* const* Argv() { return &args_[0]; } private: std::vector<char*> args_; @@ -1227,9 +1208,9 @@ struct ExecDeathTestArgs { int close_fd; // File descriptor to close; the read end of a pipe }; -# if GTEST_OS_QNX +#if GTEST_OS_QNX extern "C" char** environ; -# else // GTEST_OS_QNX +#else // GTEST_OS_QNX // The main function for a threadsafe-style death test child process. // This function is called in a clone()-ed process and thus must avoid // any potentially unsafe operations like malloc or libc functions. @@ -1244,8 +1225,8 @@ static int ExecDeathTestChildMain(void* child_arg) { UnitTest::GetInstance()->original_working_dir(); // We can safely call chdir() as it's a direct system call. if (chdir(original_dir) != 0) { - DeathTestAbort(std::string("chdir(\"") + original_dir + "\") failed: " + - GetLastErrnoDescription()); + DeathTestAbort(std::string("chdir(\"") + original_dir + + "\") failed: " + GetLastErrnoDescription()); return EXIT_FAILURE; } @@ -1256,13 +1237,12 @@ static int ExecDeathTestChildMain(void* child_arg) { // one path separator. execv(args->argv[0], args->argv); DeathTestAbort(std::string("execv(") + args->argv[0] + ", ...) in " + - original_dir + " failed: " + - GetLastErrnoDescription()); + original_dir + " failed: " + GetLastErrnoDescription()); return EXIT_FAILURE; } -# endif // GTEST_OS_QNX +#endif // GTEST_OS_QNX -# if GTEST_HAS_CLONE +#if GTEST_HAS_CLONE // Two utility routines that together determine the direction the stack // grows.
// This could be accomplished more elegantly by a single recursive @@ -1296,7 +1276,7 @@ static bool StackGrowsDown() { StackLowerThanAddress(&dummy, &result); return result; } -# endif // GTEST_HAS_CLONE +#endif // GTEST_HAS_CLONE // Spawns a child process with the same executable as the current process in // a thread-safe manner and instructs it to run the death test. The // spawn(2) there instead. The function dies with an error message if // anything goes wrong. static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { - ExecDeathTestArgs args = { argv, close_fd }; + ExecDeathTestArgs args = {argv, close_fd}; pid_t child_pid = -1; -# if GTEST_OS_QNX +#if GTEST_OS_QNX // Obtains the current directory and sets it to be closed in the child // process. const int cwd_fd = open(".", O_RDONLY); @@ -1322,16 +1302,16 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { UnitTest::GetInstance()->original_working_dir(); // We can safely call chdir() as it's a direct system call. if (chdir(original_dir) != 0) { - DeathTestAbort(std::string("chdir(\"") + original_dir + "\") failed: " + - GetLastErrnoDescription()); + DeathTestAbort(std::string("chdir(\"") + original_dir + + "\") failed: " + GetLastErrnoDescription()); return EXIT_FAILURE; } int fd_flags; // Set close_fd to be closed after spawn. GTEST_DEATH_TEST_CHECK_SYSCALL_(fd_flags = fcntl(close_fd, F_GETFD)); - GTEST_DEATH_TEST_CHECK_SYSCALL_(fcntl(close_fd, F_SETFD, - fd_flags | FD_CLOEXEC)); + GTEST_DEATH_TEST_CHECK_SYSCALL_( + fcntl(close_fd, F_SETFD, fd_flags | FD_CLOEXEC)); struct inheritance inherit = {0}; // spawn is a system call. child_pid = spawn(args.argv[0], 0, nullptr, &inherit, args.argv, environ); @@ -1339,8 +1319,8 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { GTEST_DEATH_TEST_CHECK_(fchdir(cwd_fd) != -1); GTEST_DEATH_TEST_CHECK_SYSCALL_(close(cwd_fd)); -# else // GTEST_OS_QNX -# if GTEST_OS_LINUX +#else // GTEST_OS_QNX +#if GTEST_OS_LINUX // When a SIGPROF signal is received while fork() or clone() are executing, // the process may hang. To avoid this, we ignore SIGPROF here and re-enable // it after the call to fork()/clone() is complete. @@ -1349,11 +1329,11 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { memset(&ignore_sigprof_action, 0, sizeof(ignore_sigprof_action)); sigemptyset(&ignore_sigprof_action.sa_mask); ignore_sigprof_action.sa_handler = SIG_IGN; - GTEST_DEATH_TEST_CHECK_SYSCALL_(sigaction( - SIGPROF, &ignore_sigprof_action, &saved_sigprof_action)); -# endif // GTEST_OS_LINUX + GTEST_DEATH_TEST_CHECK_SYSCALL_( + sigaction(SIGPROF, &ignore_sigprof_action, &saved_sigprof_action)); +#endif // GTEST_OS_LINUX -# if GTEST_HAS_CLONE +#if GTEST_HAS_CLONE const bool use_fork = GTEST_FLAG_GET(death_test_use_fork); if (!use_fork) { @@ -1373,7 +1353,7 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { const size_t kMaxStackAlignment = 64; void* const stack_top = static_cast<char*>(stack) + - (stack_grows_down ? stack_size - kMaxStackAlignment : 0); + (stack_grows_down ? 
stack_size - kMaxStackAlignment : 0); GTEST_DEATH_TEST_CHECK_( static_cast<size_t>(stack_size) > kMaxStackAlignment && reinterpret_cast<uintptr_t>(stack_top) % kMaxStackAlignment == 0); @@ -1382,19 +1362,19 @@ static pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) { GTEST_DEATH_TEST_CHECK_(munmap(stack, stack_size) != -1); } -# else +#else const bool use_fork = true; -# endif // GTEST_HAS_CLONE +#endif // GTEST_HAS_CLONE if (use_fork && (child_pid = fork()) == 0) { - ExecDeathTestChildMain(&args); - _exit(0); + ExecDeathTestChildMain(&args); + _exit(0); } -# endif // GTEST_OS_QNX -# if GTEST_OS_LINUX +#endif // GTEST_OS_QNX +#if GTEST_OS_LINUX GTEST_DEATH_TEST_CHECK_SYSCALL_( sigaction(SIGPROF, &saved_sigprof_action, nullptr)); -# endif // GTEST_OS_LINUX +#endif // GTEST_OS_LINUX GTEST_DEATH_TEST_CHECK_(child_pid != -1); return child_pid; @@ -1450,7 +1430,7 @@ DeathTest::TestRole ExecDeathTest::AssumeRole() { return OVERSEE_TEST; } -# endif // !GTEST_OS_WINDOWS +#endif // !GTEST_OS_WINDOWS // Creates a concrete DeathTest-derived class that depends on the // --gtest_death_test_style flag, and sets the pointer pointed to @@ -1464,15 +1444,15 @@ bool DefaultDeathTestFactory::Create(const char* statement, UnitTestImpl* const impl = GetUnitTestImpl(); const InternalRunDeathTestFlag* const flag = impl->internal_run_death_test_flag(); - const int death_test_index = impl->current_test_info() - ->increment_death_test_count(); + const int death_test_index = + impl->current_test_info()->increment_death_test_count(); if (flag != nullptr) { if (death_test_index > flag->index()) { DeathTest::set_last_death_test_message( - "Death test count (" + StreamableToString(death_test_index) - + ") somehow exceeded expected maximum (" - + StreamableToString(flag->index()) + ")"); + "Death test count (" + StreamableToString(death_test_index) + + ") somehow exceeded expected maximum (" + + StreamableToString(flag->index()) + ")"); return false; } @@ -1483,21 +1463,21 @@ bool DefaultDeathTestFactory::Create(const char* statement, } } -# if GTEST_OS_WINDOWS +#if GTEST_OS_WINDOWS if (GTEST_FLAG_GET(death_test_style) == "threadsafe" || GTEST_FLAG_GET(death_test_style) == "fast") { *test = new WindowsDeathTest(statement, std::move(matcher), file, line); } -# elif GTEST_OS_FUCHSIA +#elif GTEST_OS_FUCHSIA if (GTEST_FLAG_GET(death_test_style) == "threadsafe" || GTEST_FLAG_GET(death_test_style) == "fast") { *test = new FuchsiaDeathTest(statement, std::move(matcher), file, line); } -# else +#else if (GTEST_FLAG_GET(death_test_style) == "threadsafe") { *test = new ExecDeathTest(statement, std::move(matcher), file, line); @@ -1505,7 +1485,7 @@ bool DefaultDeathTestFactory::Create(const char* statement, *test = new NoExecDeathTest(statement, std::move(matcher)); } -# endif // GTEST_OS_WINDOWS +#endif // GTEST_OS_WINDOWS else { // NOLINT - this is more readable than unbalanced brackets inside #if. DeathTest::set_last_death_test_message("Unknown death test style \"" + @@ -1517,16 +1497,16 @@ bool DefaultDeathTestFactory::Create(const char* statement, return true; } -# if GTEST_OS_WINDOWS +#if GTEST_OS_WINDOWS // Recreates the pipe and event handles from the provided parameters, // signals the event, and returns a file descriptor wrapped around the pipe // handle. This function is called in the child process only.
static int GetStatusFileDescriptor(unsigned int parent_process_id, - size_t write_handle_as_size_t, - size_t event_handle_as_size_t) { + size_t write_handle_as_size_t, + size_t event_handle_as_size_t) { AutoHandle parent_process_handle(::OpenProcess(PROCESS_DUP_HANDLE, - FALSE, // Non-inheritable. - parent_process_id)); + FALSE, // Non-inheritable. + parent_process_id)); if (parent_process_handle.Get() == INVALID_HANDLE_VALUE) { DeathTestAbort("Unable to open parent process " + StreamableToString(parent_process_id)); @@ -1534,8 +1514,7 @@ static int GetStatusFileDescriptor(unsigned int parent_process_id, GTEST_CHECK_(sizeof(HANDLE) <= sizeof(size_t)); - const HANDLE write_handle = - reinterpret_cast<HANDLE>(write_handle_as_size_t); + const HANDLE write_handle = reinterpret_cast<HANDLE>(write_handle_as_size_t); HANDLE dup_write_handle; // The newly initialized handle is accessible only in the parent @@ -1557,9 +1536,7 @@ static int GetStatusFileDescriptor(unsigned int parent_process_id, HANDLE dup_event_handle; if (!::DuplicateHandle(parent_process_handle.Get(), event_handle, - ::GetCurrentProcess(), &dup_event_handle, - 0x0, - FALSE, + ::GetCurrentProcess(), &dup_event_handle, 0x0, FALSE, DUPLICATE_SAME_ACCESS)) { DeathTestAbort("Unable to duplicate the event handle " + StreamableToString(event_handle_as_size_t) + @@ -1581,7 +1558,7 @@ static int GetStatusFileDescriptor(unsigned int parent_process_id, return write_fd; } -# endif // GTEST_OS_WINDOWS +#endif // GTEST_OS_WINDOWS // Returns a newly created InternalRunDeathTestFlag object with fields // initialized from the GTEST_FLAG(internal_run_death_test) flag if @@ -1597,45 +1574,41 @@ InternalRunDeathTestFlag* ParseInternalRunDeathTestFlag() { SplitString(GTEST_FLAG_GET(internal_run_death_test), '|', &fields); int write_fd = -1; -# if GTEST_OS_WINDOWS +#if GTEST_OS_WINDOWS unsigned int parent_process_id = 0; size_t write_handle_as_size_t = 0; size_t event_handle_as_size_t = 0; - if (fields.size() != 6 - || !ParseNaturalNumber(fields[1], &line) - || !ParseNaturalNumber(fields[2], &index) - || !ParseNaturalNumber(fields[3], &parent_process_id) - || !ParseNaturalNumber(fields[4], &write_handle_as_size_t) - || !ParseNaturalNumber(fields[5], &event_handle_as_size_t)) { + if (fields.size() != 6 || !ParseNaturalNumber(fields[1], &line) || + !ParseNaturalNumber(fields[2], &index) || + !ParseNaturalNumber(fields[3], &parent_process_id) || + !ParseNaturalNumber(fields[4], &write_handle_as_size_t) || + !ParseNaturalNumber(fields[5], &event_handle_as_size_t)) { DeathTestAbort("Bad --gtest_internal_run_death_test flag: " + GTEST_FLAG_GET(internal_run_death_test)); } - write_fd = GetStatusFileDescriptor(parent_process_id, - write_handle_as_size_t, + write_fd = GetStatusFileDescriptor(parent_process_id, write_handle_as_size_t, event_handle_as_size_t); -# elif GTEST_OS_FUCHSIA +#elif GTEST_OS_FUCHSIA - if (fields.size() != 3 - || !ParseNaturalNumber(fields[1], &line) - || !ParseNaturalNumber(fields[2], &index)) { + if (fields.size() != 3 || !ParseNaturalNumber(fields[1], &line) || + !ParseNaturalNumber(fields[2], &index)) { DeathTestAbort("Bad --gtest_internal_run_death_test flag: " + GTEST_FLAG_GET(internal_run_death_test)); } -# else +#else - if (fields.size() != 4 - || !ParseNaturalNumber(fields[1], &line) - || !ParseNaturalNumber(fields[2], &index) - || !ParseNaturalNumber(fields[3], &write_fd)) { + if (fields.size() != 4 || !ParseNaturalNumber(fields[1], &line) || + !ParseNaturalNumber(fields[2], &index) || + !ParseNaturalNumber(fields[3], &write_fd)) { 
DeathTestAbort("Bad --gtest_internal_run_death_test flag: " + GTEST_FLAG_GET(internal_run_death_test)); } -# endif // GTEST_OS_WINDOWS +#endif // GTEST_OS_WINDOWS return new InternalRunDeathTestFlag(fields[0], line, index, write_fd); } diff --git a/deps/googletest/src/gtest-filepath.cc b/deps/googletest/src/gtest-filepath.cc index 0b5629401b5a87..9d79ea49d8ae66 100644 --- a/deps/googletest/src/gtest-filepath.cc +++ b/deps/googletest/src/gtest-filepath.cc @@ -30,31 +30,35 @@ #include "gtest/internal/gtest-filepath.h" #include -#include "gtest/internal/gtest-port.h" + #include "gtest/gtest-message.h" +#include "gtest/internal/gtest-port.h" #if GTEST_OS_WINDOWS_MOBILE -# include +#include #elif GTEST_OS_WINDOWS -# include -# include +#include +#include #else -# include -# include // Some Linux distributions define PATH_MAX here. -#endif // GTEST_OS_WINDOWS_MOBILE +#include + +#include // Some Linux distributions define PATH_MAX here. +#endif // GTEST_OS_WINDOWS_MOBILE #include "gtest/internal/gtest-string.h" #if GTEST_OS_WINDOWS -# define GTEST_PATH_MAX_ _MAX_PATH +#define GTEST_PATH_MAX_ _MAX_PATH #elif defined(PATH_MAX) -# define GTEST_PATH_MAX_ PATH_MAX +#define GTEST_PATH_MAX_ PATH_MAX #elif defined(_XOPEN_PATH_MAX) -# define GTEST_PATH_MAX_ _XOPEN_PATH_MAX +#define GTEST_PATH_MAX_ _XOPEN_PATH_MAX #else -# define GTEST_PATH_MAX_ _POSIX_PATH_MAX +#define GTEST_PATH_MAX_ _POSIX_PATH_MAX #endif // GTEST_OS_WINDOWS +#if GTEST_HAS_FILE_SYSTEM + namespace testing { namespace internal { @@ -66,16 +70,16 @@ namespace internal { const char kPathSeparator = '\\'; const char kAlternatePathSeparator = '/'; const char kAlternatePathSeparatorString[] = "/"; -# if GTEST_OS_WINDOWS_MOBILE +#if GTEST_OS_WINDOWS_MOBILE // Windows CE doesn't have a current directory. You should not use // the current directory in tests on Windows CE, but this at least // provides a reasonable fallback. const char kCurrentDirectoryString[] = "\\"; // Windows CE doesn't define INVALID_FILE_ATTRIBUTES const DWORD kInvalidFileAttributes = 0xffffffff; -# else +#else const char kCurrentDirectoryString[] = ".\\"; -# endif // GTEST_OS_WINDOWS_MOBILE +#endif // GTEST_OS_WINDOWS_MOBILE #else const char kPathSeparator = '/'; const char kCurrentDirectoryString[] = "./"; @@ -94,22 +98,22 @@ static bool IsPathSeparator(char c) { FilePath FilePath::GetCurrentDir() { #if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \ GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_ESP32 || \ - GTEST_OS_XTENSA + GTEST_OS_XTENSA || GTEST_OS_QURT // These platforms do not have a current directory, so we just return // something reasonable. return FilePath(kCurrentDirectoryString); #elif GTEST_OS_WINDOWS - char cwd[GTEST_PATH_MAX_ + 1] = { '\0' }; + char cwd[GTEST_PATH_MAX_ + 1] = {'\0'}; return FilePath(_getcwd(cwd, sizeof(cwd)) == nullptr ? "" : cwd); #else - char cwd[GTEST_PATH_MAX_ + 1] = { '\0' }; + char cwd[GTEST_PATH_MAX_ + 1] = {'\0'}; char* result = getcwd(cwd, sizeof(cwd)); -# if GTEST_OS_NACL +#if GTEST_OS_NACL // getcwd will likely fail in NaCl due to the sandbox, so return something // reasonable. The user may have provided a shim implementation for getcwd, // however, so fallback only when failure is detected. return FilePath(result == nullptr ? kCurrentDirectoryString : cwd); -# endif // GTEST_OS_NACL +#endif // GTEST_OS_NACL return FilePath(result == nullptr ? 
"" : cwd); #endif // GTEST_OS_WINDOWS_MOBILE } @@ -121,8 +125,8 @@ FilePath FilePath::GetCurrentDir() { FilePath FilePath::RemoveExtension(const char* extension) const { const std::string dot_extension = std::string(".") + extension; if (String::EndsWithCaseInsensitive(pathname_, dot_extension)) { - return FilePath(pathname_.substr( - 0, pathname_.length() - dot_extension.length())); + return FilePath( + pathname_.substr(0, pathname_.length() - dot_extension.length())); } return *this; } @@ -143,6 +147,45 @@ const char* FilePath::FindLastPathSeparator() const { return last_sep; } +size_t FilePath::CalculateRootLength() const { + const auto &path = pathname_; + auto s = path.begin(); + auto end = path.end(); +#if GTEST_OS_WINDOWS + if (end - s >= 2 && s[1] == ':' && + (end - s == 2 || IsPathSeparator(s[2])) && + (('A' <= s[0] && s[0] <= 'Z') || ('a' <= s[0] && s[0] <= 'z'))) { + // A typical absolute path like "C:\Windows" or "D:" + s += 2; + if (s != end) { + ++s; + } + } else if (end - s >= 3 && IsPathSeparator(*s) && IsPathSeparator(*(s + 1)) + && !IsPathSeparator(*(s + 2))) { + // Move past the "\\" prefix in a UNC path like "\\Server\Share\Folder" + s += 2; + // Skip 2 components and their following separators ("Server\" and "Share\") + for (int i = 0; i < 2; ++i) { + while (s != end) { + bool stop = IsPathSeparator(*s); + ++s; + if (stop) { + break; + } + } + } + } else if (s != end && IsPathSeparator(*s)) { + // A drive-rooted path like "\Windows" + ++s; + } +#else + if (s != end && IsPathSeparator(*s)) { + ++s; + } +#endif + return static_cast(s - path.begin()); +} + // Returns a copy of the FilePath with the directory part removed. // Example: FilePath("path/to/file").RemoveDirectoryName() returns // FilePath("file"). If there is no directory part ("just_a_file"), it returns @@ -178,15 +221,14 @@ FilePath FilePath::RemoveFileName() const { // than zero (e.g., 12), returns "dir/test_12.xml". // On Windows platform, uses \ as the separator rather than /. FilePath FilePath::MakeFileName(const FilePath& directory, - const FilePath& base_name, - int number, + const FilePath& base_name, int number, const char* extension) { std::string file; if (number == 0) { file = base_name.string() + "." + extension; } else { - file = base_name.string() + "_" + StreamableToString(number) - + "." + extension; + file = + base_name.string() + "_" + StreamableToString(number) + "." + extension; } return ConcatPaths(directory, FilePath(file)); } @@ -195,8 +237,7 @@ FilePath FilePath::MakeFileName(const FilePath& directory, // On Windows, uses \ as the separator rather than /. FilePath FilePath::ConcatPaths(const FilePath& directory, const FilePath& relative_path) { - if (directory.IsEmpty()) - return relative_path; + if (directory.IsEmpty()) return relative_path; const FilePath dir(directory.RemoveTrailingPathSeparator()); return FilePath(dir.string() + kPathSeparator + relative_path.string()); } @@ -207,7 +248,7 @@ bool FilePath::FileOrDirectoryExists() const { #if GTEST_OS_WINDOWS_MOBILE LPCWSTR unicode = String::AnsiToUtf16(pathname_.c_str()); const DWORD attributes = GetFileAttributes(unicode); - delete [] unicode; + delete[] unicode; return attributes != kInvalidFileAttributes; #else posix::StatStruct file_stat{}; @@ -222,8 +263,8 @@ bool FilePath::DirectoryExists() const { #if GTEST_OS_WINDOWS // Don't strip off trailing separator if path is a root directory on // Windows (like "C:\\"). - const FilePath& path(IsRootDirectory() ? 
*this : - RemoveTrailingPathSeparator()); + const FilePath& path(IsRootDirectory() ? *this + : RemoveTrailingPathSeparator()); #else const FilePath& path(*this); #endif @@ -231,42 +272,31 @@ bool FilePath::DirectoryExists() const { #if GTEST_OS_WINDOWS_MOBILE LPCWSTR unicode = String::AnsiToUtf16(path.c_str()); const DWORD attributes = GetFileAttributes(unicode); - delete [] unicode; + delete[] unicode; if ((attributes != kInvalidFileAttributes) && (attributes & FILE_ATTRIBUTE_DIRECTORY)) { result = true; } #else posix::StatStruct file_stat{}; - result = posix::Stat(path.c_str(), &file_stat) == 0 && - posix::IsDir(file_stat); + result = + posix::Stat(path.c_str(), &file_stat) == 0 && posix::IsDir(file_stat); #endif // GTEST_OS_WINDOWS_MOBILE return result; } // Returns true if pathname describes a root directory. (Windows has one -// root directory per disk drive.) +// root directory per disk drive. UNC share roots are also included.) bool FilePath::IsRootDirectory() const { -#if GTEST_OS_WINDOWS - return pathname_.length() == 3 && IsAbsolutePath(); -#else - return pathname_.length() == 1 && IsPathSeparator(pathname_.c_str()[0]); -#endif + size_t root_length = CalculateRootLength(); + return root_length > 0 && root_length == pathname_.size() && + IsPathSeparator(pathname_[root_length - 1]); } // Returns true if pathname describes an absolute path. bool FilePath::IsAbsolutePath() const { - const char* const name = pathname_.c_str(); -#if GTEST_OS_WINDOWS - return pathname_.length() >= 3 && - ((name[0] >= 'a' && name[0] <= 'z') || - (name[0] >= 'A' && name[0] <= 'Z')) && - name[1] == ':' && - IsPathSeparator(name[2]); -#else - return IsPathSeparator(name[0]); -#endif + return CalculateRootLength() > 0; } // Returns a pathname for a file that does not currently exist. The pathname @@ -321,10 +351,10 @@ bool FilePath::CreateFolder() const { FilePath removed_sep(this->RemoveTrailingPathSeparator()); LPCWSTR unicode = String::AnsiToUtf16(removed_sep.c_str()); int result = CreateDirectory(unicode, nullptr) ? 0 : -1; - delete [] unicode; + delete[] unicode; #elif GTEST_OS_WINDOWS int result = _mkdir(pathname_.c_str()); -#elif GTEST_OS_ESP8266 || GTEST_OS_XTENSA +#elif GTEST_OS_ESP8266 || GTEST_OS_XTENSA || GTEST_OS_QURT // do nothing int result = 0; #else @@ -341,25 +371,34 @@ bool FilePath::CreateFolder() const { // name, otherwise return the name string unmodified. // On Windows platform, uses \ as the separator, other platforms use /. FilePath FilePath::RemoveTrailingPathSeparator() const { - return IsDirectory() - ? FilePath(pathname_.substr(0, pathname_.length() - 1)) - : *this; + return IsDirectory() ? FilePath(pathname_.substr(0, pathname_.length() - 1)) + : *this; } // Removes any redundant separators that might be in the pathname. // For example, "bar///foo" becomes "bar/foo". Does not eliminate other // redundancies that might be in a pathname involving "." or "..". +// Note that "\\Host\Share" does not contain a redundancy on Windows! 
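The new CalculateRootLength() above gives FilePath a single definition of a
path root, which IsRootDirectory() and IsAbsolutePath() now share. A minimal
standalone sketch of the same classification rule follows; the helper names
are invented for illustration and this is not the patch's code:

    // Sketch: CalculateRootLength()'s rule restated over std::string.
    #include <cstdio>
    #include <string>

    static bool IsSep(char c) { return c == '/' || c == '\\'; }

    // Returns the length of the root prefix, or 0 for a relative path.
    static size_t RootLength(const std::string& p) {
      if (p.size() >= 2 && p[1] == ':' &&
          ((p[0] >= 'A' && p[0] <= 'Z') || (p[0] >= 'a' && p[0] <= 'z')) &&
          (p.size() == 2 || IsSep(p[2])))
        return p.size() == 2 ? 2 : 3;          // "C:" or "C:\..."
      if (p.size() >= 3 && IsSep(p[0]) && IsSep(p[1]) && !IsSep(p[2])) {
        size_t i = 2;
        for (int comp = 0; comp < 2; ++comp)   // skip "Server\" and "Share\"
          while (i < p.size() && !IsSep(p[i++])) {}
        return i;                              // "\\Server\Share\" is the root
      }
      if (!p.empty() && IsSep(p[0])) return 1; // "\Windows" or "/usr"
      return 0;
    }

    int main() {
      for (const char* s :
           {"C:\\Windows", "\\\\Server\\Share\\Folder", "/usr", "rel/path"})
        std::printf("%-28s root length = %zu\n", s, RootLength(s));
    }

Under this rule a path is absolute when the root length is nonzero, and a
root directory when the whole string is the root and ends in a separator,
matching the two rewritten predicates below.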
 void FilePath::Normalize() {
   auto out = pathname_.begin();
 
-  for (const char character : pathname_) {
+  auto i = pathname_.cbegin();
+#if GTEST_OS_WINDOWS
+  // UNC paths are treated specially
+  if (pathname_.end() - i >= 3 && IsPathSeparator(*i) &&
+      IsPathSeparator(*(i + 1)) && !IsPathSeparator(*(i + 2))) {
+    *(out++) = kPathSeparator;
+    *(out++) = kPathSeparator;
+  }
+#endif
+  while (i != pathname_.end()) {
+    const char character = *i;
     if (!IsPathSeparator(character)) {
       *(out++) = character;
     } else if (out == pathname_.begin() || *std::prev(out) != kPathSeparator) {
       *(out++) = kPathSeparator;
-    } else {
-      continue;
     }
+    ++i;
   }
 
   pathname_.erase(out, pathname_.end());
@@ -367,3 +406,5 @@ void FilePath::Normalize() {
 
 }  // namespace internal
 }  // namespace testing
+
+#endif  // GTEST_HAS_FILE_SYSTEM
diff --git a/deps/googletest/src/gtest-internal-inl.h b/deps/googletest/src/gtest-internal-inl.h
index 075b84c258eaa9..2c9db4f207854d 100644
--- a/deps/googletest/src/gtest-internal-inl.h
+++ b/deps/googletest/src/gtest-internal-inl.h
@@ -35,7 +35,7 @@
 #define GOOGLETEST_SRC_GTEST_INTERNAL_INL_H_
 
 #ifndef _WIN32_WCE
-# include
+#include
 #endif  // !_WIN32_WCE
 #include
 #include  // For strtoll/_strtoul64/malloc/free.
@@ -44,22 +44,23 @@
 #include
 #include
 #include
+#include
 #include
 #include
 
 #include "gtest/internal/gtest-port.h"
 
 #if GTEST_CAN_STREAM_RESULTS_
-# include  // NOLINT
-# include  // NOLINT
+#include  // NOLINT
+#include  // NOLINT
 #endif
 
 #if GTEST_OS_WINDOWS
-# include  // NOLINT
-#endif  // GTEST_OS_WINDOWS
+#include  // NOLINT
+#endif  // GTEST_OS_WINDOWS
 
-#include "gtest/gtest.h"
 #include "gtest/gtest-spi.h"
+#include "gtest/gtest.h"
 
 GTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \
 /* class A needs to have dll-interface to be used by clients of class B */)
 
@@ -109,15 +110,16 @@ GTEST_API_ bool ParseFlag(const char* str, const char* flag, int32_t* value);
 // Returns a random seed in range [1, kMaxRandomSeed] based on the
 // given --gtest_random_seed flag value.
 inline int GetRandomSeedFromFlag(int32_t random_seed_flag) {
-  const unsigned int raw_seed = (random_seed_flag == 0) ?
-      static_cast(GetTimeInMillis()) :
-      static_cast(random_seed_flag);
+  const unsigned int raw_seed =
+      (random_seed_flag == 0) ? static_cast(GetTimeInMillis())
+                              : static_cast(random_seed_flag);
 
   // Normalizes the actual seed to range [1, kMaxRandomSeed] such that
   // it's easy to type.
   const int normalized_seed =
       static_cast((raw_seed - 1U) %
-          static_cast(kMaxRandomSeed)) + 1;
+                  static_cast(kMaxRandomSeed)) +
+      1;
   return normalized_seed;
 }
 
@@ -211,7 +213,7 @@ class GTestFlagSaver {
   int32_t stack_trace_depth_;
   std::string stream_result_to_;
   bool throw_on_failure_;
-} GTEST_ATTRIBUTE_UNUSED_;
+};
 
 // Converts a Unicode code point to a narrow string in UTF-8 encoding.
 // code_point parameter is of type UInt32 because wchar_t may not be
 
@@ -261,8 +263,8 @@ GTEST_API_ int32_t Int32FromEnvOrDie(const char* env_var, int32_t default_val);
 // returns true if and only if the test should be run on this shard. The test id
 // is some arbitrary but unique non-negative integer assigned to each test
 // method. Assumes that 0 <= shard_index < total_shards.
-GTEST_API_ bool ShouldRunTestOnShard(
-    int total_shards, int shard_index, int test_id);
+GTEST_API_ bool ShouldRunTestOnShard(int total_shards, int shard_index,
                                     int test_id);
 
 // STL container utilities.
 
@@ -273,9 +275,8 @@ inline int CountIf(const Container& c, Predicate predicate) {
   // Implemented as an explicit loop since std::count_if() in libCstd on
   // Solaris has a non-standard signature.
   int count = 0;
-  for (typename Container::const_iterator it = c.begin(); it != c.end(); ++it) {
-    if (predicate(*it))
-      ++count;
+  for (auto it = c.begin(); it != c.end(); ++it) {
+    if (predicate(*it)) ++count;
   }
   return count;
 }
 
@@ -396,9 +397,11 @@ class GTEST_API_ UnitTestOptions {
   static bool MatchesFilter(const std::string& name, const char* filter);
 };
 
+#if GTEST_HAS_FILE_SYSTEM
 // Returns the current application's name, removing directory path if that
 // is present. Used by UnitTestOptions::GetOutputFile.
 GTEST_API_ FilePath GetCurrentExecutableName();
+#endif  // GTEST_HAS_FILE_SYSTEM
 
 // The role interface for getting the OS stack trace as a string.
 class OsStackTraceGetterInterface {
 
@@ -424,7 +427,9 @@ class OsStackTraceGetterInterface {
   static const char* const kElidedFramesMarker;
 
  private:
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetterInterface);
+  OsStackTraceGetterInterface(const OsStackTraceGetterInterface&) = delete;
+  OsStackTraceGetterInterface& operator=(const OsStackTraceGetterInterface&) =
+      delete;
 };
 
 // A working implementation of the OsStackTraceGetterInterface interface.
@@ -446,7 +451,8 @@ class OsStackTraceGetter : public OsStackTraceGetterInterface {
   void* caller_frame_ = nullptr;
 #endif  // GTEST_HAS_ABSL
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetter);
+  OsStackTraceGetter(const OsStackTraceGetter&) = delete;
+  OsStackTraceGetter& operator=(const OsStackTraceGetter&) = delete;
 };
 
 // Information about a Google Test trace point.
@@ -459,7 +465,7 @@ struct TraceInfo {
 // This is the default global test part result reporter used in UnitTestImpl.
 // This class should only be used by UnitTestImpl.
 class DefaultGlobalTestPartResultReporter
-  : public TestPartResultReporterInterface {
+    : public TestPartResultReporterInterface {
  public:
   explicit DefaultGlobalTestPartResultReporter(UnitTestImpl* unit_test);
   // Implements the TestPartResultReporterInterface. Reports the test part
@@ -469,7 +475,10 @@ class DefaultGlobalTestPartResultReporter
 
 private:
   UnitTestImpl* const unit_test_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultGlobalTestPartResultReporter);
+  DefaultGlobalTestPartResultReporter(
+      const DefaultGlobalTestPartResultReporter&) = delete;
+  DefaultGlobalTestPartResultReporter& operator=(
+      const DefaultGlobalTestPartResultReporter&) = delete;
 };
 
 // This is the default per thread test part result reporter used in
 
@@ -485,7 +494,10 @@ class DefaultPerThreadTestPartResultReporter
 
 private:
   UnitTestImpl* const unit_test_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultPerThreadTestPartResultReporter);
+  DefaultPerThreadTestPartResultReporter(
+      const DefaultPerThreadTestPartResultReporter&) = delete;
+  DefaultPerThreadTestPartResultReporter& operator=(
+      const DefaultPerThreadTestPartResultReporter&) = delete;
 };
 
 // The private implementation of the UnitTest class. We don't protect
 
@@ -498,9 +510,9 @@ class GTEST_API_ UnitTestImpl {
   virtual ~UnitTestImpl();
 
   // There are two different ways to register your own TestPartResultReporter.
-  // You can register your own repoter to listen either only for test results
+  // You can register your own reporter to listen either only for test results
   // from the current thread or for results from all threads.
-  // By default, each per-thread test result repoter just passes a new
+  // By default, each per-thread test result reporter just passes a new
   // TestPartResult to the global test result reporter, which registers the
   // test part result for the currently running test.
 
@@ -623,7 +635,8 @@ class GTEST_API_ UnitTestImpl {
   // For example, if Foo() calls Bar(), which in turn calls
   // CurrentOsStackTraceExceptTop(1), Foo() will be included in the
   // trace but Bar() and CurrentOsStackTraceExceptTop() won't.
-  std::string CurrentOsStackTraceExceptTop(int skip_count) GTEST_NO_INLINE_;
+  std::string CurrentOsStackTraceExceptTop(int skip_count)
+      GTEST_NO_INLINE_ GTEST_NO_TAIL_CALL_;
 
   // Finds and returns a TestSuite with the given name. If one doesn't
   // exist, creates one and returns it.
 
@@ -727,9 +740,7 @@ class GTEST_API_ UnitTestImpl {
   }
 
   // Clears the results of ad-hoc test assertions.
-  void ClearAdHocTestResult() {
-    ad_hoc_test_result_.Clear();
-  }
+  void ClearAdHocTestResult() { ad_hoc_test_result_.Clear(); }
 
   // Adds a TestProperty to the current TestResult object when invoked in a
   // context of a test or a test suite, or to the global property set. If the
   // result already contains a property with the same key, the value will be
   // updated.
   void RecordProperty(const TestProperty& test_property);
 
-  enum ReactionToSharding {
-    HONOR_SHARDING_PROTOCOL,
-    IGNORE_SHARDING_PROTOCOL
-  };
+  enum ReactionToSharding { HONOR_SHARDING_PROTOCOL, IGNORE_SHARDING_PROTOCOL };
 
   // Matches the full name of each test against the user-specified
   // filter to decide whether the test should run, then records the
 
@@ -835,9 +843,11 @@ class GTEST_API_ UnitTestImpl {
   // The UnitTest object that owns this implementation object.
   UnitTest* const parent_;
 
+#if GTEST_HAS_FILE_SYSTEM
   // The working directory when the first TEST() or TEST_F() was
   // executed.
   internal::FilePath original_working_dir_;
+#endif  // GTEST_HAS_FILE_SYSTEM
 
   // The default test part result reporters.
   DefaultGlobalTestPartResultReporter default_global_test_part_result_reporter_;
@@ -845,7 +855,7 @@ class GTEST_API_ UnitTestImpl {
       default_per_thread_test_part_result_reporter_;
 
   // Points to (but doesn't own) the global test part result reporter.
-  TestPartResultReporterInterface* global_test_part_result_repoter_;
+  TestPartResultReporterInterface* global_test_part_result_reporter_;
 
   // Protects read and write access to global_test_part_result_reporter_.
   internal::Mutex global_test_part_result_reporter_mutex_;
 
@@ -946,7 +956,8 @@ class GTEST_API_ UnitTestImpl {
   // starts.
   bool catch_exceptions_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTestImpl);
+  UnitTestImpl(const UnitTestImpl&) = delete;
+  UnitTestImpl& operator=(const UnitTestImpl&) = delete;
 };  // class UnitTestImpl
 
 // Convenience function for accessing the global UnitTest
 
@@ -969,8 +980,9 @@ GTEST_API_ bool IsValidEscape(char ch);
 GTEST_API_ bool AtomMatchesChar(bool escaped, char pattern, char ch);
 GTEST_API_ bool ValidateRegex(const char* regex);
 GTEST_API_ bool MatchRegexAtHead(const char* regex, const char* str);
-GTEST_API_ bool MatchRepetitionAndRegexAtHead(
-    bool escaped, char ch, char repeat, const char* regex, const char* str);
+GTEST_API_ bool MatchRepetitionAndRegexAtHead(bool escaped, char ch,
+                                              char repeat, const char* regex,
+                                              const char* str);
 GTEST_API_ bool MatchRegexAnywhere(const char* regex, const char* str);
 #endif  // GTEST_USES_SIMPLE_RE
 
@@ -1072,8 +1084,7 @@ class StreamingListener : public EmptyTestEventListener {
     }
 
     ~SocketWriter() override {
-      if (sockfd_ != -1)
-        CloseConnection();
+      if (sockfd_ != -1) CloseConnection();
     }
 
     // Sends a string to the socket.
@@ -1083,9 +1094,8 @@ class StreamingListener : public EmptyTestEventListener {
       const auto len = static_cast(message.length());
       if (write(sockfd_, message.c_str(), len) != static_cast(len)) {
-        GTEST_LOG_(WARNING)
-            << "stream_result_to: failed to stream to "
-            << host_name_ << ":" << port_num_;
+        GTEST_LOG_(WARNING) << "stream_result_to: failed to stream to "
+                            << host_name_ << ":" << port_num_;
       }
     }
 
@@ -1106,7 +1116,8 @@ class StreamingListener : public EmptyTestEventListener {
     const std::string host_name_;
     const std::string port_num_;
 
-    GTEST_DISALLOW_COPY_AND_ASSIGN_(SocketWriter);
+    SocketWriter(const SocketWriter&) = delete;
+    SocketWriter& operator=(const SocketWriter&) = delete;
   };  // class SocketWriter
 
   // Escapes '=', '&', '%', and '\n' characters in str as "%xx".
 
@@ -1118,7 +1129,9 @@ class StreamingListener : public EmptyTestEventListener {
   }
 
   explicit StreamingListener(AbstractSocketWriter* socket_writer)
-      : socket_writer_(socket_writer) { Start(); }
+      : socket_writer_(socket_writer) {
+    Start();
+  }
 
   void OnTestProgramStart(const UnitTest& /* unit_test */) override {
     SendLn("event=TestProgramStart");
 
@@ -1141,22 +1154,22 @@ class StreamingListener : public EmptyTestEventListener {
 
   void OnTestIterationEnd(const UnitTest& unit_test,
                           int /* iteration */) override {
-    SendLn("event=TestIterationEnd&passed=" +
-           FormatBool(unit_test.Passed()) + "&elapsed_time=" +
-           StreamableToString(unit_test.elapsed_time()) + "ms");
+    SendLn("event=TestIterationEnd&passed=" + FormatBool(unit_test.Passed()) +
+           "&elapsed_time=" + StreamableToString(unit_test.elapsed_time()) +
+           "ms");
   }
 
   // Note that "event=TestCaseStart" is a wire format and has to remain
   // "case" for compatibility
-  void OnTestCaseStart(const TestCase& test_case) override {
-    SendLn(std::string("event=TestCaseStart&name=") + test_case.name());
+  void OnTestSuiteStart(const TestSuite& test_suite) override {
+    SendLn(std::string("event=TestCaseStart&name=") + test_suite.name());
   }
 
   // Note that "event=TestCaseEnd" is a wire format and has to remain
   // "case" for compatibility
-  void OnTestCaseEnd(const TestCase& test_case) override {
-    SendLn("event=TestCaseEnd&passed=" + FormatBool(test_case.Passed()) +
-           "&elapsed_time=" + StreamableToString(test_case.elapsed_time()) +
+  void OnTestSuiteEnd(const TestSuite& test_suite) override {
+    SendLn("event=TestCaseEnd&passed=" + FormatBool(test_suite.Passed()) +
+           "&elapsed_time=" + StreamableToString(test_suite.elapsed_time()) +
            "ms");
   }
 
@@ -1166,8 +1179,7 @@ class StreamingListener : public EmptyTestEventListener {
 
   void OnTestEnd(const TestInfo& test_info) override {
     SendLn("event=TestEnd&passed=" +
-           FormatBool((test_info.result())->Passed()) +
-           "&elapsed_time=" +
+           FormatBool((test_info.result())->Passed()) + "&elapsed_time=" +
            StreamableToString((test_info.result())->elapsed_time()) + "ms");
   }
 
@@ -1191,7 +1203,8 @@ class StreamingListener : public EmptyTestEventListener {
 
   const std::unique_ptr socket_writer_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamingListener);
+  StreamingListener(const StreamingListener&) = delete;
+  StreamingListener& operator=(const StreamingListener&) = delete;
 };  // class StreamingListener
 
 #endif  // GTEST_CAN_STREAM_RESULTS_
diff --git a/deps/googletest/src/gtest-matchers.cc b/deps/googletest/src/gtest-matchers.cc
index 65104ebab1ba9a..7e3bcc0cff3806 100644
--- a/deps/googletest/src/gtest-matchers.cc
+++ b/deps/googletest/src/gtest-matchers.cc
@@ -32,12 +32,13 @@
 // This file implements just enough of the matcher interface to allow
 // EXPECT_DEATH and friends to accept a matcher argument.
 
-#include "gtest/internal/gtest-internal.h"
-#include "gtest/internal/gtest-port.h"
 #include "gtest/gtest-matchers.h"
 
 #include
 
+#include "gtest/internal/gtest-internal.h"
+#include "gtest/internal/gtest-port.h"
+
 namespace testing {
 
 // Constructs a matcher that matches a const std::string& whose value is
diff --git a/deps/googletest/src/gtest-port.cc b/deps/googletest/src/gtest-port.cc
index c3c93e6185d402..d797fe4d5866c7 100644
--- a/deps/googletest/src/gtest-port.cc
+++ b/deps/googletest/src/gtest-port.cc
@@ -27,61 +27,62 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-
 #include "gtest/internal/gtest-port.h"
 
 #include
 #include
 #include
 #include
+
 #include
 #include
 #include
 
 #if GTEST_OS_WINDOWS
-# include
-# include
-# include
-# include  // Used in ThreadLocal.
-# ifdef _MSC_VER
-#  include
-# endif  // _MSC_VER
+#include
+#include
+#include
+
+#include  // Used in ThreadLocal.
+#ifdef _MSC_VER
+#include
+#endif  // _MSC_VER
 #else
-# include
+#include
 #endif  // GTEST_OS_WINDOWS
 
 #if GTEST_OS_MAC
-# include
-# include
-# include
+#include
+#include
+#include
 #endif  // GTEST_OS_MAC
 
 #if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
     GTEST_OS_NETBSD || GTEST_OS_OPENBSD
-# include
-# if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD
-#  include
-# endif
+#include
+#if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD
+#include
+#endif
 #endif
 
 #if GTEST_OS_QNX
-# include
-# include
-# include
+#include
+#include
+#include
 #endif  // GTEST_OS_QNX
 
 #if GTEST_OS_AIX
-# include
-# include
+#include
+#include
 #endif  // GTEST_OS_AIX
 
 #if GTEST_OS_FUCHSIA
-# include
-# include
+#include
+#include
 #endif  // GTEST_OS_FUCHSIA
 
-#include "gtest/gtest-spi.h"
 #include "gtest/gtest-message.h"
+#include "gtest/gtest-spi.h"
 #include "gtest/internal/gtest-internal.h"
 #include "gtest/internal/gtest-string.h"
 #include "src/gtest-internal-inl.h"
@@ -89,15 +90,6 @@
 namespace testing {
 namespace internal {
 
-#if defined(_MSC_VER) || defined(__BORLANDC__)
-// MSVC and C++Builder do not provide a definition of STDERR_FILENO.
-const int kStdOutFileno = 1;
-const int kStdErrFileno = 2;
-#else
-const int kStdOutFileno = STDOUT_FILENO;
-const int kStdErrFileno = STDERR_FILENO;
-#endif  // _MSC_VER
-
 #if GTEST_OS_LINUX || GTEST_OS_GNU_HURD
 
 namespace {
 
@@ -131,8 +123,7 @@ size_t GetThreadCount() {
   if (status == KERN_SUCCESS) {
     // task_threads allocates resources in thread_list and we need to free them
     // to avoid leaks.
-    vm_deallocate(task,
-                  reinterpret_cast(thread_list),
+    vm_deallocate(task, reinterpret_cast(thread_list),
                   sizeof(thread_t) * thread_count);
     return static_cast(thread_count);
   } else {
 
@@ -141,7 +132,7 @@ size_t GetThreadCount() {
 }
 
 #elif GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \
-    GTEST_OS_NETBSD
+      GTEST_OS_NETBSD
 
 #if GTEST_OS_NETBSD
 #undef KERN_PROC
 
@@ -184,12 +175,12 @@ size_t GetThreadCount() {
 // we cannot detect it.
 size_t GetThreadCount() {
   int mib[] = {
-    CTL_KERN,
-    KERN_PROC,
-    KERN_PROC_PID | KERN_PROC_SHOW_THREADS,
-    getpid(),
-    sizeof(struct kinfo_proc),
-    0,
+      CTL_KERN,
+      KERN_PROC,
+      KERN_PROC_PID | KERN_PROC_SHOW_THREADS,
+      getpid(),
+      sizeof(struct kinfo_proc),
+      0,
   };
   u_int miblen = sizeof(mib) / sizeof(mib[0]);
 
@@ -210,8 +201,7 @@ size_t GetThreadCount() {
   // exclude empty members
   size_t nthreads = 0;
   for (size_t i = 0; i < size / static_cast(mib[4]); i++) {
-    if (info[i].p_tid != -1)
-      nthreads++;
+    if (info[i].p_tid != -1) nthreads++;
   }
   return nthreads;
 }
 
@@ -254,13 +244,9 @@ size_t GetThreadCount() {
 size_t GetThreadCount() {
   int dummy_buffer;
   size_t avail;
-  zx_status_t status = zx_object_get_info(
-      zx_process_self(),
-      ZX_INFO_PROCESS_THREADS,
-      &dummy_buffer,
-      0,
-      nullptr,
-      &avail);
+  zx_status_t status =
+      zx_object_get_info(zx_process_self(), ZX_INFO_PROCESS_THREADS,
+                         &dummy_buffer, 0, nullptr, &avail);
   if (status == ZX_OK) {
     return avail;
   } else {
 
@@ -280,27 +266,15 @@ size_t GetThreadCount() {
 
 #if GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS
 
-void SleepMilliseconds(int n) {
-  ::Sleep(static_cast(n));
-}
+AutoHandle::AutoHandle() : handle_(INVALID_HANDLE_VALUE) {}
 
-AutoHandle::AutoHandle()
-    : handle_(INVALID_HANDLE_VALUE) {}
+AutoHandle::AutoHandle(Handle handle) : handle_(handle) {}
 
-AutoHandle::AutoHandle(Handle handle)
-    : handle_(handle) {}
+AutoHandle::~AutoHandle() { Reset(); }
 
-AutoHandle::~AutoHandle() {
-  Reset();
-}
-
-AutoHandle::Handle AutoHandle::Get() const {
-  return handle_;
-}
+AutoHandle::Handle AutoHandle::Get() const { return handle_; }
 
-void AutoHandle::Reset() {
-  Reset(INVALID_HANDLE_VALUE);
-}
+void AutoHandle::Reset() { Reset(INVALID_HANDLE_VALUE); }
 
 void AutoHandle::Reset(HANDLE handle) {
   // Resetting with the same handle we already own is invalid.
 
@@ -312,7 +286,7 @@ void AutoHandle::Reset(HANDLE handle) {
   } else {
     GTEST_CHECK_(!IsCloseable())
         << "Resetting a valid handle to itself is likely a programmer error "
-            "and thus not allowed.";
+           "and thus not allowed.";
   }
 }
 
@@ -322,23 +296,6 @@ bool AutoHandle::IsCloseable() const {
   return handle_ != nullptr && handle_ != INVALID_HANDLE_VALUE;
 }
 
-Notification::Notification()
-    : event_(::CreateEvent(nullptr,     // Default security attributes.
-                           TRUE,        // Do not reset automatically.
-                           FALSE,       // Initially unset.
-                           nullptr)) {  // Anonymous event.
-  GTEST_CHECK_(event_.Get() != nullptr);
-}
-
-void Notification::Notify() {
-  GTEST_CHECK_(::SetEvent(event_.Get()) != FALSE);
-}
-
-void Notification::WaitForNotification() {
-  GTEST_CHECK_(
-      ::WaitForSingleObject(event_.Get(), INFINITE) == WAIT_OBJECT_0);
-}
-
 Mutex::Mutex()
     : owner_thread_id_(0),
       type_(kDynamic),
 
@@ -391,25 +348,25 @@ namespace {
 //   MemoryIsNotDeallocated memory_is_not_deallocated;
 //   critical_section_ = new CRITICAL_SECTION;
 //
-class MemoryIsNotDeallocated
-{
+class MemoryIsNotDeallocated {
  public:
  MemoryIsNotDeallocated() : old_crtdbg_flag_(0) {
    old_crtdbg_flag_ = _CrtSetDbgFlag(_CRTDBG_REPORT_FLAG);
    // Set heap allocation block type to _IGNORE_BLOCK so that MS debug CRT
    // doesn't report mem leak if there's no matching deallocation.
-    _CrtSetDbgFlag(old_crtdbg_flag_ & ~_CRTDBG_ALLOC_MEM_DF);
+    (void)_CrtSetDbgFlag(old_crtdbg_flag_ & ~_CRTDBG_ALLOC_MEM_DF);
   }
 
   ~MemoryIsNotDeallocated() {
     // Restore the original _CRTDBG_ALLOC_MEM_DF flag
-    _CrtSetDbgFlag(old_crtdbg_flag_);
+    (void)_CrtSetDbgFlag(old_crtdbg_flag_);
   }
 
  private:
   int old_crtdbg_flag_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(MemoryIsNotDeallocated);
+  MemoryIsNotDeallocated(const MemoryIsNotDeallocated&) = delete;
+  MemoryIsNotDeallocated& operator=(const MemoryIsNotDeallocated&) = delete;
 };
 #endif  // _MSC_VER
 
@@ -435,15 +392,13 @@ void Mutex::ThreadSafeLazyInit() {
         ::InitializeCriticalSection(critical_section_);
         // Updates the critical_section_init_phase_ to 2 to signal
         // initialization complete.
-        GTEST_CHECK_(::InterlockedCompareExchange(
-                          &critical_section_init_phase_, 2L, 1L) ==
-                      1L);
+        GTEST_CHECK_(::InterlockedCompareExchange(&critical_section_init_phase_,
+                                                  2L, 1L) == 1L);
         break;
       case 1:
         // Somebody else is already initializing the mutex; spin until they
         // are done.
-        while (::InterlockedCompareExchange(&critical_section_init_phase_,
-                                            2L,
+        while (::InterlockedCompareExchange(&critical_section_init_phase_, 2L,
                                             2L) != 2L) {
           // Possibly yields the rest of the thread's time slice to other
          // threads.
 
@@ -488,9 +443,7 @@ class ThreadWithParamSupport : public ThreadWithParamBase {
  private:
   struct ThreadMainParam {
     ThreadMainParam(Runnable* runnable, Notification* thread_can_start)
-        : runnable_(runnable),
-          thread_can_start_(thread_can_start) {
-    }
+        : runnable_(runnable), thread_can_start_(thread_can_start) {}
     std::unique_ptr runnable_;  // Does not own.
     Notification* thread_can_start_;
 
@@ -508,20 +461,18 @@ class ThreadWithParamSupport : public ThreadWithParamBase {
   // Prohibit instantiation.
   ThreadWithParamSupport();
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParamSupport);
+  ThreadWithParamSupport(const ThreadWithParamSupport&) = delete;
+  ThreadWithParamSupport& operator=(const ThreadWithParamSupport&) = delete;
 };
 
 }  // namespace
 
-ThreadWithParamBase::ThreadWithParamBase(Runnable *runnable,
+ThreadWithParamBase::ThreadWithParamBase(Runnable* runnable,
                                          Notification* thread_can_start)
-      : thread_(ThreadWithParamSupport::CreateThread(runnable,
-                                                     thread_can_start)) {
-}
+    : thread_(
+          ThreadWithParamSupport::CreateThread(runnable, thread_can_start)) {}
 
-ThreadWithParamBase::~ThreadWithParamBase() {
-  Join();
-}
+ThreadWithParamBase::~ThreadWithParamBase() { Join(); }
 
 void ThreadWithParamBase::Join() {
   GTEST_CHECK_(::WaitForSingleObject(thread_.Get(), INFINITE) == WAIT_OBJECT_0)
 
@@ -548,8 +499,10 @@ class ThreadLocalRegistryImpl {
     ThreadIdToThreadLocals::iterator thread_local_pos =
         thread_to_thread_locals->find(current_thread);
     if (thread_local_pos == thread_to_thread_locals->end()) {
-      thread_local_pos = thread_to_thread_locals->insert(
-          std::make_pair(current_thread, ThreadLocalValues())).first;
+      thread_local_pos =
+          thread_to_thread_locals
+              ->insert(std::make_pair(current_thread, ThreadLocalValues()))
+              .first;
       StartWatcherThreadFor(current_thread);
     }
     ThreadLocalValues& thread_local_values = thread_local_pos->second;
 
@@ -577,9 +530,8 @@ class ThreadLocalRegistryImpl {
       ThreadIdToThreadLocals* const thread_to_thread_locals =
           GetThreadLocalsMapLocked();
       for (ThreadIdToThreadLocals::iterator it =
-          thread_to_thread_locals->begin();
-          it != thread_to_thread_locals->end();
-          ++it) {
+               thread_to_thread_locals->begin();
+           it != thread_to_thread_locals->end(); ++it) {
         ThreadLocalValues& thread_local_values = it->second;
         ThreadLocalValues::iterator value_pos =
             thread_local_values.find(thread_local_instance);
 
@@ -609,9 +561,8 @@ class ThreadLocalRegistryImpl {
     if (thread_local_pos != thread_to_thread_locals->end()) {
       ThreadLocalValues& thread_local_values = thread_local_pos->second;
       for (ThreadLocalValues::iterator value_pos =
-          thread_local_values.begin();
-          value_pos != thread_local_values.end();
-          ++value_pos) {
+               thread_local_values.begin();
+           value_pos != thread_local_values.end(); ++value_pos) {
         value_holders.push_back(value_pos->second);
       }
       thread_to_thread_locals->erase(thread_local_pos);
 
@@ -637,9 +588,8 @@ class ThreadLocalRegistryImpl {
   static void StartWatcherThreadFor(DWORD thread_id) {
     // The returned handle will be kept in thread_map and closed by
     // watcher_thread in WatcherThreadFunc.
-    HANDLE thread = ::OpenThread(SYNCHRONIZE | THREAD_QUERY_INFORMATION,
-                                 FALSE,
-                                 thread_id);
+    HANDLE thread =
+        ::OpenThread(SYNCHRONIZE | THREAD_QUERY_INFORMATION, FALSE, thread_id);
     GTEST_CHECK_(thread != nullptr);
     // We need to pass a valid thread ID pointer into CreateThread for it
     // to work correctly under Win98.
 
@@ -650,7 +600,8 @@ class ThreadLocalRegistryImpl {
         &ThreadLocalRegistryImpl::WatcherThreadFunc,
         reinterpret_cast(new ThreadIdAndHandle(thread_id, thread)),
         CREATE_SUSPENDED, &watcher_thread_id);
-    GTEST_CHECK_(watcher_thread != nullptr);
+    GTEST_CHECK_(watcher_thread != nullptr)
+        << "CreateThread failed with error " << ::GetLastError() << ".";
     // Give the watcher thread the same priority as ours to avoid being
     // blocked by it.
     ::SetThreadPriority(watcher_thread,
 
@@ -664,8 +615,7 @@ class ThreadLocalRegistryImpl {
   static DWORD WINAPI WatcherThreadFunc(LPVOID param) {
     const ThreadIdAndHandle* tah =
         reinterpret_cast(param);
-    GTEST_CHECK_(
-        ::WaitForSingleObject(tah->second, INFINITE) == WAIT_OBJECT_0);
+    GTEST_CHECK_(::WaitForSingleObject(tah->second, INFINITE) == WAIT_OBJECT_0);
     OnThreadExit(tah->first);
     ::CloseHandle(tah->second);
     delete tah;
 
@@ -689,16 +639,17 @@ class ThreadLocalRegistryImpl {
 };
 
 Mutex ThreadLocalRegistryImpl::mutex_(Mutex::kStaticMutex);  // NOLINT
-Mutex ThreadLocalRegistryImpl::thread_map_mutex_(Mutex::kStaticMutex);  // NOLINT
+Mutex ThreadLocalRegistryImpl::thread_map_mutex_(
+    Mutex::kStaticMutex);  // NOLINT
 
 ThreadLocalValueHolderBase* ThreadLocalRegistry::GetValueOnCurrentThread(
-      const ThreadLocalBase* thread_local_instance) {
+    const ThreadLocalBase* thread_local_instance) {
   return ThreadLocalRegistryImpl::GetValueOnCurrentThread(
       thread_local_instance);
 }
 
 void ThreadLocalRegistry::OnThreadLocalDestroyed(
-      const ThreadLocalBase* thread_local_instance) {
+    const ThreadLocalBase* thread_local_instance) {
   ThreadLocalRegistryImpl::OnThreadLocalDestroyed(thread_local_instance);
 }
 
@@ -786,7 +737,7 @@ bool IsRepeat(char ch) { return IsInSet(ch, "?*+"); }
 bool IsAsciiWhiteSpace(char ch) { return IsInSet(ch, " \f\n\r\t\v"); }
 bool IsAsciiWordChar(char ch) {
   return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||
-      ('0' <= ch && ch <= '9') || ch == '_';
+         ('0' <= ch && ch <= '9') || ch == '_';
 }
 
 // Returns true if and only if "\\c" is a supported escape sequence.
 
@@ -799,17 +750,28 @@ bool IsValidEscape(char c) {
 bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
   if (escaped) {  // "\\p" where p is pattern_char.
     switch (pattern_char) {
-      case 'd': return IsAsciiDigit(ch);
-      case 'D': return !IsAsciiDigit(ch);
-      case 'f': return ch == '\f';
-      case 'n': return ch == '\n';
-      case 'r': return ch == '\r';
-      case 's': return IsAsciiWhiteSpace(ch);
-      case 'S': return !IsAsciiWhiteSpace(ch);
-      case 't': return ch == '\t';
-      case 'v': return ch == '\v';
-      case 'w': return IsAsciiWordChar(ch);
-      case 'W': return !IsAsciiWordChar(ch);
+      case 'd':
+        return IsAsciiDigit(ch);
+      case 'D':
+        return !IsAsciiDigit(ch);
+      case 'f':
+        return ch == '\f';
+      case 'n':
+        return ch == '\n';
+      case 'r':
+        return ch == '\r';
+      case 's':
+        return IsAsciiWhiteSpace(ch);
+      case 'S':
+        return !IsAsciiWhiteSpace(ch);
+      case 't':
+        return ch == '\t';
+      case 'v':
+        return ch == '\v';
+      case 'w':
+        return IsAsciiWordChar(ch);
+      case 'W':
+        return !IsAsciiWordChar(ch);
     }
     return IsAsciiPunct(pattern_char) && pattern_char == ch;
   }
 
@@ -820,7 +782,8 @@ bool AtomMatchesChar(bool escaped, char pattern_char, char ch) {
 // Helper function used by ValidateRegex() to format error messages.
 static std::string FormatRegexSyntaxError(const char* regex, int index) {
   return (Message() << "Syntax error at index " << index
-          << " in simple regular expression \"" << regex << "\": ").GetString();
+                    << " in simple regular expression \"" << regex << "\": ")
+      .GetString();
 }
 
 // Generates non-fatal failures and returns false if regex is invalid;
 
@@ -862,12 +825,12 @@ bool ValidateRegex(const char* regex) {
           << "'$' can only appear at the end.";
       is_valid = false;
     } else if (IsInSet(ch, "()[]{}|")) {
-      ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
-                    << "'" << ch << "' is unsupported.";
+      ADD_FAILURE() << FormatRegexSyntaxError(regex, i) << "'" << ch
+                    << "' is unsupported.";
      is_valid = false;
    } else if (IsRepeat(ch) && !prev_repeatable) {
-      ADD_FAILURE() << FormatRegexSyntaxError(regex, i)
-                    << "'" << ch << "' can only follow a repeatable token.";
+      ADD_FAILURE() << FormatRegexSyntaxError(regex, i) << "'" << ch
+                    << "' can only follow a repeatable token.";
      is_valid = false;
    }
 
@@ -885,12 +848,10 @@ bool ValidateRegex(const char* regex) {
 // characters to be indexable by size_t, in which case the test will
 // probably time out anyway.  We are fine with this limitation as
 // std::string has it too.
-bool MatchRepetitionAndRegexAtHead(
-    bool escaped, char c, char repeat, const char* regex,
-    const char* str) {
+bool MatchRepetitionAndRegexAtHead(bool escaped, char c, char repeat,
+                                   const char* regex, const char* str) {
   const size_t min_count = (repeat == '+') ? 1 : 0;
-  const size_t max_count = (repeat == '?') ? 1 :
-      static_cast(-1) - 1;
+  const size_t max_count = (repeat == '?') ? 1 : static_cast(-1) - 1;
   // We cannot call numeric_limits::max() as it conflicts with the
   // max() macro on Windows.
 
@@ -903,8 +864,7 @@ bool MatchRepetitionAndRegexAtHead(
       // greedy match.
       return true;
     }
-    if (str[i] == '\0' || !AtomMatchesChar(escaped, c, str[i]))
-      return false;
+    if (str[i] == '\0' || !AtomMatchesChar(escaped, c, str[i])) return false;
   }
   return false;
 }
 
@@ -918,25 +878,23 @@ bool MatchRegexAtHead(const char* regex, const char* str) {
 
   // "$" only matches the end of a string.  Note that regex being
   // valid guarantees that there's nothing after "$" in it.
-  if (*regex == '$')
-    return *str == '\0';
+  if (*regex == '$') return *str == '\0';
 
   // Is the first thing in regex an escape sequence?
   const bool escaped = *regex == '\\';
-  if (escaped)
-    ++regex;
+  if (escaped) ++regex;
   if (IsRepeat(regex[1])) {
     // MatchRepetitionAndRegexAtHead() calls MatchRegexAtHead(), so
     // here's an indirect recursion.  It terminates as the regex gets
     // shorter in each recursion.
-    return MatchRepetitionAndRegexAtHead(
-        escaped, regex[0], regex[1], regex + 2, str);
+    return MatchRepetitionAndRegexAtHead(escaped, regex[0], regex[1], regex + 2,
+                                         str);
   } else {
     // regex isn't empty, isn't "$", and doesn't start with a
     // repetition.  We match the first atom of regex with the first
     // character of str and recurse.
     return (*str != '\0') && AtomMatchesChar(escaped, *regex, *str) &&
-        MatchRegexAtHead(regex + 1, str + 1);
+           MatchRegexAtHead(regex + 1, str + 1);
  }
 }
 
@@ -951,13 +909,11 @@ bool MatchRegexAtHead(const char* regex, const char* str) {
 bool MatchRegexAnywhere(const char* regex, const char* str) {
   if (regex == nullptr || str == nullptr) return false;
 
-  if (*regex == '^')
-    return MatchRegexAtHead(regex + 1, str);
+  if (*regex == '^') return MatchRegexAtHead(regex + 1, str);
 
   // A successful match can be anywhere in str.
   do {
-    if (MatchRegexAtHead(regex, str))
-      return true;
+    if (MatchRegexAtHead(regex, str)) return true;
   } while (*str++ != '\0');
   return false;
 }
 
@@ -1038,8 +994,8 @@ GTEST_API_ ::std::string FormatFileLocation(const char* file, int line) {
 // FormatFileLocation in order to contrast the two functions.
 // Note that FormatCompilerIndependentFileLocation() does NOT append colon
 // to the file location it produces, unlike FormatFileLocation().
-GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(
-    const char* file, int line) {
+GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(const char* file,
+                                                               int line) {
   const std::string file_name(file == nullptr ? kUnknownFile : file);
 
   if (line < 0)
 
@@ -1050,12 +1006,13 @@ GTEST_API_ ::std::string FormatCompilerIndependentFileLocation(
 
 GTestLog::GTestLog(GTestLogSeverity severity, const char* file, int line)
     : severity_(severity) {
-  const char* const marker =
-      severity == GTEST_INFO ?    "[  INFO ]" :
-      severity == GTEST_WARNING ? "[WARNING]" :
-      severity == GTEST_ERROR ?   "[ ERROR ]" : "[ FATAL ]";
-  GetStream() << ::std::endl << marker << " "
-              << FormatFileLocation(file, line).c_str() << ": ";
+  const char* const marker = severity == GTEST_INFO      ? "[  INFO ]"
+                             : severity == GTEST_WARNING ? "[WARNING]"
+                             : severity == GTEST_ERROR   ? "[ ERROR ]"
+                                                         : "[ FATAL ]";
+  GetStream() << ::std::endl
+              << marker << " " << FormatFileLocation(file, line).c_str()
+              << ": ";
 }
 
 // Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.
 
@@ -1078,27 +1035,26 @@ class CapturedStream {
 public:
   // The ctor redirects the stream to a temporary file.
   explicit CapturedStream(int fd) : fd_(fd), uncaptured_fd_(dup(fd)) {
-# if GTEST_OS_WINDOWS
-    char temp_dir_path[MAX_PATH + 1] = { '\0' };   // NOLINT
-    char temp_file_path[MAX_PATH + 1] = { '\0' };  // NOLINT
+#if GTEST_OS_WINDOWS
+    char temp_dir_path[MAX_PATH + 1] = {'\0'};   // NOLINT
+    char temp_file_path[MAX_PATH + 1] = {'\0'};  // NOLINT
 
     ::GetTempPathA(sizeof(temp_dir_path), temp_dir_path);
-    const UINT success = ::GetTempFileNameA(temp_dir_path,
-                                            "gtest_redir",
+    const UINT success = ::GetTempFileNameA(temp_dir_path, "gtest_redir",
                                             0,  // Generate unique file name.
                                             temp_file_path);
     GTEST_CHECK_(success != 0)
        << "Unable to create a temporary file in " << temp_dir_path;
    const int captured_fd = creat(temp_file_path, _S_IREAD | _S_IWRITE);
-    GTEST_CHECK_(captured_fd != -1) << "Unable to open temporary file "
-                                    << temp_file_path;
+    GTEST_CHECK_(captured_fd != -1)
+        << "Unable to open temporary file " << temp_file_path;
    filename_ = temp_file_path;
-# else
+#else
    // There's no guarantee that a test has write access to the current
    // directory, so we create the temporary file in a temporary directory.
    std::string name_template;
 
-# if GTEST_OS_LINUX_ANDROID
+#if GTEST_OS_LINUX_ANDROID
    // Note: Android applications are expected to call the framework's
    // Context.getExternalStorageDirectory() method through JNI to get
    // the location of the world-writable SD Card directory. However,
 
@@ -1111,7 +1067,7 @@ class CapturedStream {
    // '/sdcard' and other variants cannot be relied on, as they are not
    // guaranteed to be mounted, or may have a delay in mounting.
    name_template = "/data/local/tmp/";
-# elif GTEST_OS_IOS
+#elif GTEST_OS_IOS
    char user_temp_dir[PATH_MAX + 1];
 
    // Documented alternative to NSTemporaryDirectory() (for obtaining creating
 
@@ -1132,9 +1088,9 @@ class CapturedStream {
    name_template = user_temp_dir;
    if (name_template.back() != GTEST_PATH_SEP_[0])
      name_template.push_back(GTEST_PATH_SEP_[0]);
-# else
+#else
    name_template = "/tmp/";
-# endif
+#endif
    name_template.append("gtest_captured_stream.XXXXXX");
 
    // mkstemp() modifies the string bytes in place, and does not go beyond the
 
@@ -1150,15 +1106,13 @@ class CapturedStream {
          << " for test; does the test have access to the /tmp directory?";
    }
    filename_ = std::move(name_template);
-# endif  // GTEST_OS_WINDOWS
+#endif  // GTEST_OS_WINDOWS
 
    fflush(nullptr);
    dup2(captured_fd, fd_);
    close(captured_fd);
  }
 
-  ~CapturedStream() {
-    remove(filename_.c_str());
-  }
+  ~CapturedStream() { remove(filename_.c_str()); }
 
  std::string GetCapturedString() {
    if (uncaptured_fd_ != -1) {
 
@@ -1185,7 +1139,8 @@ class CapturedStream {
  // Name of the temporary file holding the stderr output.
  ::std::string filename_;
 
-  GTEST_DISALLOW_COPY_AND_ASSIGN_(CapturedStream);
+  CapturedStream(const CapturedStream&) = delete;
+  CapturedStream& operator=(const CapturedStream&) = delete;
 };
 
 GTEST_DISABLE_MSC_DEPRECATED_POP_()
 
@@ -1213,6 +1168,15 @@ static std::string GetCapturedStream(CapturedStream** captured_stream) {
  return content;
 }
 
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+// MSVC and C++Builder do not provide a definition of STDERR_FILENO.
+const int kStdOutFileno = 1;
+const int kStdErrFileno = 2;
+#else
+const int kStdOutFileno = STDOUT_FILENO;
+const int kStdErrFileno = STDERR_FILENO;
+#endif  // defined(_MSC_VER) || defined(__BORLANDC__)
+
 // Starts capturing stdout.
 void CaptureStdout() {
  CaptureStream(kStdOutFileno, "stdout", &g_captured_stdout);
 
@@ -1235,10 +1199,6 @@ std::string GetCapturedStderr() {
 
 #endif  // GTEST_HAS_STREAM_REDIRECTION
 
-
-
-
-
 size_t GetFileSize(FILE* file) {
  fseek(file, 0, SEEK_END);
  return static_cast(ftell(file));
 
@@ -1256,7 +1216,8 @@ std::string ReadEntireFile(FILE* file) {
  // Keeps reading the file until we cannot read further or the
  // pre-determined file size is reached.
  do {
-    bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file);
+    bytes_last_read =
+        fread(buffer + bytes_read, 1, file_size - bytes_read, file);
    bytes_read += bytes_last_read;
  } while (bytes_last_read > 0 && bytes_read < file_size);
 
@@ -1344,7 +1305,7 @@ bool ParseInt32(const Message& src_text, const char* str, int32_t* value) {
      // LONG_MAX or LONG_MIN when the input overflows.)
      result != long_value
      // The parsed value overflows as an int32_t.
-      ) {
+  ) {
    Message msg;
    msg << "WARNING: " << src_text
        << " is expected to be a 32-bit integer, but actually"
 
@@ -1388,8 +1349,8 @@ int32_t Int32FromGTestEnv(const char* flag, int32_t default_value) {
  }
 
  int32_t result = default_value;
-  if (!ParseInt32(Message() << "Environment variable " << env_var,
-                  string_value, &result)) {
+  if (!ParseInt32(Message() << "Environment variable " << env_var, string_value,
+                  &result)) {
    printf("The default value %s is used.\n",
           (Message() << default_value).GetString().c_str());
    fflush(stdout);
 
@@ -1408,7 +1369,7 @@ int32_t Int32FromGTestEnv(const char* flag, int32_t default_value) {
 // not check that the flag is 'output'
 // In essence this checks an env variable called XML_OUTPUT_FILE
 // and if it is set we prepend "xml:" to its value, if it not set we return ""
-std::string OutputFlagAlsoCheckEnvVar(){
+std::string OutputFlagAlsoCheckEnvVar() {
  std::string default_value_for_output_flag = "";
  const char* xml_output_file_env = posix::GetEnv("XML_OUTPUT_FILE");
  if (nullptr != xml_output_file_env) {
diff --git a/deps/googletest/src/gtest-printers.cc b/deps/googletest/src/gtest-printers.cc
index 41e29ccd608a99..d475ad36f5eccf 100644
--- a/deps/googletest/src/gtest-printers.cc
+++ b/deps/googletest/src/gtest-printers.cc
@@ -27,7 +27,6 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-
 // Google Test - The Google C++ Testing and Mocking Framework
 //
 // This file implements a universal value printer that can print a
 
@@ -101,7 +100,7 @@ void PrintBytesInObjectToImpl(const unsigned char* obj_bytes, size_t count,
    PrintByteSegmentInObjectTo(obj_bytes, 0, kChunkSize, os);
    *os << " ... ";
    // Rounds up to 2-byte boundary.
-    const size_t resume_pos = (count - kChunkSize + 1)/2*2;
+    const size_t resume_pos = (count - kChunkSize + 1) / 2 * 2;
    PrintByteSegmentInObjectTo(obj_bytes, resume_pos, count - resume_pos, os);
  }
  *os << ">";
 
@@ -136,11 +135,7 @@ void PrintBytesInObjectTo(const unsigned char* obj_bytes, size_t count,
 //   - as is if it's a printable ASCII (e.g. 'a', '2', ' '),
 //   - as a hexadecimal escape sequence (e.g. '\x7F'), or
 //   - as a special escape sequence (e.g. '\r', '\n').
-enum CharFormat {
-  kAsIs,
-  kHexEscape,
-  kSpecialEscape
-};
+enum CharFormat { kAsIs, kHexEscape, kSpecialEscape };
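As a side note on the ParseInt32() hunk above: it detects overflow by
round-tripping the strtol() result through an int32_t and comparing against
LONG_MAX/LONG_MIN. A hedged standalone approximation of that check (not the
patch's exact code; names are illustrative):

    #include <cerrno>
    #include <climits>
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    static bool ParseInt32Checked(const char* str, int32_t* value) {
      char* end = nullptr;
      const long long_value = strtol(str, &end, 10);  // NOLINT
      if (*str == '\0' || *end != '\0') return false;  // not a clean number
      const int32_t result = static_cast<int32_t>(long_value);
      if (long_value == LONG_MAX || long_value == LONG_MIN ||  // strtol clamped
          result != long_value) {  // narrowing to int32_t changed the value
        return false;              // out of 32-bit range
      }
      *value = result;
      return true;
    }

    int main() {
      int32_t v = 0;
      std::printf("%d\n", ParseInt32Checked("12345", &v) ? v : -1);       // 12345
      std::printf("%d\n", ParseInt32Checked("9999999999", &v) ? v : -1);  // -1
    }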
 // Returns true if c is a printable ASCII character.  We test the
 // value of c directly instead of calling isprint(), which is buggy on
 
@@ -213,35 +208,21 @@ static CharFormat PrintAsStringLiteralTo(char32_t c, ostream* os) {
  }
 }
 
-static const char* GetCharWidthPrefix(char) {
-  return "";
-}
+static const char* GetCharWidthPrefix(char) { return ""; }
 
-static const char* GetCharWidthPrefix(signed char) {
-  return "";
-}
+static const char* GetCharWidthPrefix(signed char) { return ""; }
 
-static const char* GetCharWidthPrefix(unsigned char) {
-  return "";
-}
+static const char* GetCharWidthPrefix(unsigned char) { return ""; }
 
 #ifdef __cpp_char8_t
-static const char* GetCharWidthPrefix(char8_t) {
-  return "u8";
-}
+static const char* GetCharWidthPrefix(char8_t) { return "u8"; }
 #endif
 
-static const char* GetCharWidthPrefix(char16_t) {
-  return "u";
-}
+static const char* GetCharWidthPrefix(char16_t) { return "u"; }
 
-static const char* GetCharWidthPrefix(char32_t) {
-  return "U";
-}
+static const char* GetCharWidthPrefix(char32_t) { return "U"; }
 
-static const char* GetCharWidthPrefix(wchar_t) {
-  return "L";
-}
+static const char* GetCharWidthPrefix(wchar_t) { return "L"; }
 
 // Prints a char c as if it's part of a string literal, escaping it when
 // necessary; returns how c was formatted.
 
@@ -276,8 +257,7 @@ void PrintCharAndCodeTo(Char c, ostream* os) {
  // To aid user debugging, we also print c's code in decimal, unless
  // it's 0 (in which case c was printed as '\\0', making the code
  // obvious).
-  if (c == 0)
-    return;
+  if (c == 0) return;
  *os << " (" << static_cast(c);
 
  // For more convenience, we print c's code again in hexadecimal,
 
@@ -304,17 +284,60 @@ void PrintTo(char32_t c, ::std::ostream* os) {
      << static_cast(c);
 }
 
+// gcc/clang __{u,}int128_t
+#if defined(__SIZEOF_INT128__)
+void PrintTo(__uint128_t v, ::std::ostream* os) {
+  if (v == 0) {
+    *os << "0";
+    return;
+  }
+
+  // Buffer large enough for ceil(log10(2^128))==39 and the null terminator
+  char buf[40];
+  char* p = buf + sizeof(buf);
+
+  // Some configurations have a __uint128_t, but no support for built in
+  // division. Do manual long division instead.
+
+  uint64_t high = static_cast(v >> 64);
+  uint64_t low = static_cast(v);
+
+  *--p = 0;
+  while (high != 0 || low != 0) {
+    uint64_t high_mod = high % 10;
+    high = high / 10;
+    // This is the long division algorithm specialized for a divisor of 10 and
+    // only two elements.
+    // Notable values:
+    //   2^64 / 10 == 1844674407370955161
+    //   2^64 % 10 == 6
+    const uint64_t carry = 6 * high_mod + low % 10;
+    low = low / 10 + high_mod * 1844674407370955161 + carry / 10;
+
+    char digit = static_cast(carry % 10);
+    *--p = static_cast('0' + digit);
+  }
+  *os << p;
+}
+void PrintTo(__int128_t v, ::std::ostream* os) {
+  __uint128_t uv = static_cast<__uint128_t>(v);
+  if (v < 0) {
+    *os << "-";
+    uv = -uv;
+  }
+  PrintTo(uv, os);
+}
+#endif  // __SIZEOF_INT128__
+
 // Prints the given array of characters to the ostream. CharType must be either
 //  char, char8_t, char16_t, char32_t, or wchar_t.
 // The array starts at begin, the length is len, it may include '\0' characters
 // and may not be NUL-terminated.
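The PrintTo(__uint128_t) overload added above prints a 128-bit value without
native 128-bit division by long-dividing the two 64-bit limbs by 10, using
2^64 = 10 * 1844674407370955161 + 6. A standalone sketch of the same two-limb
division, assuming a compiler that defines __SIZEOF_INT128__ (illustrative,
not the patch's exact code):

    #include <cstdint>
    #include <cstdio>
    #include <string>

    #if defined(__SIZEOF_INT128__)
    std::string U128ToString(unsigned __int128 v) {
      if (v == 0) return "0";
      std::string out;
      uint64_t high = static_cast<uint64_t>(v >> 64);
      uint64_t low = static_cast<uint64_t>(v);
      while (high != 0 || low != 0) {
        const uint64_t high_mod = high % 10;
        high /= 10;
        // The remainder high_mod represents high_mod * 2^64, which splits into
        // high_mod * 1844674407370955161 * 10 plus a carry of 6 * high_mod.
        const uint64_t carry = 6 * high_mod + low % 10;
        low = low / 10 + high_mod * 1844674407370955161ULL + carry / 10;
        out.insert(out.begin(), static_cast<char>('0' + carry % 10));
      }
      return out;
    }

    int main() {
      unsigned __int128 v = static_cast<unsigned __int128>(1) << 100;
      // Prints 1267650600228229401496703205376 (i.e. 2^100).
      std::printf("%s\n", U128ToString(v).c_str());
    }
    #endif

Each iteration emits one decimal digit; the intermediate sums stay below
2^64, so the computation never overflows the 64-bit limbs.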
 template
-GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
-GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
-GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
-GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
-static CharFormat PrintCharsAsStringTo(
-    const CharType* begin, size_t len, ostream* os) {
+GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+    GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
+    GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ static CharFormat
+    PrintCharsAsStringTo(const CharType* begin, size_t len, ostream* os) {
   const char* const quote_prefix = GetCharWidthPrefix(*begin);
   *os << quote_prefix << "\"";
   bool is_previous_hex = false;
 
@@ -340,12 +363,11 @@ static CharFormat PrintCharsAsStringTo(
 // Prints a (const) char/wchar_t array of 'len' elements, starting at address
 // 'begin'.  CharType must be either char or wchar_t.
 template
-GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_
-GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
-GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
-GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_
-static void UniversalPrintCharArray(
-    const CharType* begin, size_t len, ostream* os) {
+GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
+    GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_
+    GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ static void
+    UniversalPrintCharArray(const CharType* begin, size_t len,
+                            ostream* os) {
   // The code
   //   const char kFoo[] = "foo";
   // generates an array of 4, not 3, elements, with the last one being '\0'.
 
@@ -436,28 +458,28 @@ void PrintTo(const wchar_t* s, ostream* os) { PrintCStringTo(s, os); }
 namespace {
 
 bool ContainsUnprintableControlCodes(const char* str, size_t length) {
-  const unsigned char *s = reinterpret_cast(str);
+  const unsigned char* s = reinterpret_cast(str);
 
   for (size_t i = 0; i < length; i++) {
     unsigned char ch = *s++;
     if (std::iscntrl(ch)) {
-        switch (ch) {
+      switch (ch) {
         case '\t':
         case '\n':
         case '\r':
           break;
         default:
           return true;
-        }
       }
+    }
   }
   return false;
 }
 
-bool IsUTF8TrailByte(unsigned char t) { return 0x80 <= t && t<= 0xbf; }
+bool IsUTF8TrailByte(unsigned char t) { return 0x80 <= t && t <= 0xbf; }
 
 bool IsValidUTF8(const char* str, size_t length) {
-  const unsigned char *s = reinterpret_cast(str);
+  const unsigned char* s = reinterpret_cast(str);
 
   for (size_t i = 0; i < length;) {
     unsigned char lead = s[i++];
 
@@ -470,15 +492,13 @@ bool IsValidUTF8(const char* str, size_t length) {
     } else if (lead <= 0xdf && (i + 1) <= length && IsUTF8TrailByte(s[i])) {
       ++i;  // 2-byte character
     } else if (0xe0 <= lead && lead <= 0xef && (i + 2) <= length &&
-               IsUTF8TrailByte(s[i]) &&
-               IsUTF8TrailByte(s[i + 1]) &&
+               IsUTF8TrailByte(s[i]) && IsUTF8TrailByte(s[i + 1]) &&
               // check for non-shortest form and surrogate
               (lead != 0xe0 || s[i] >= 0xa0) &&
               (lead != 0xed || s[i] < 0xa0)) {
      i += 2;  // 3-byte character
    } else if (0xf0 <= lead && lead <= 0xf4 && (i + 3) <= length &&
-               IsUTF8TrailByte(s[i]) &&
-               IsUTF8TrailByte(s[i + 1]) &&
+               IsUTF8TrailByte(s[i]) && IsUTF8TrailByte(s[i + 1]) &&
               IsUTF8TrailByte(s[i + 2]) &&
               // check for non-shortest form
               (lead != 0xf0 || s[i] >= 0x90) &&
diff --git a/deps/googletest/src/gtest-test-part.cc b/deps/googletest/src/gtest-test-part.cc
index a938683ceded2f..eb7c8d1cf92353 100644
--- a/deps/googletest/src/gtest-test-part.cc
+++ b/deps/googletest/src/gtest-test-part.cc
@@ -51,13 +51,11 @@ std::ostream& operator<<(std::ostream& os, const TestPartResult& result) {
  return os << internal::FormatFileLocation(result.file_name(),
                                            result.line_number())
            << " "
-            << (result.type() == TestPartResult::kSuccess
-                    ? "Success"
-                    : result.type() == TestPartResult::kSkip
-                          ? "Skipped"
-                          : result.type() == TestPartResult::kFatalFailure
-                                ? "Fatal failure"
-                                : "Non-fatal failure")
+            << (result.type() == TestPartResult::kSuccess  ? "Success"
+                : result.type() == TestPartResult::kSkip   ? "Skipped"
+                : result.type() == TestPartResult::kFatalFailure
+                    ? "Fatal failure"
+                    : "Non-fatal failure")
            << ":\n"
            << result.message() << std::endl;
 }
 
@@ -86,8 +84,8 @@ namespace internal {
 
 HasNewFatalFailureHelper::HasNewFatalFailureHelper()
    : has_new_fatal_failure_(false),
-      original_reporter_(GetUnitTestImpl()->
-                         GetTestPartResultReporterForCurrentThread()) {
+      original_reporter_(
+          GetUnitTestImpl()->GetTestPartResultReporterForCurrentThread()) {
  GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(this);
 }
 
@@ -98,8 +96,7 @@ HasNewFatalFailureHelper::~HasNewFatalFailureHelper() {
 
 void HasNewFatalFailureHelper::ReportTestPartResult(
    const TestPartResult& result) {
-  if (result.fatally_failed())
-    has_new_fatal_failure_ = true;
+  if (result.fatally_failed()) has_new_fatal_failure_ = true;
  original_reporter_->ReportTestPartResult(result);
 }
 
diff --git a/deps/googletest/src/gtest-typed-test.cc b/deps/googletest/src/gtest-typed-test.cc
index c02c3df6599527..a2828b83c66457 100644
--- a/deps/googletest/src/gtest-typed-test.cc
+++ b/deps/googletest/src/gtest-typed-test.cc
@@ -27,7 +27,6 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-
 #include "gtest/gtest-typed-test.h"
 
 #include "gtest/gtest.h"
 
@@ -38,8 +37,7 @@ namespace internal {
 // Skips to the first non-space char in str. Returns an empty string if str
 // contains only whitespace characters.
 static const char* SkipSpaces(const char* str) {
-  while (IsSpace(*str))
-    str++;
+  while (IsSpace(*str)) str++;
  return str;
 }
 
@@ -85,8 +83,7 @@ const char* TypedTestSuitePState::VerifyRegisteredTestNames(
  }
 
  for (RegisteredTestIter it = registered_tests_.begin();
-       it != registered_tests_.end();
-       ++it) {
+       it != registered_tests_.end(); ++it) {
    if (tests.count(it->first) == 0) {
      errors << "You forgot to list test " << it->first << ".\n";
    }
diff --git a/deps/googletest/src/gtest.cc b/deps/googletest/src/gtest.cc
index 5a38768e4c12fd..a64e887c969d2a 100644
--- a/deps/googletest/src/gtest.cc
+++ b/deps/googletest/src/gtest.cc
@@ -31,8 +31,6 @@
 // The Google C++ Testing and Mocking Framework (Google Test)
 
 #include "gtest/gtest.h"
-#include "gtest/internal/custom/gtest.h"
-#include "gtest/gtest-spi.h"
 
 #include
 #include
@@ -46,79 +44,88 @@
 #include   // NOLINT
 #include
 #include
+#include
 #include
+#include
 #include
 #include
 #include
 #include   // NOLINT
 #include
 #include
+#include
 #include
 
+#include "gtest/gtest-assertion-result.h"
+#include "gtest/gtest-spi.h"
+#include "gtest/internal/custom/gtest.h"
+#include "gtest/internal/gtest-port.h"
+
 #if GTEST_OS_LINUX
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
 // Declares vsnprintf().  This header is not available on Windows.
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
-# include
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+
+#include
 #elif GTEST_OS_ZOS
-# include   // NOLINT
+#include   // NOLINT
 // On z/OS we additionally need strings.h for strcasecmp.
-# include   // NOLINT
+#include   // NOLINT
 #elif GTEST_OS_WINDOWS_MOBILE  // We are on Windows CE.
-# include   // NOLINT
-# undef min
+#include   // NOLINT
+#undef min
 #elif GTEST_OS_WINDOWS  // We are on Windows proper.
 
-# include   // NOLINT
-# undef min
+#include   // NOLINT
+#undef min
 
 #ifdef _MSC_VER
-# include   // NOLINT
+#include   // NOLINT
 #endif
 
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
 
-# if GTEST_OS_WINDOWS_MINGW
-#  include   // NOLINT
-# endif  // GTEST_OS_WINDOWS_MINGW
+#if GTEST_OS_WINDOWS_MINGW
+#include   // NOLINT
+#endif  // GTEST_OS_WINDOWS_MINGW
 
 #else
 
 // cpplint thinks that the header is already included, so we want to
 // silence it.
-# include   // NOLINT
-# include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
 
 #endif  // GTEST_OS_LINUX
 
 #if GTEST_HAS_EXCEPTIONS
-# include
+#include
 #endif
 
 #if GTEST_CAN_STREAM_RESULTS_
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
-# include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
+#include   // NOLINT
 #endif
 
 #include "src/gtest-internal-inl.h"
 
 #if GTEST_OS_WINDOWS
-# define vsnprintf _vsnprintf
+#define vsnprintf _vsnprintf
 #endif  // GTEST_OS_WINDOWS
 
 #if GTEST_OS_MAC
 
@@ -131,9 +138,20 @@
 #include "absl/debugging/failure_signal_handler.h"
 #include "absl/debugging/stacktrace.h"
 #include "absl/debugging/symbolize.h"
+#include "absl/flags/parse.h"
+#include "absl/flags/usage.h"
 #include "absl/strings/str_cat.h"
+#include "absl/strings/str_replace.h"
 #endif  // GTEST_HAS_ABSL
 
+// Checks builtin compiler feature |x| while avoiding an extra layer of #ifdefs
+// at the callsite.
+#if defined(__has_builtin)
+#define GTEST_HAS_BUILTIN(x) __has_builtin(x)
+#else
+#define GTEST_HAS_BUILTIN(x) 0
+#endif  // defined(__has_builtin)
+
 namespace testing {
 
 using internal::CountIf;
 
@@ -177,7 +195,8 @@ const char kStackTraceMarker[] = "\nStack trace:\n";
 // is specified on the command line.
 bool g_help_flag = false;
 
-// Utilty function to Open File for Writing
+#if GTEST_HAS_FILE_SYSTEM
+// Utility function to Open File for Writing
 static FILE* OpenFileForWriting(const std::string& output_file) {
  FILE* fileout = nullptr;
  FilePath output_file_path(output_file);
 
@@ -191,6 +210,7 @@ static FILE* OpenFileForWriting(const std::string& output_file) {
  }
  return fileout;
 }
+#endif  // GTEST_HAS_FILE_SYSTEM
 
 }  // namespace internal
 
@@ -267,8 +287,7 @@ GTEST_DEFINE_bool_(
    "install a signal handler that dumps debugging information when fatal "
    "signals are raised.");
 
-GTEST_DEFINE_bool_(list_tests, false,
-                   "List all tests without running them.");
+GTEST_DEFINE_bool_(list_tests, false, "List all tests without running them.");
 
 // The net priority order after flag processing is thus:
 //   --gtest_output command line flag
 
@@ -315,7 +334,7 @@ GTEST_DEFINE_int32_(
 GTEST_DEFINE_bool_(
    recreate_environments_when_repeating,
    testing::internal::BoolFromGTestEnv("recreate_environments_when_repeating",
-                                        true),
+                                        false),
    "Controls whether global test environments are recreated for each repeat "
    "of the tests. If set to false the global test environments are only set "
    "up once, for the first iteration, and only torn down once, for the last. "
 
@@ -364,16 +383,17 @@ GTEST_DEFINE_string_(
 namespace testing {
 namespace internal {
 
+const uint32_t Random::kMaxRange;
+
 // Generates a random number from [0, range), using a Linear
 // Congruential Generator (LCG).  Crashes if 'range' is 0 or greater
 // than kMaxRange.
 uint32_t Random::Generate(uint32_t range) {
  // These constants are the same as are used in glibc's rand(3).
  // Use wider types than necessary to prevent unsigned overflow diagnostics.
-  state_ = static_cast(1103515245ULL*state_ + 12345U) % kMaxRange;
+  state_ = static_cast(1103515245ULL * state_ + 12345U) % kMaxRange;
 
-  GTEST_CHECK_(range > 0)
-      << "Cannot generate a number in the range [0, 0).";
+  GTEST_CHECK_(range > 0) << "Cannot generate a number in the range [0, 0).";
  GTEST_CHECK_(range <= kMaxRange)
      << "Generation of a number in [0, " << range << ") was requested, "
      << "but this can only generate numbers in [0, " << kMaxRange << ").";
 
@@ -418,32 +438,26 @@ static bool ShouldRunTestSuite(const TestSuite* test_suite) {
 }
 
 // AssertHelper constructor.
-AssertHelper::AssertHelper(TestPartResult::Type type,
-                           const char* file,
-                           int line,
-                           const char* message)
-    : data_(new AssertHelperData(type, file, line, message)) {
-}
+AssertHelper::AssertHelper(TestPartResult::Type type, const char* file,
+                           int line, const char* message)
+    : data_(new AssertHelperData(type, file, line, message)) {}
 
-AssertHelper::~AssertHelper() {
-  delete data_;
-}
+AssertHelper::~AssertHelper() { delete data_; }
 
 // Message assignment, for assertion streaming support.
 void AssertHelper::operator=(const Message& message) const {
-  UnitTest::GetInstance()->
-    AddTestPartResult(data_->type, data_->file, data_->line,
-                      AppendUserMessage(data_->message, message),
-                      UnitTest::GetInstance()->impl()
-                      ->CurrentOsStackTraceExceptTop(1)
-                      // Skips the stack frame for this function itself.
-                      );  // NOLINT
+  UnitTest::GetInstance()->AddTestPartResult(
+      data_->type, data_->file, data_->line,
+      AppendUserMessage(data_->message, message),
+      UnitTest::GetInstance()->impl()->CurrentOsStackTraceExceptTop(1)
+      // Skips the stack frame for this function itself.
+  );  // NOLINT
 }
 
 namespace {
 
 // When TEST_P is found without a matching INSTANTIATE_TEST_SUITE_P
-// to creates test cases for it, a syntetic test case is
+// to creates test cases for it, a synthetic test case is
 // inserted to report ether an error or a log message.
 //
 // This configuration bit will likely be removed at some point.
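Random::Generate() in the hunk above steps a glibc-style linear congruential
generator and reduces the state into [0, range). A self-contained sketch of
that stepping; the class name is invented and kMaxRange mirrors gtest's 2^31
bound (illustrative only, not the patch's code):

    #include <cstdint>
    #include <cstdio>

    class TinyRandom {
     public:
      static const uint32_t kMaxRange = 1u << 31;
      explicit TinyRandom(uint32_t seed) : state_(seed) {}
      uint32_t Generate(uint32_t range) {
        // Same multiplier/increment as glibc's rand(3); the wider 64-bit
        // intermediate avoids unsigned-overflow diagnostics, as in the patch.
        state_ =
            static_cast<uint32_t>(1103515245ULL * state_ + 12345U) % kMaxRange;
        return state_ % range;  // assumes 0 < range <= kMaxRange
      }

     private:
      uint32_t state_;
    };

    int main() {
      TinyRandom r(42);
      for (int i = 0; i < 3; ++i) std::printf("%u ", r.Generate(100));
      std::printf("\n");
    }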
@@ -474,7 +488,6 @@ class FailureTest : public Test {
  const bool as_error_;
};

-
}  // namespace

std::set<std::string>* GetIgnoredParameterizedTestSuites() {
@@ -518,7 +531,8 @@ void InsertSyntheticTestCase(const std::string& name, CodeLocation location,
      "To suppress this error for this test suite, insert the following line "
      "(in a non-header) in the namespace it is defined in:"
      "\n\n"
-      "GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" + name + ");";
+      "GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(" +
+      name + ");";

  std::string full_name = "UninstantiatedParameterizedTestSuite<" + name + ">";
  RegisterTest(  //
@@ -538,19 +552,18 @@ void RegisterTypeParameterizedTestSuite(const char* test_suite_name,
}

void RegisterTypeParameterizedTestSuiteInstantiation(const char* case_name) {
-  GetUnitTestImpl()
-      ->type_parameterized_test_registry()
-      .RegisterInstantiation(case_name);
+  GetUnitTestImpl()->type_parameterized_test_registry().RegisterInstantiation(
+      case_name);
}

void TypeParameterizedTestSuiteRegistry::RegisterTestSuite(
    const char* test_suite_name, CodeLocation code_location) {
  suites_.emplace(std::string(test_suite_name),
-                 TypeParameterizedTestSuiteInfo(code_location));
+                  TypeParameterizedTestSuiteInfo(code_location));
}

void TypeParameterizedTestSuiteRegistry::RegisterInstantiation(
-        const char* test_suite_name) {
+    const char* test_suite_name) {
  auto it = suites_.find(std::string(test_suite_name));
  if (it != suites_.end()) {
    it->second.instantiated = true;
@@ -610,6 +623,7 @@ ::std::vector<std::string> GetArgvs() {
#endif  // defined(GTEST_CUSTOM_GET_ARGVS_)
}

+#if GTEST_HAS_FILE_SYSTEM
// Returns the current application's name, removing directory path if that
// is present.
FilePath GetCurrentExecutableName() {
@@ -623,6 +637,7 @@ FilePath GetCurrentExecutableName() {

  return result.RemoveDirectoryName();
}
+#endif  // GTEST_HAS_FILE_SYSTEM

// Functions for processing the gtest_output flag.

@@ -637,6 +652,7 @@ std::string UnitTestOptions::GetOutputFormat() {
                                 static_cast<size_t>(colon - gtest_output_flag));
}

+#if GTEST_HAS_FILE_SYSTEM
// Returns the name of the requested output file, or the default if none
// was explicitly specified.
std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
@@ -644,16 +660,15 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
  const char* const gtest_output_flag = s.c_str();

  std::string format = GetOutputFormat();
-  if (format.empty())
-    format = std::string(kDefaultOutputFormat);
+  if (format.empty()) format = std::string(kDefaultOutputFormat);

  const char* const colon = strchr(gtest_output_flag, ':');
  if (colon == nullptr)
    return internal::FilePath::MakeFileName(
-        internal::FilePath(
-            UnitTest::GetInstance()->original_working_dir()),
-        internal::FilePath(kDefaultOutputFile), 0,
-        format.c_str()).string();
+               internal::FilePath(
+                   UnitTest::GetInstance()->original_working_dir()),
+               internal::FilePath(kDefaultOutputFile), 0, format.c_str())
+        .string();

  internal::FilePath output_name(colon + 1);
  if (!output_name.IsAbsolutePath())
@@ -661,14 +676,14 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
        internal::FilePath(UnitTest::GetInstance()->original_working_dir()),
        internal::FilePath(colon + 1));

-  if (!output_name.IsDirectory())
-    return output_name.string();
+  if (!output_name.IsDirectory()) return output_name.string();

  internal::FilePath result(internal::FilePath::GenerateUniqueFileName(
      output_name, internal::GetCurrentExecutableName(),
      GetOutputFormat().c_str()));
  return result.string();
}
+#endif  // GTEST_HAS_FILE_SYSTEM

// Returns true if and only if the wildcard pattern matches the string. Each
// pattern consists of regular characters, single-character wildcards (?), and
@@ -723,60 +738,119 @@ static bool PatternMatchesString(const std::string& name_str,
  return true;
}

-bool UnitTestOptions::MatchesFilter(const std::string& name_str,
-                                    const char* filter) {
-  // The filter is a list of patterns separated by colons (:).
-  const char* pattern = filter;
-  while (true) {
-    // Find the bounds of this pattern.
-    const char* const next_sep = strchr(pattern, ':');
-    const char* const pattern_end =
-        next_sep != nullptr ? next_sep : pattern + strlen(pattern);
-
-    // Check if this pattern matches name_str.
-    if (PatternMatchesString(name_str, pattern, pattern_end)) {
-      return true;
-    }
+namespace {
+
+bool IsGlobPattern(const std::string& pattern) {
+  return std::any_of(pattern.begin(), pattern.end(),
+                     [](const char c) { return c == '?' || c == '*'; });
+}
+
+class UnitTestFilter {
+ public:
+  UnitTestFilter() = default;
+
+  // Constructs a filter from a string of patterns separated by `:`.
+  explicit UnitTestFilter(const std::string& filter) {
+    // By design "" filter matches "" string.
+    std::vector<std::string> all_patterns;
+    SplitString(filter, ':', &all_patterns);
+    const auto exact_match_patterns_begin = std::partition(
+        all_patterns.begin(), all_patterns.end(), &IsGlobPattern);
+
+    glob_patterns_.reserve(static_cast<size_t>(
+        std::distance(all_patterns.begin(), exact_match_patterns_begin)));
+    std::move(all_patterns.begin(), exact_match_patterns_begin,
+              std::inserter(glob_patterns_, glob_patterns_.begin()));
+    std::move(
+        exact_match_patterns_begin, all_patterns.end(),
+        std::inserter(exact_match_patterns_, exact_match_patterns_.begin()));
+  }
+
+  // Returns true if and only if name matches at least one of the patterns in
+  // the filter.
+  bool MatchesName(const std::string& name) const {
+    return exact_match_patterns_.count(name) > 0 ||
+           std::any_of(glob_patterns_.begin(), glob_patterns_.end(),
+                       [&name](const std::string& pattern) {
+                         return PatternMatchesString(
+                             name, pattern.c_str(),
+                             pattern.c_str() + pattern.size());
+                       });
+  }
+
+ private:
+  std::vector<std::string> glob_patterns_;
+  std::unordered_set<std::string> exact_match_patterns_;
+};

-    // Give up on this pattern.  However, if we found a pattern separator (:),
-    // advance to the next pattern (skipping over the separator) and restart.
-    if (next_sep == nullptr) {
-      return false;
+class PositiveAndNegativeUnitTestFilter {
+ public:
+  // Constructs a positive and a negative filter from a string. The string
+  // contains a positive filter optionally followed by a '-' character and a
+  // negative filter. In case only a negative filter is provided the positive
+  // filter will be assumed "*".
+  // A filter is a list of patterns separated by ':'.
+  explicit PositiveAndNegativeUnitTestFilter(const std::string& filter) {
+    std::vector<std::string> positive_and_negative_filters;
+
+    // NOTE: `SplitString` always returns a non-empty container.
+    SplitString(filter, '-', &positive_and_negative_filters);
+    const auto& positive_filter = positive_and_negative_filters.front();
+
+    if (positive_and_negative_filters.size() > 1) {
+      positive_filter_ = UnitTestFilter(
+          positive_filter.empty() ? kUniversalFilter : positive_filter);
+
+      // TODO(b/214626361): Fail on multiple '-' characters
+      // For the moment to preserve old behavior we concatenate the rest of the
+      // string parts with `-` as separator to generate the negative filter.
+      auto negative_filter_string = positive_and_negative_filters[1];
+      for (std::size_t i = 2; i < positive_and_negative_filters.size(); i++)
+        negative_filter_string =
+            negative_filter_string + '-' + positive_and_negative_filters[i];
+      negative_filter_ = UnitTestFilter(negative_filter_string);
+    } else {
+      // In case we don't have a negative filter and positive filter is ""
+      // we do not use kUniversalFilter by design as opposed to when we have a
+      // negative filter.
+      positive_filter_ = UnitTestFilter(positive_filter);
    }
-    pattern = next_sep + 1;
  }
-  return true;
+
+  // Returns true if and only if test name (this is generated by appending test
+  // suit name and test name via a '.' character) matches the positive filter
+  // and does not match the negative filter.
+  bool MatchesTest(const std::string& test_suite_name,
+                   const std::string& test_name) const {
+    return MatchesName(test_suite_name + "." + test_name);
+  }
+
+  // Returns true if and only if name matches the positive filter and does not
+  // match the negative filter.
+  bool MatchesName(const std::string& name) const {
+    return positive_filter_.MatchesName(name) &&
+           !negative_filter_.MatchesName(name);
+  }
+
+ private:
+  UnitTestFilter positive_filter_;
+  UnitTestFilter negative_filter_;
+};
+}  // namespace
+
+bool UnitTestOptions::MatchesFilter(const std::string& name_str,
+                                    const char* filter) {
+  return UnitTestFilter(filter).MatchesName(name_str);
}

// Returns true if and only if the user-specified filter matches the test
// suite name and the test name.
bool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name,
                                        const std::string& test_name) {
-  const std::string& full_name = test_suite_name + "." + test_name.c_str();
-
  // Split --gtest_filter at '-', if there is one, to separate into
  // positive filter and negative filter portions
-  std::string str = GTEST_FLAG_GET(filter);
-  const char* const p = str.c_str();
-  const char* const dash = strchr(p, '-');
-  std::string positive;
-  std::string negative;
-  if (dash == nullptr) {
-    positive = str.c_str();  // Whole string is a positive filter
-    negative = "";
-  } else {
-    positive = std::string(p, dash);   // Everything up to the dash
-    negative = std::string(dash + 1);  // Everything after the dash
-    if (positive.empty()) {
-      // Treat '-test1' as the same as '*-test1'
-      positive = kUniversalFilter;
-    }
-  }
-
-  // A filter is a colon-separated list of patterns.  It matches a
-  // test if any pattern in it matches the test.
-  return (MatchesFilter(full_name, positive.c_str()) &&
-          !MatchesFilter(full_name, negative.c_str()));
+  return PositiveAndNegativeUnitTestFilter(GTEST_FLAG_GET(filter))
+      .MatchesTest(test_suite_name, test_name);
}

#if GTEST_HAS_SEH
@@ -814,8 +888,7 @@ int UnitTestOptions::GTestShouldProcessSEH(DWORD exception_code) {
// results.  Intercepts only failures from the current thread.
ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
    TestPartResultArray* result)
-    : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD),
-      result_(result) {
+    : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD), result_(result) {
  Init();
}

@@ -824,8 +897,7 @@ ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
// results.
ScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(
    InterceptMode intercept_mode, TestPartResultArray* result)
-    : intercept_mode_(intercept_mode),
-      result_(result) {
+    : intercept_mode_(intercept_mode), result_(result) {
  Init();
}

@@ -869,9 +941,7 @@ namespace internal {
// from user test code.  GetTestTypeId() is guaranteed to always
// return the same value, as it always calls GetTypeId<>() from the
// gtest.cc, which is within the Google Test framework.
-TypeId GetTestTypeId() {
-  return GetTypeId<Test>();
-}
+TypeId GetTestTypeId() { return GetTypeId<Test>(); }

// The value of GetTestTypeId() as seen from within the Google Test
// library.  This is solely for testing GetTestTypeId().
@@ -886,9 +956,9 @@ static AssertionResult HasOneFailure(const char* /* results_expr */,
                                     const TestPartResultArray& results,
                                     TestPartResult::Type type,
                                     const std::string& substr) {
-  const std::string expected(type == TestPartResult::kFatalFailure ?
-                             "1 fatal failure" :
-                             "1 non-fatal failure");
+  const std::string expected(type == TestPartResult::kFatalFailure
+                                 ?
"1 fatal failure" + : "1 non-fatal failure"); Message msg; if (results.size() != 1) { msg << "Expected: " << expected << "\n" @@ -907,10 +977,10 @@ static AssertionResult HasOneFailure(const char* /* results_expr */, } if (strstr(r.message(), substr.c_str()) == nullptr) { - return AssertionFailure() << "Expected: " << expected << " containing \"" - << substr << "\"\n" - << " Actual:\n" - << r; + return AssertionFailure() + << "Expected: " << expected << " containing \"" << substr << "\"\n" + << " Actual:\n" + << r; } return AssertionSuccess(); @@ -933,7 +1003,8 @@ SingleFailureChecker::~SingleFailureChecker() { } DefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter( - UnitTestImpl* unit_test) : unit_test_(unit_test) {} + UnitTestImpl* unit_test) + : unit_test_(unit_test) {} void DefaultGlobalTestPartResultReporter::ReportTestPartResult( const TestPartResult& result) { @@ -942,7 +1013,8 @@ void DefaultGlobalTestPartResultReporter::ReportTestPartResult( } DefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter( - UnitTestImpl* unit_test) : unit_test_(unit_test) {} + UnitTestImpl* unit_test) + : unit_test_(unit_test) {} void DefaultPerThreadTestPartResultReporter::ReportTestPartResult( const TestPartResult& result) { @@ -953,14 +1025,14 @@ void DefaultPerThreadTestPartResultReporter::ReportTestPartResult( TestPartResultReporterInterface* UnitTestImpl::GetGlobalTestPartResultReporter() { internal::MutexLock lock(&global_test_part_result_reporter_mutex_); - return global_test_part_result_repoter_; + return global_test_part_result_reporter_; } // Sets the global test part result reporter. void UnitTestImpl::SetGlobalTestPartResultReporter( TestPartResultReporterInterface* reporter) { internal::MutexLock lock(&global_test_part_result_reporter_mutex_); - global_test_part_result_repoter_ = reporter; + global_test_part_result_reporter_ = reporter; } // Returns the test part result reporter for the current thread. @@ -1058,17 +1130,24 @@ std::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) { // A helper class for measuring elapsed times. class Timer { public: - Timer() : start_(std::chrono::steady_clock::now()) {} + Timer() : start_(clock::now()) {} // Return time elapsed in milliseconds since the timer was created. TimeInMillis Elapsed() { return std::chrono::duration_cast( - std::chrono::steady_clock::now() - start_) + clock::now() - start_) .count(); } private: - std::chrono::steady_clock::time_point start_; + // Fall back to the system_clock when building with newlib on a system + // without a monotonic clock. +#if defined(_NEWLIB_VERSION) && !defined(CLOCK_MONOTONIC) + using clock = std::chrono::system_clock; +#else + using clock = std::chrono::steady_clock; +#endif + clock::time_point start_; }; // Returns a timestamp as milliseconds since the epoch. Note this time may jump @@ -1096,8 +1175,7 @@ LPCWSTR String::AnsiToUtf16(const char* ansi) { const int unicode_length = MultiByteToWideChar(CP_ACP, 0, ansi, length, nullptr, 0); WCHAR* unicode = new WCHAR[unicode_length + 1]; - MultiByteToWideChar(CP_ACP, 0, ansi, length, - unicode, unicode_length); + MultiByteToWideChar(CP_ACP, 0, ansi, length, unicode, unicode_length); unicode[unicode_length] = 0; return unicode; } @@ -1106,7 +1184,7 @@ LPCWSTR String::AnsiToUtf16(const char* ansi) { // memory using new. The caller is responsible for deleting the return // value using delete[]. Returns the ANSI string, or NULL if the // input is NULL. 
-const char* String::Utf16ToAnsi(LPCWSTR utf16_str) { +const char* String::Utf16ToAnsi(LPCWSTR utf16_str) { if (!utf16_str) return nullptr; const int ansi_length = WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, nullptr, 0, nullptr, nullptr); @@ -1125,7 +1203,7 @@ const char* String::Utf16ToAnsi(LPCWSTR utf16_str) { // Unlike strcmp(), this function can handle NULL argument(s). A NULL // C string is considered different to any non-NULL C string, // including the empty string. -bool String::CStringEquals(const char * lhs, const char * rhs) { +bool String::CStringEquals(const char* lhs, const char* rhs) { if (lhs == nullptr) return rhs == nullptr; if (rhs == nullptr) return false; @@ -1139,11 +1217,10 @@ bool String::CStringEquals(const char * lhs, const char * rhs) { // encoding, and streams the result to the given Message object. static void StreamWideCharsToMessage(const wchar_t* wstr, size_t length, Message* msg) { - for (size_t i = 0; i != length; ) { // NOLINT + for (size_t i = 0; i != length;) { // NOLINT if (wstr[i] != L'\0') { *msg << WideStringToUtf8(wstr + i, static_cast(length - i)); - while (i != length && wstr[i] != L'\0') - i++; + while (i != length && wstr[i] != L'\0') i++; } else { *msg << '\0'; i++; @@ -1185,17 +1262,17 @@ Message::Message() : ss_(new ::std::stringstream) { // These two overloads allow streaming a wide C string to a Message // using the UTF-8 encoding. -Message& Message::operator <<(const wchar_t* wide_c_str) { +Message& Message::operator<<(const wchar_t* wide_c_str) { return *this << internal::String::ShowWideCString(wide_c_str); } -Message& Message::operator <<(wchar_t* wide_c_str) { +Message& Message::operator<<(wchar_t* wide_c_str) { return *this << internal::String::ShowWideCString(wide_c_str); } #if GTEST_HAS_STD_WSTRING // Converts the given wide string to a narrow string using the UTF-8 // encoding, and streams the result to this Message object. -Message& Message::operator <<(const ::std::wstring& wstr) { +Message& Message::operator<<(const ::std::wstring& wstr) { internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this); return *this; } @@ -1207,44 +1284,6 @@ std::string Message::GetString() const { return internal::StringStreamToString(ss_.get()); } -// AssertionResult constructors. -// Used in EXPECT_TRUE/FALSE(assertion_result). -AssertionResult::AssertionResult(const AssertionResult& other) - : success_(other.success_), - message_(other.message_.get() != nullptr - ? new ::std::string(*other.message_) - : static_cast< ::std::string*>(nullptr)) {} - -// Swaps two AssertionResults. -void AssertionResult::swap(AssertionResult& other) { - using std::swap; - swap(success_, other.success_); - swap(message_, other.message_); -} - -// Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE. -AssertionResult AssertionResult::operator!() const { - AssertionResult negation(!success_); - if (message_.get() != nullptr) negation << *message_; - return negation; -} - -// Makes a successful assertion result. -AssertionResult AssertionSuccess() { - return AssertionResult(true); -} - -// Makes a failed assertion result. -AssertionResult AssertionFailure() { - return AssertionResult(false); -} - -// Makes a failed assertion result with the given failure message. -// Deprecated; use AssertionFailure() << message. 
-AssertionResult AssertionFailure(const Message& message) {
-  return AssertionFailure() << message;
-}
-
namespace internal {

namespace edit_distance {
@@ -1536,8 +1575,7 @@ std::vector<std::string> SplitEscapedString(const std::string& str) {
AssertionResult EqFailure(const char* lhs_expression,
                          const char* rhs_expression,
                          const std::string& lhs_value,
-                          const std::string& rhs_value,
-                          bool ignoring_case) {
+                          const std::string& rhs_value, bool ignoring_case) {
  Message msg;
  msg << "Expected equality of these values:";
  msg << "\n  " << lhs_expression;
@@ -1554,10 +1592,8 @@ AssertionResult EqFailure(const char* lhs_expression,
  }

  if (!lhs_value.empty() && !rhs_value.empty()) {
-    const std::vector<std::string> lhs_lines =
-        SplitEscapedString(lhs_value);
-    const std::vector<std::string> rhs_lines =
-        SplitEscapedString(rhs_value);
+    const std::vector<std::string> lhs_lines = SplitEscapedString(lhs_value);
+    const std::vector<std::string> rhs_lines = SplitEscapedString(rhs_value);
    if (lhs_lines.size() > 1 || rhs_lines.size() > 1) {
      msg << "\nWith diff:\n"
          << edit_distance::CreateUnifiedDiff(lhs_lines, rhs_lines);
@@ -1569,27 +1605,21 @@ AssertionResult EqFailure(const char* lhs_expression,

// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.
std::string GetBoolAssertionFailureMessage(
-    const AssertionResult& assertion_result,
-    const char* expression_text,
-    const char* actual_predicate_value,
-    const char* expected_predicate_value) {
+    const AssertionResult& assertion_result, const char* expression_text,
+    const char* actual_predicate_value, const char* expected_predicate_value) {
  const char* actual_message = assertion_result.message();
  Message msg;
  msg << "Value of: " << expression_text
      << "\n  Actual: " << actual_predicate_value;
-  if (actual_message[0] != '\0')
-    msg << " (" << actual_message << ")";
+  if (actual_message[0] != '\0') msg << " (" << actual_message << ")";
  msg << "\nExpected: " << expected_predicate_value;
  return msg.GetString();
}

// Helper function for implementing ASSERT_NEAR.
-AssertionResult DoubleNearPredFormat(const char* expr1,
-                                     const char* expr2,
-                                     const char* abs_error_expr,
-                                     double val1,
-                                     double val2,
-                                     double abs_error) {
+AssertionResult DoubleNearPredFormat(const char* expr1, const char* expr2,
+                                     const char* abs_error_expr, double val1,
+                                     double val2, double abs_error) {
  const double diff = fabs(val1 - val2);
  if (diff <= abs_error) return AssertionSuccess();
@@ -1619,20 +1649,17 @@ AssertionResult DoubleNearPredFormat(const char* expr1,
        "EXPECT_EQUAL. Consider using EXPECT_DOUBLE_EQ instead.";
  }
  return AssertionFailure()
-      << "The difference between " << expr1 << " and " << expr2
-      << " is " << diff << ", which exceeds " << abs_error_expr << ", where\n"
-      << expr1 << " evaluates to " << val1 << ",\n"
-      << expr2 << " evaluates to " << val2 << ", and\n"
-      << abs_error_expr << " evaluates to " << abs_error << ".";
+         << "The difference between " << expr1 << " and " << expr2 << " is "
+         << diff << ", which exceeds " << abs_error_expr << ", where\n"
+         << expr1 << " evaluates to " << val1 << ",\n"
+         << expr2 << " evaluates to " << val2 << ", and\n"
+         << abs_error_expr << " evaluates to " << abs_error << ".";
}

-
// Helper template for implementing FloatLE() and DoubleLE().
template <typename RawType>
-AssertionResult FloatingPointLE(const char* expr1,
-                                const char* expr2,
-                                RawType val1,
-                                RawType val2) {
+AssertionResult FloatingPointLE(const char* expr1, const char* expr2,
+                                RawType val1, RawType val2) {
  // Returns success if val1 is less than val2,
  if (val1 < val2) {
    return AssertionSuccess();
@@ -1657,24 +1684,24 @@ AssertionResult FloatingPointLE(const char* expr1,
          << val2;

  return AssertionFailure()
-      << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n"
-      << "  Actual: " << StringStreamToString(&val1_ss) << " vs "
-      << StringStreamToString(&val2_ss);
+         << "Expected: (" << expr1 << ") <= (" << expr2 << ")\n"
+         << "  Actual: " << StringStreamToString(&val1_ss) << " vs "
+         << StringStreamToString(&val2_ss);
}

}  // namespace internal

// Asserts that val1 is less than, or almost equal to, val2.  Fails
// otherwise.  In particular, it fails if either val1 or val2 is NaN.
-AssertionResult FloatLE(const char* expr1, const char* expr2,
-                        float val1, float val2) {
+AssertionResult FloatLE(const char* expr1, const char* expr2, float val1,
+                        float val2) {
  return internal::FloatingPointLE<float>(expr1, expr2, val1, val2);
}

// Asserts that val1 is less than, or almost equal to, val2.  Fails
// otherwise.  In particular, it fails if either val1 or val2 is NaN.
-AssertionResult DoubleLE(const char* expr1, const char* expr2,
-                         double val1, double val2) {
+AssertionResult DoubleLE(const char* expr1, const char* expr2, double val1,
+                         double val2) {
  return internal::FloatingPointLE<double>(expr1, expr2, val1, val2);
}

@@ -1682,62 +1709,51 @@ namespace internal {

// The helper function for {ASSERT|EXPECT}_STREQ.
AssertionResult CmpHelperSTREQ(const char* lhs_expression,
-                               const char* rhs_expression,
-                               const char* lhs,
+                               const char* rhs_expression, const char* lhs,
                               const char* rhs) {
  if (String::CStringEquals(lhs, rhs)) {
    return AssertionSuccess();
  }

-  return EqFailure(lhs_expression,
-                   rhs_expression,
-                   PrintToString(lhs),
-                   PrintToString(rhs),
-                   false);
+  return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs),
+                   PrintToString(rhs), false);
}

// The helper function for {ASSERT|EXPECT}_STRCASEEQ.
AssertionResult CmpHelperSTRCASEEQ(const char* lhs_expression,
-                                   const char* rhs_expression,
-                                   const char* lhs,
+                                   const char* rhs_expression, const char* lhs,
                                   const char* rhs) {
  if (String::CaseInsensitiveCStringEquals(lhs, rhs)) {
    return AssertionSuccess();
  }

-  return EqFailure(lhs_expression,
-                   rhs_expression,
-                   PrintToString(lhs),
-                   PrintToString(rhs),
-                   true);
+  return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs),
+                   PrintToString(rhs), true);
}

// The helper function for {ASSERT|EXPECT}_STRNE.
AssertionResult CmpHelperSTRNE(const char* s1_expression,
-                               const char* s2_expression,
-                               const char* s1,
+                               const char* s2_expression, const char* s1,
                               const char* s2) {
  if (!String::CStringEquals(s1, s2)) {
    return AssertionSuccess();
  } else {
-    return AssertionFailure() << "Expected: (" << s1_expression << ") != ("
-                              << s2_expression << "), actual: \""
-                              << s1 << "\" vs \"" << s2 << "\"";
+    return AssertionFailure()
+           << "Expected: (" << s1_expression << ") != (" << s2_expression
+           << "), actual: \"" << s1 << "\" vs \"" << s2 << "\"";
  }
}

// The helper function for {ASSERT|EXPECT}_STRCASENE.
AssertionResult CmpHelperSTRCASENE(const char* s1_expression, - const char* s2_expression, - const char* s1, + const char* s2_expression, const char* s1, const char* s2) { if (!String::CaseInsensitiveCStringEquals(s1, s2)) { return AssertionSuccess(); } else { return AssertionFailure() - << "Expected: (" << s1_expression << ") != (" - << s2_expression << ") (ignoring case), actual: \"" - << s1 << "\" vs \"" << s2 << "\""; + << "Expected: (" << s1_expression << ") != (" << s2_expression + << ") (ignoring case), actual: \"" << s1 << "\" vs \"" << s2 << "\""; } } @@ -1765,8 +1781,7 @@ bool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) { // StringType here can be either ::std::string or ::std::wstring. template -bool IsSubstringPred(const StringType& needle, - const StringType& haystack) { +bool IsSubstringPred(const StringType& needle, const StringType& haystack) { return haystack.find(needle) != StringType::npos; } @@ -1775,21 +1790,22 @@ bool IsSubstringPred(const StringType& needle, // StringType here can be const char*, const wchar_t*, ::std::string, // or ::std::wstring. template -AssertionResult IsSubstringImpl( - bool expected_to_be_substring, - const char* needle_expr, const char* haystack_expr, - const StringType& needle, const StringType& haystack) { +AssertionResult IsSubstringImpl(bool expected_to_be_substring, + const char* needle_expr, + const char* haystack_expr, + const StringType& needle, + const StringType& haystack) { if (IsSubstringPred(needle, haystack) == expected_to_be_substring) return AssertionSuccess(); const bool is_wide_string = sizeof(needle[0]) > 1; const char* const begin_string_quote = is_wide_string ? "L\"" : "\""; return AssertionFailure() - << "Value of: " << needle_expr << "\n" - << " Actual: " << begin_string_quote << needle << "\"\n" - << "Expected: " << (expected_to_be_substring ? "" : "not ") - << "a substring of " << haystack_expr << "\n" - << "Which is: " << begin_string_quote << haystack << "\""; + << "Value of: " << needle_expr << "\n" + << " Actual: " << begin_string_quote << needle << "\"\n" + << "Expected: " << (expected_to_be_substring ? "" : "not ") + << "a substring of " << haystack_expr << "\n" + << "Which is: " << begin_string_quote << haystack << "\""; } } // namespace @@ -1798,52 +1814,52 @@ AssertionResult IsSubstringImpl( // substring of haystack (NULL is considered a substring of itself // only), and return an appropriate error message when they fail. 
-AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const char* needle, const char* haystack) { +AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, + const char* needle, const char* haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const wchar_t* needle, const wchar_t* haystack) { +AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, + const wchar_t* needle, const wchar_t* haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const char* needle, const char* haystack) { +AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, const char* needle, + const char* haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const wchar_t* needle, const wchar_t* haystack) { +AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, const wchar_t* needle, + const wchar_t* haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::string& needle, const ::std::string& haystack) { +AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, + const ::std::string& needle, + const ::std::string& haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::string& needle, const ::std::string& haystack) { +AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::string& needle, + const ::std::string& haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } #if GTEST_HAS_STD_WSTRING -AssertionResult IsSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::wstring& needle, const ::std::wstring& haystack) { +AssertionResult IsSubstring(const char* needle_expr, const char* haystack_expr, + const ::std::wstring& needle, + const ::std::wstring& haystack) { return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack); } -AssertionResult IsNotSubstring( - const char* needle_expr, const char* haystack_expr, - const ::std::wstring& needle, const ::std::wstring& haystack) { +AssertionResult IsNotSubstring(const char* needle_expr, + const char* haystack_expr, + const ::std::wstring& needle, + const ::std::wstring& haystack) { return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack); } #endif // GTEST_HAS_STD_WSTRING @@ -1855,43 +1871,42 @@ namespace internal { namespace { // Helper function for IsHRESULT{SuccessFailure} predicates -AssertionResult HRESULTFailureHelper(const char* expr, - const char* expected, +AssertionResult HRESULTFailureHelper(const char* expr, const char* expected, long hr) { // NOLINT -# if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_TV_TITLE +#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_TV_TITLE // Windows CE doesn't support FormatMessage. 
  const char error_text[] = "";
-# else
+#else
  // Looks up the human-readable system message for the HRESULT code
  // and since we're not passing any params to FormatMessage, we don't
  // want inserts expanded.
-  const DWORD kFlags = FORMAT_MESSAGE_FROM_SYSTEM |
-                       FORMAT_MESSAGE_IGNORE_INSERTS;
+  const DWORD kFlags =
+      FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS;
  const DWORD kBufSize = 4096;
  // Gets the system's human readable message string for this HRESULT.
-  char error_text[kBufSize] = { '\0' };
+  char error_text[kBufSize] = {'\0'};
  DWORD message_length = ::FormatMessageA(kFlags,
-                                          0,  // no source, we're asking system
+                                          0,    // no source, we're asking system
                                          static_cast<DWORD>(hr),  // the error
-                                          0,  // no line width restrictions
+                                          0,    // no line width restrictions
                                          error_text,  // output buffer
                                          kBufSize,    // buf size
                                          nullptr);    // no arguments for inserts
  // Trims tailing white space (FormatMessage leaves a trailing CR-LF)
  for (; message_length && IsSpace(error_text[message_length - 1]);
-          --message_length) {
+       --message_length) {
    error_text[message_length - 1] = '\0';
  }
-# endif  // GTEST_OS_WINDOWS_MOBILE
+#endif  // GTEST_OS_WINDOWS_MOBILE

  const std::string error_hex("0x" + String::FormatHexInt(hr));
  return ::testing::AssertionFailure()
-      << "Expected: " << expr << " " << expected << ".\n"
-      << "  Actual: " << error_hex << " " << error_text << "\n";
+         << "Expected: " << expr << " " << expected << ".\n"
+         << "  Actual: " << error_hex << " " << error_text << "\n";
}

}  // namespace
@@ -1925,16 +1940,18 @@ AssertionResult IsHRESULTFailure(const char* expr, long hr) {  // NOLINT
//  17 - 21 bits       11110xxx 10xxxxxx 10xxxxxx 10xxxxxx

// The maximum code-point a one-byte UTF-8 sequence can represent.
-constexpr uint32_t kMaxCodePoint1 = (static_cast<uint32_t>(1) <<  7) - 1;
+constexpr uint32_t kMaxCodePoint1 = (static_cast<uint32_t>(1) << 7) - 1;

// The maximum code-point a two-byte UTF-8 sequence can represent.
constexpr uint32_t kMaxCodePoint2 = (static_cast<uint32_t>(1) << (5 + 6)) - 1;

// The maximum code-point a three-byte UTF-8 sequence can represent.
-constexpr uint32_t kMaxCodePoint3 = (static_cast<uint32_t>(1) << (4 + 2*6)) - 1;
+constexpr uint32_t kMaxCodePoint3 =
+    (static_cast<uint32_t>(1) << (4 + 2 * 6)) - 1;

// The maximum code-point a four-byte UTF-8 sequence can represent.
-constexpr uint32_t kMaxCodePoint4 = (static_cast<uint32_t>(1) << (3 + 3*6)) - 1;
+constexpr uint32_t kMaxCodePoint4 =
+    (static_cast<uint32_t>(1) << (3 + 3 * 6)) - 1;

// Chops off the n lowest bits from a bit pattern.  Returns the n
// lowest bits.  As a side effect, the original bit pattern will be
@@ -1959,7 +1976,7 @@ std::string CodePointToUtf8(uint32_t code_point) {
  char str[5];  // Big enough for the largest valid code point.
  if (code_point <= kMaxCodePoint1) {
    str[1] = '\0';
-    str[0] = static_cast<char>(code_point);                          // 0xxxxxxx
+    str[0] = static_cast<char>(code_point);  // 0xxxxxxx
  } else if (code_point <= kMaxCodePoint2) {
    str[2] = '\0';
    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx
@@ -1987,8 +2004,8 @@ std::string CodePointToUtf8(uint32_t code_point) {
// and thus should be combined into a single Unicode code point
// using CreateCodePointFromUtf16SurrogatePair.
inline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {
-  return sizeof(wchar_t) == 2 &&
-      (first & 0xFC00) == 0xD800 && (second & 0xFC00) == 0xDC00;
+  return sizeof(wchar_t) == 2 && (first & 0xFC00) == 0xD800 &&
+         (second & 0xFC00) == 0xDC00;
}

// Creates a Unicode code point from UTF16 surrogate pair.
@@ -2019,8 +2036,7 @@ inline uint32_t CreateCodePointFromUtf16SurrogatePair(wchar_t first, // and contains invalid UTF-16 surrogate pairs, values in those pairs // will be encoded as individual Unicode characters from Basic Normal Plane. std::string WideStringToUtf8(const wchar_t* str, int num_chars) { - if (num_chars == -1) - num_chars = static_cast(wcslen(str)); + if (num_chars == -1) num_chars = static_cast(wcslen(str)); ::std::stringstream stream; for (int i = 0; i < num_chars; ++i) { @@ -2029,8 +2045,8 @@ std::string WideStringToUtf8(const wchar_t* str, int num_chars) { if (str[i] == L'\0') { break; } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) { - unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i], - str[i + 1]); + unicode_code_point = + CreateCodePointFromUtf16SurrogatePair(str[i], str[i + 1]); i++; } else { unicode_code_point = static_cast(str[i]); @@ -2043,7 +2059,7 @@ std::string WideStringToUtf8(const wchar_t* str, int num_chars) { // Converts a wide C string to an std::string using the UTF-8 encoding. // NULL will be converted to "(null)". -std::string String::ShowWideCString(const wchar_t * wide_c_str) { +std::string String::ShowWideCString(const wchar_t* wide_c_str) { if (wide_c_str == nullptr) return "(null)"; return internal::WideStringToUtf8(wide_c_str, -1); @@ -2055,7 +2071,7 @@ std::string String::ShowWideCString(const wchar_t * wide_c_str) { // Unlike wcscmp(), this function can handle NULL argument(s). A NULL // C string is considered different to any non-NULL C string, // including the empty string. -bool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) { +bool String::WideCStringEquals(const wchar_t* lhs, const wchar_t* rhs) { if (lhs == nullptr) return rhs == nullptr; if (rhs == nullptr) return false; @@ -2065,33 +2081,27 @@ bool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) { // Helper function for *_STREQ on wide strings. AssertionResult CmpHelperSTREQ(const char* lhs_expression, - const char* rhs_expression, - const wchar_t* lhs, + const char* rhs_expression, const wchar_t* lhs, const wchar_t* rhs) { if (String::WideCStringEquals(lhs, rhs)) { return AssertionSuccess(); } - return EqFailure(lhs_expression, - rhs_expression, - PrintToString(lhs), - PrintToString(rhs), - false); + return EqFailure(lhs_expression, rhs_expression, PrintToString(lhs), + PrintToString(rhs), false); } // Helper function for *_STRNE on wide strings. AssertionResult CmpHelperSTRNE(const char* s1_expression, - const char* s2_expression, - const wchar_t* s1, + const char* s2_expression, const wchar_t* s1, const wchar_t* s2) { if (!String::WideCStringEquals(s1, s2)) { return AssertionSuccess(); } - return AssertionFailure() << "Expected: (" << s1_expression << ") != (" - << s2_expression << "), actual: " - << PrintToString(s1) - << " vs " << PrintToString(s2); + return AssertionFailure() + << "Expected: (" << s1_expression << ") != (" << s2_expression + << "), actual: " << PrintToString(s1) << " vs " << PrintToString(s2); } // Compares two C strings, ignoring case. Returns true if and only if they have @@ -2100,7 +2110,7 @@ AssertionResult CmpHelperSTRNE(const char* s1_expression, // Unlike strcasecmp(), this function can handle NULL argument(s). A // NULL C string is considered different to any non-NULL C string, // including the empty string. 
-bool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) {
+bool String::CaseInsensitiveCStringEquals(const char* lhs, const char* rhs) {
  if (lhs == nullptr) return rhs == nullptr;
  if (rhs == nullptr) return false;
  return posix::StrCaseCmp(lhs, rhs) == 0;
@@ -2142,8 +2152,8 @@ bool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs,

// Returns true if and only if str ends with the given suffix, ignoring case.
// Any string is considered to end with an empty suffix.
-bool String::EndsWithCaseInsensitive(
-    const std::string& str, const std::string& suffix) {
+bool String::EndsWithCaseInsensitive(const std::string& str,
+                                     const std::string& suffix) {
  const size_t str_len = str.length();
  const size_t suffix_len = suffix.length();
  return (str_len >= suffix_len) &&
@@ -2226,15 +2236,13 @@ TestResult::TestResult()
    : death_test_count_(0), start_timestamp_(0), elapsed_time_(0) {}

// D'tor.
-TestResult::~TestResult() {
-}
+TestResult::~TestResult() {}

// Returns the i-th test part result among all the results. i can
// range from 0 to total_part_count() - 1. If i is not in that range,
// aborts the program.
const TestPartResult& TestResult::GetTestPartResult(int i) const {
-  if (i < 0 || i >= total_part_count())
-    internal::posix::Abort();
+  if (i < 0 || i >= total_part_count()) internal::posix::Abort();
  return test_part_results_.at(static_cast<size_t>(i));
}

// Returns the i-th test property. i can range from 0 to
// test_property_count() - 1. If i is not in that range, aborts the
// program.
const TestProperty& TestResult::GetTestProperty(int i) const {
-  if (i < 0 || i >= test_property_count())
-    internal::posix::Abort();
+  if (i < 0 || i >= test_property_count()) internal::posix::Abort();
  return test_properties_.at(static_cast<size_t>(i));
}

// Clears the test part results.
-void TestResult::ClearTestPartResults() {
-  test_part_results_.clear();
-}
+void TestResult::ClearTestPartResults() { test_part_results_.clear(); }

// Adds a test part result to the list.
void TestResult::AddTestPartResult(const TestPartResult& test_part_result) {
@@ -2279,15 +2284,8 @@ void TestResult::RecordProperty(const std::string& xml_element,

// The list of reserved attributes used in the <testsuites> element of XML
// output.
static const char* const kReservedTestSuitesAttributes[] = {
-  "disabled",
-  "errors",
-  "failures",
-  "name",
-  "random_seed",
-  "tests",
-  "time",
-  "timestamp"
-};
+    "disabled", "errors", "failures", "name",
+    "random_seed", "tests", "time", "timestamp"};

// The list of reserved attributes used in the <testsuite> element of XML
// output.
@@ -2297,8 +2295,8 @@ static const char* const kReservedTestSuiteAttributes[] = {

// The list of reserved attributes used in the <testcase> element of XML output.
static const char* const kReservedTestCaseAttributes[] = { - "classname", "name", "status", "time", "type_param", - "value_param", "file", "line"}; + "classname", "name", "status", "time", + "type_param", "value_param", "file", "line"}; // Use a slightly different set for allowed output to ensure existing tests can // still RecordProperty("result") or "RecordProperty(timestamp") @@ -2360,7 +2358,7 @@ static bool ValidateTestPropertyName( const std::string& property_name, const std::vector& reserved_names) { if (std::find(reserved_names.begin(), reserved_names.end(), property_name) != - reserved_names.end()) { + reserved_names.end()) { ADD_FAILURE() << "Reserved key used in RecordProperty(): " << property_name << " (" << FormatWordList(reserved_names) << " are reserved by " << GTEST_NAME_ << ")"; @@ -2398,8 +2396,7 @@ bool TestResult::Skipped() const { // Returns true if and only if the test failed. bool TestResult::Failed() const { for (int i = 0; i < total_part_count(); ++i) { - if (GetTestPartResult(i).failed()) - return true; + if (GetTestPartResult(i).failed()) return true; } return false; } @@ -2440,38 +2437,31 @@ int TestResult::test_property_count() const { // Creates a Test object. // The c'tor saves the states of all flags. -Test::Test() - : gtest_flag_saver_(new GTEST_FLAG_SAVER_) { -} +Test::Test() : gtest_flag_saver_(new GTEST_FLAG_SAVER_) {} // The d'tor restores the states of all flags. The actual work is // done by the d'tor of the gtest_flag_saver_ field, and thus not // visible here. -Test::~Test() { -} +Test::~Test() {} // Sets up the test fixture. // // A sub-class may override this. -void Test::SetUp() { -} +void Test::SetUp() {} // Tears down the test fixture. // // A sub-class may override this. -void Test::TearDown() { -} +void Test::TearDown() {} // Allows user supplied key value pairs to be recorded for later output. void Test::RecordProperty(const std::string& key, const std::string& value) { UnitTest::GetInstance()->RecordProperty(key, value); } - -// Allows user supplied key value pairs to be recorded for later output. -void Test::RecordProperty(const std::string& key, int value) { - Message value_message; - value_message << value; - RecordProperty(key, value_message.GetString().c_str()); +// We do not define a customary serialization except for integers, +// but other values could be logged in this way. +void Test::RecordProperty(const std::string& key, int64_t value) { + RecordProperty(key, (Message() << value).GetString()); } namespace internal { @@ -2565,8 +2555,8 @@ bool Test::HasSameFixtureClass() { static std::string* FormatSehExceptionMessage(DWORD exception_code, const char* location) { Message message; - message << "SEH exception with code 0x" << std::setbase(16) << - exception_code << std::setbase(10) << " thrown in " << location << "."; + message << "SEH exception with code 0x" << std::setbase(16) << exception_code + << std::setbase(10) << " thrown in " << location << "."; return new std::string(message.GetString()); } @@ -2609,8 +2599,8 @@ GoogleTestFailureException::GoogleTestFailureException( // exceptions in the same function. Therefore, we provide a separate // wrapper function for handling SEH exceptions.) 
template -Result HandleSehExceptionsInMethodIfSupported( - T* object, Result (T::*method)(), const char* location) { +Result HandleSehExceptionsInMethodIfSupported(T* object, Result (T::*method)(), + const char* location) { #if GTEST_HAS_SEH __try { return (object->*method)(); @@ -2619,8 +2609,8 @@ Result HandleSehExceptionsInMethodIfSupported( // We create the exception message on the heap because VC++ prohibits // creation of objects with destructors on stack in functions using __try // (see error C2712). - std::string* exception_message = FormatSehExceptionMessage( - GetExceptionCode(), location); + std::string* exception_message = + FormatSehExceptionMessage(GetExceptionCode(), location); internal::ReportFailureInUnknownLocation(TestPartResult::kFatalFailure, *exception_message); delete exception_message; @@ -2636,8 +2626,8 @@ Result HandleSehExceptionsInMethodIfSupported( // exceptions, if they are supported; returns the 0-value for type // Result in case of an SEH exception. template -Result HandleExceptionsInMethodIfSupported( - T* object, Result (T::*method)(), const char* location) { +Result HandleExceptionsInMethodIfSupported(T* object, Result (T::*method)(), + const char* location) { // NOTE: The user code can affect the way in which Google Test handles // exceptions by setting GTEST_FLAG(catch_exceptions), but only before // RUN_ALL_TESTS() starts. It is technically possible to check the flag @@ -2703,16 +2693,16 @@ void Test::Run() { // GTEST_SKIP(). if (!HasFatalFailure() && !IsSkipped()) { impl->os_stack_trace_getter()->UponLeavingGTest(); - internal::HandleExceptionsInMethodIfSupported( - this, &Test::TestBody, "the test body"); + internal::HandleExceptionsInMethodIfSupported(this, &Test::TestBody, + "the test body"); } // However, we want to clean up as much as possible. Hence we will // always call TearDown(), even if SetUp() or the test body has // failed. impl->os_stack_trace_getter()->UponLeavingGTest(); - internal::HandleExceptionsInMethodIfSupported( - this, &Test::TearDown, "TearDown()"); + internal::HandleExceptionsInMethodIfSupported(this, &Test::TearDown, + "TearDown()"); } // Returns true if and only if the current test has a fatal failure. @@ -2722,8 +2712,9 @@ bool Test::HasFatalFailure() { // Returns true if and only if the current test has a non-fatal failure. bool Test::HasNonfatalFailure() { - return internal::GetUnitTestImpl()->current_test_result()-> - HasNonfatalFailure(); + return internal::GetUnitTestImpl() + ->current_test_result() + ->HasNonfatalFailure(); } // Returns true if and only if the current test was skipped. @@ -2742,7 +2733,8 @@ TestInfo::TestInfo(const std::string& a_test_suite_name, internal::TypeId fixture_class_id, internal::TestFactoryBase* factory) : test_suite_name_(a_test_suite_name), - name_(a_name), + // begin()/end() is MSVC 17.3.3 ASAN crash workaround (GitHub issue #3997) + name_(a_name.begin(), a_name.end()), type_param_(a_type_param ? new std::string(a_type_param) : nullptr), value_param_(a_value_param ? new std::string(a_value_param) : nullptr), location_(a_code_location), @@ -2806,38 +2798,6 @@ void ReportInvalidTestSuiteType(const char* test_suite_name, code_location.line) << " " << errors.GetString(); } -} // namespace internal - -namespace { - -// A predicate that checks the test name of a TestInfo against a known -// value. -// -// This is used for implementation of the TestSuite class only. We put -// it in the anonymous namespace to prevent polluting the outer -// namespace. -// -// TestNameIs is copyable. 
-class TestNameIs { - public: - // Constructor. - // - // TestNameIs has NO default constructor. - explicit TestNameIs(const char* name) - : name_(name) {} - - // Returns true if and only if the test name of test_info matches name_. - bool operator()(const TestInfo * test_info) const { - return test_info && test_info->name() == name_; - } - - private: - std::string name_; -}; - -} // namespace - -namespace internal { // This method expands all parameterized tests registered with macros TEST_P // and INSTANTIATE_TEST_SUITE_P into regular tests and registers those. @@ -2855,20 +2815,20 @@ void UnitTestImpl::RegisterParameterizedTests() { // Creates the test object, runs it, records its result, and then // deletes it. void TestInfo::Run() { - if (!should_run_) return; + TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); + if (!should_run_) { + if (is_disabled_ && matches_filter_) repeater->OnTestDisabled(*this); + return; + } // Tells UnitTest where to store test result. internal::UnitTestImpl* const impl = internal::GetUnitTestImpl(); impl->set_current_test_info(this); - TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater(); - // Notifies the unit test event listeners that a test is about to start. repeater->OnTestStart(*this); - result_.set_start_timestamp(internal::GetTimeInMillis()); internal::Timer timer; - impl->os_stack_trace_getter()->UponLeavingGTest(); // Creates the test object. @@ -3033,10 +2993,16 @@ void TestSuite::Run() { internal::HandleExceptionsInMethodIfSupported( this, &TestSuite::RunSetUpTestSuite, "SetUpTestSuite()"); + const bool skip_all = ad_hoc_test_result().Failed(); + start_timestamp_ = internal::GetTimeInMillis(); internal::Timer timer; for (int i = 0; i < total_test_count(); i++) { - GetMutableTestInfo(i)->Run(); + if (skip_all) { + GetMutableTestInfo(i)->Skip(); + } else { + GetMutableTestInfo(i)->Run(); + } if (GTEST_FLAG_GET(fail_fast) && GetMutableTestInfo(i)->result()->Failed()) { for (int j = i + 1; j < total_test_count(); j++) { @@ -3114,11 +3080,10 @@ void TestSuite::UnshuffleTests() { // // FormatCountableNoun(1, "formula", "formuli") returns "1 formula". // FormatCountableNoun(5, "book", "books") returns "5 books". -static std::string FormatCountableNoun(int count, - const char * singular_form, - const char * plural_form) { +static std::string FormatCountableNoun(int count, const char* singular_form, + const char* plural_form) { return internal::StreamableToString(count) + " " + - (count == 1 ? singular_form : plural_form); + (count == 1 ? singular_form : plural_form); } // Formats the count of tests. @@ -3135,7 +3100,7 @@ static std::string FormatTestSuiteCount(int test_suite_count) { // representation. Both kNonFatalFailure and kFatalFailure are translated // to "Failure", as the user usually doesn't care about the difference // between the two when viewing the test result. -static const char * TestPartResultTypeToString(TestPartResult::Type type) { +static const char* TestPartResultTypeToString(TestPartResult::Type type) { switch (type) { case TestPartResult::kSkip: return "Skipped\n"; @@ -3162,17 +3127,18 @@ enum class GTestColor { kDefault, kRed, kGreen, kYellow }; // Prints a TestPartResult to an std::string. 
static std::string PrintTestPartResultToString( const TestPartResult& test_part_result) { - return (Message() - << internal::FormatFileLocation(test_part_result.file_name(), - test_part_result.line_number()) - << " " << TestPartResultTypeToString(test_part_result.type()) - << test_part_result.message()).GetString(); + return (Message() << internal::FormatFileLocation( + test_part_result.file_name(), + test_part_result.line_number()) + << " " + << TestPartResultTypeToString(test_part_result.type()) + << test_part_result.message()) + .GetString(); } // Prints a TestPartResult. static void PrintTestPartResult(const TestPartResult& test_part_result) { - const std::string& result = - PrintTestPartResultToString(test_part_result); + const std::string& result = PrintTestPartResultToString(test_part_result); printf("%s\n", result.c_str()); fflush(stdout); // If the test program runs in Visual Studio or a debugger, the @@ -3189,8 +3155,8 @@ static void PrintTestPartResult(const TestPartResult& test_part_result) { } // class PrettyUnitTestResultPrinter -#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \ - !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW +#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && \ + !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW // Returns the character attribute for the given color. static WORD GetColorAttribute(GTestColor color) { @@ -3201,7 +3167,8 @@ static WORD GetColorAttribute(GTestColor color) { return FOREGROUND_GREEN; case GTestColor::kYellow: return FOREGROUND_RED | FOREGROUND_GREEN; - default: return 0; + default: + return 0; } } @@ -3269,25 +3236,23 @@ bool ShouldUseColor(bool stdout_is_tty) { // On non-Windows platforms, we rely on the TERM variable. const char* const term = posix::GetEnv("TERM"); const bool term_supports_color = - String::CStringEquals(term, "xterm") || - String::CStringEquals(term, "xterm-color") || - String::CStringEquals(term, "xterm-256color") || - String::CStringEquals(term, "screen") || - String::CStringEquals(term, "screen-256color") || - String::CStringEquals(term, "tmux") || - String::CStringEquals(term, "tmux-256color") || - String::CStringEquals(term, "rxvt-unicode") || - String::CStringEquals(term, "rxvt-unicode-256color") || - String::CStringEquals(term, "linux") || - String::CStringEquals(term, "cygwin"); + term != nullptr && (String::CStringEquals(term, "xterm") || + String::CStringEquals(term, "xterm-color") || + String::CStringEquals(term, "xterm-kitty") || + String::CStringEquals(term, "screen") || + String::CStringEquals(term, "tmux") || + String::CStringEquals(term, "rxvt-unicode") || + String::CStringEquals(term, "linux") || + String::CStringEquals(term, "cygwin") || + String::EndsWithCaseInsensitive(term, "-256color")); return stdout_is_tty && term_supports_color; #endif // GTEST_OS_WINDOWS } return String::CaseInsensitiveCStringEquals(gtest_color, "yes") || - String::CaseInsensitiveCStringEquals(gtest_color, "true") || - String::CaseInsensitiveCStringEquals(gtest_color, "t") || - String::CStringEquals(gtest_color, "1"); + String::CaseInsensitiveCStringEquals(gtest_color, "true") || + String::CaseInsensitiveCStringEquals(gtest_color, "t") || + String::CStringEquals(gtest_color, "1"); // We take "yes", "true", "t", and "1" as meaning "yes". If the // value is neither one of these nor "auto", we treat it as "no" to // be conservative. @@ -3299,18 +3264,18 @@ bool ShouldUseColor(bool stdout_is_tty) { // that would be colored when printed, as can be done on Linux. 
GTEST_ATTRIBUTE_PRINTF_(2, 3) -static void ColoredPrintf(GTestColor color, const char *fmt, ...) { +static void ColoredPrintf(GTestColor color, const char* fmt, ...) { va_list args; va_start(args, fmt); -#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \ - GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT || defined(ESP_PLATFORM) - const bool use_color = AlwaysFalse(); -#else static const bool in_color_mode = +#if GTEST_HAS_FILE_SYSTEM ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0); +#else + false; +#endif // GTEST_HAS_FILE_SYSTEM + const bool use_color = in_color_mode && (color != GTestColor::kDefault); -#endif // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS if (!use_color) { vprintf(fmt, args); @@ -3318,8 +3283,8 @@ static void ColoredPrintf(GTestColor color, const char *fmt, ...) { return; } -#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \ - !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW +#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && \ + !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE); // Gets the current text color. @@ -3390,6 +3355,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener { #endif // OnTestCaseStart void OnTestStart(const TestInfo& test_info) override; + void OnTestDisabled(const TestInfo& test_info) override; void OnTestPartResult(const TestPartResult& result) override; void OnTestEnd(const TestInfo& test_info) override; @@ -3410,7 +3376,7 @@ class PrettyUnitTestResultPrinter : public TestEventListener { static void PrintSkippedTests(const UnitTest& unit_test); }; - // Fired before each iteration of tests starts. +// Fired before each iteration of tests starts. void PrettyUnitTestResultPrinter::OnTestIterationStart( const UnitTest& unit_test, int iteration) { if (GTEST_FLAG_GET(repeat) != 1) @@ -3489,6 +3455,13 @@ void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) { fflush(stdout); } +void PrettyUnitTestResultPrinter::OnTestDisabled(const TestInfo& test_info) { + ColoredPrintf(GTestColor::kYellow, "[ DISABLED ] "); + PrintTestName(test_info.test_suite_name(), test_info.name()); + printf("\n"); + fflush(stdout); +} + // Called after an assertion failure. 
void PrettyUnitTestResultPrinter::OnTestPartResult( const TestPartResult& result) { @@ -3513,12 +3486,12 @@ void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) { ColoredPrintf(GTestColor::kRed, "[ FAILED ] "); } PrintTestName(test_info.test_suite_name(), test_info.name()); - if (test_info.result()->Failed()) - PrintFullTestCommentIfPresent(test_info); + if (test_info.result()->Failed()) PrintFullTestCommentIfPresent(test_info); if (GTEST_FLAG_GET(print_time)) { - printf(" (%s ms)\n", internal::StreamableToString( - test_info.result()->elapsed_time()).c_str()); + printf(" (%s ms)\n", + internal::StreamableToString(test_info.result()->elapsed_time()) + .c_str()); } else { printf("\n"); } @@ -3691,6 +3664,7 @@ class BriefUnitTestResultPrinter : public TestEventListener { #endif // OnTestCaseStart void OnTestStart(const TestInfo& /*test_info*/) override {} + void OnTestDisabled(const TestInfo& /*test_info*/) override {} void OnTestPartResult(const TestPartResult& result) override; void OnTestEnd(const TestInfo& test_info) override; @@ -3779,7 +3753,7 @@ class TestEventRepeater : public TestEventListener { public: TestEventRepeater() : forwarding_enabled_(true) {} ~TestEventRepeater() override; - void Append(TestEventListener *listener); + void Append(TestEventListener* listener); TestEventListener* Release(TestEventListener* listener); // Controls whether events will be forwarded to listeners_. Set to false @@ -3797,6 +3771,7 @@ class TestEventRepeater : public TestEventListener { #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ void OnTestSuiteStart(const TestSuite& parameter) override; void OnTestStart(const TestInfo& test_info) override; + void OnTestDisabled(const TestInfo& test_info) override; void OnTestPartResult(const TestPartResult& result) override; void OnTestEnd(const TestInfo& test_info) override; // Legacy API is deprecated but still available @@ -3816,18 +3791,19 @@ class TestEventRepeater : public TestEventListener { // The list of listeners that receive events. std::vector listeners_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventRepeater); + TestEventRepeater(const TestEventRepeater&) = delete; + TestEventRepeater& operator=(const TestEventRepeater&) = delete; }; TestEventRepeater::~TestEventRepeater() { ForEach(listeners_, Delete); } -void TestEventRepeater::Append(TestEventListener *listener) { +void TestEventRepeater::Append(TestEventListener* listener) { listeners_.push_back(listener); } -TestEventListener* TestEventRepeater::Release(TestEventListener *listener) { +TestEventListener* TestEventRepeater::Release(TestEventListener* listener) { for (size_t i = 0; i < listeners_.size(); ++i) { if (listeners_[i] == listener) { listeners_.erase(listeners_.begin() + static_cast(i)); @@ -3840,14 +3816,14 @@ TestEventListener* TestEventRepeater::Release(TestEventListener *listener) { // Since most methods are very similar, use macros to reduce boilerplate. // This defines a member that forwards the call to all listeners. 
-#define GTEST_REPEATER_METHOD_(Name, Type) \ -void TestEventRepeater::Name(const Type& parameter) { \ - if (forwarding_enabled_) { \ - for (size_t i = 0; i < listeners_.size(); i++) { \ - listeners_[i]->Name(parameter); \ - } \ - } \ -} +#define GTEST_REPEATER_METHOD_(Name, Type) \ + void TestEventRepeater::Name(const Type& parameter) { \ + if (forwarding_enabled_) { \ + for (size_t i = 0; i < listeners_.size(); i++) { \ + listeners_[i]->Name(parameter); \ + } \ + } \ + } // This defines a member that forwards the call to all listeners in reverse // order. #define GTEST_REVERSE_REPEATER_METHOD_(Name, Type) \ @@ -3867,6 +3843,7 @@ GTEST_REPEATER_METHOD_(OnTestCaseStart, TestSuite) #endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ GTEST_REPEATER_METHOD_(OnTestSuiteStart, TestSuite) GTEST_REPEATER_METHOD_(OnTestStart, TestInfo) +GTEST_REPEATER_METHOD_(OnTestDisabled, TestInfo) GTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult) GTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest) GTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest) @@ -3902,6 +3879,7 @@ void TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test, // End TestEventRepeater +#if GTEST_HAS_FILE_SYSTEM // This class generates an XML output file. class XmlUnitTestResultPrinter : public EmptyTestEventListener { public: @@ -3917,12 +3895,13 @@ class XmlUnitTestResultPrinter : public EmptyTestEventListener { private: // Is c a whitespace character that is normalized to a space character // when it appears in an XML attribute value? - static bool IsNormalizableWhitespace(char c) { - return c == 0x9 || c == 0xA || c == 0xD; + static bool IsNormalizableWhitespace(unsigned char c) { + return c == '\t' || c == '\n' || c == '\r'; } // May c appear in a well-formed XML document? - static bool IsValidXmlCharacter(char c) { + // https://www.w3.org/TR/REC-xml/#charsets + static bool IsValidXmlCharacter(unsigned char c) { return IsNormalizableWhitespace(c) || c >= 0x20; } @@ -3992,7 +3971,8 @@ class XmlUnitTestResultPrinter : public EmptyTestEventListener { // The output file. const std::string output_file_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(XmlUnitTestResultPrinter); + XmlUnitTestResultPrinter(const XmlUnitTestResultPrinter&) = delete; + XmlUnitTestResultPrinter& operator=(const XmlUnitTestResultPrinter&) = delete; }; // Creates a new XmlUnitTestResultPrinter. @@ -4032,8 +4012,8 @@ void XmlUnitTestResultPrinter::ListTestsMatchingFilter( // module will consist of ordinary English text. // If this module is ever modified to produce version 1.1 XML output, // most invalid characters can be retained using character references. 
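A note for readers tracing the XML printer changes: EscapeXml applies the five standard XML entity substitutions and, inside attribute values, turns normalizable whitespace into numeric character references. A minimal JavaScript sketch of that idea (illustrative only; the implementation above is C++, and escapeXml here is a hypothetical helper):

'use strict';
// Sketch: the five XML-special characters become entities; in attribute
// values, tab/newline/carriage return become numeric character references
// so a parser will hand them back unchanged.
function escapeXml(str, isAttribute) {
  let out = '';
  for (const ch of str) {
    if (ch === '<') out += '&lt;';
    else if (ch === '>') out += '&gt;';
    else if (ch === '&') out += '&amp;';
    else if (ch === "'") out += '&apos;';
    else if (ch === '"') out += '&quot;';
    else if (isAttribute && (ch === '\t' || ch === '\n' || ch === '\r'))
      out += '&#x' +
        ch.codePointAt(0).toString(16).toUpperCase().padStart(2, '0') + ';';
    else out += ch;
  }
  return out;
}
console.log(escapeXml('a<b & "c"\n', true)); // a&lt;b &amp; &quot;c&quot;&#x0A;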
-std::string XmlUnitTestResultPrinter::EscapeXml( - const std::string& str, bool is_attribute) { +std::string XmlUnitTestResultPrinter::EscapeXml(const std::string& str, + bool is_attribute) { Message m; for (size_t i = 0; i < str.size(); ++i) { @@ -4061,8 +4041,9 @@ std::string XmlUnitTestResultPrinter::EscapeXml( m << '"'; break; default: - if (IsValidXmlCharacter(ch)) { - if (is_attribute && IsNormalizableWhitespace(ch)) + if (IsValidXmlCharacter(static_cast(ch))) { + if (is_attribute && + IsNormalizableWhitespace(static_cast(ch))) m << "&#x" << String::FormatByte(static_cast(ch)) << ";"; else @@ -4083,7 +4064,7 @@ std::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters( std::string output; output.reserve(str.size()); for (std::string::const_iterator it = str.begin(); it != str.end(); ++it) - if (IsValidXmlCharacter(*it)) + if (IsValidXmlCharacter(static_cast(*it))) output.push_back(*it); return output; @@ -4091,7 +4072,6 @@ std::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters( // The following routines generate an XML representation of a UnitTest // object. -// GOOGLETEST_CM0009 DO NOT DELETE // // This is how Google Test concepts map to the DTD: // @@ -4140,12 +4120,12 @@ std::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms) { return ""; // YYYY-MM-DDThh:mm:ss.sss return StreamableToString(time_struct.tm_year + 1900) + "-" + - String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" + - String::FormatIntWidth2(time_struct.tm_mday) + "T" + - String::FormatIntWidth2(time_struct.tm_hour) + ":" + - String::FormatIntWidth2(time_struct.tm_min) + ":" + - String::FormatIntWidth2(time_struct.tm_sec) + "." + - String::FormatIntWidthN(static_cast(ms % 1000), 3); + String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" + + String::FormatIntWidth2(time_struct.tm_mday) + "T" + + String::FormatIntWidth2(time_struct.tm_hour) + ":" + + String::FormatIntWidth2(time_struct.tm_min) + ":" + + String::FormatIntWidth2(time_struct.tm_sec) + "." + + String::FormatIntWidthN(static_cast(ms % 1000), 3); } // Streams an XML CDATA section, escaping invalid CDATA sequences as needed. 
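One detail in the CDATA routine below is worth spelling out: a CDATA section has no escaping mechanism, so the only way to embed the terminator "]]>" is to close the section and reopen it around a split. A tiny JavaScript illustration of the splitting idea (toCData is a hypothetical helper; gtest's exact byte sequence differs slightly, but the principle is the same):

'use strict';
// Close the CDATA section just before each "]]>" and reopen it, so the
// terminator never appears literally inside a single section.
function toCData(text) {
  return '<![CDATA[' + text.split(']]>').join(']]]]><![CDATA[>') + ']]>';
}
console.log(toCData('a]]>b')); // <![CDATA[a]]]]><![CDATA[>b]]>
// An XML parser reassembles the two sections back into: a]]>b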
@@ -4156,8 +4136,8 @@ void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream, for (;;) { const char* const next_segment = strstr(segment, "]]>"); if (next_segment != nullptr) { - stream->write( - segment, static_cast(next_segment - segment)); + stream->write(segment, + static_cast(next_segment - segment)); *stream << "]]>]]>"); } else { @@ -4169,15 +4149,13 @@ void XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream, } void XmlUnitTestResultPrinter::OutputXmlAttribute( - std::ostream* stream, - const std::string& element_name, - const std::string& name, - const std::string& value) { + std::ostream* stream, const std::string& element_name, + const std::string& name, const std::string& value) { const std::vector& allowed_names = GetReservedOutputAttributesForElement(element_name); GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != - allowed_names.end()) + allowed_names.end()) << "Attribute " << name << " is not allowed for element <" << element_name << ">."; @@ -4243,10 +4221,11 @@ void XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream, OutputXmlAttribute(stream, kTestsuite, "type_param", test_info.type_param()); } + + OutputXmlAttribute(stream, kTestsuite, "file", test_info.file()); + OutputXmlAttribute(stream, kTestsuite, "line", + StreamableToString(test_info.line())); if (GTEST_FLAG_GET(list_tests)) { - OutputXmlAttribute(stream, kTestsuite, "file", test_info.file()); - OutputXmlAttribute(stream, kTestsuite, "line", - StreamableToString(test_info.line())); *stream << " />\n"; return; } @@ -4281,8 +4260,7 @@ void XmlUnitTestResultPrinter::OutputXmlTestResult(::std::ostream* stream, internal::FormatCompilerIndependentFileLocation(part.file_name(), part.line_number()); const std::string summary = location + "\n" + part.summary(); - *stream << " "; const std::string detail = location + "\n" + part.message(); OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str()); @@ -4423,7 +4401,7 @@ std::string XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes( for (int i = 0; i < result.test_property_count(); ++i) { const TestProperty& property = result.GetTestProperty(i); attributes << " " << property.key() << "=" - << "\"" << EscapeXmlAttribute(property.value()) << "\""; + << "\"" << EscapeXmlAttribute(property.value()) << "\""; } return attributes.GetString(); } @@ -4437,19 +4415,21 @@ void XmlUnitTestResultPrinter::OutputXmlTestProperties( return; } - *stream << "<" << kProperties << ">\n"; + *stream << " <" << kProperties << ">\n"; for (int i = 0; i < result.test_property_count(); ++i) { const TestProperty& property = result.GetTestProperty(i); - *stream << "<" << kProperty; + *stream << " <" << kProperty; *stream << " name=\"" << EscapeXmlAttribute(property.key()) << "\""; *stream << " value=\"" << EscapeXmlAttribute(property.value()) << "\""; *stream << "/>\n"; } - *stream << "\n"; + *stream << " \n"; } // End XmlUnitTestResultPrinter +#endif // GTEST_HAS_FILE_SYSTEM +#if GTEST_HAS_FILE_SYSTEM // This class generates an JSON output file. class JsonUnitTestResultPrinter : public EmptyTestEventListener { public: @@ -4469,16 +4449,12 @@ class JsonUnitTestResultPrinter : public EmptyTestEventListener { //// streams the attribute as JSON. 
static void OutputJsonKey(std::ostream* stream, const std::string& element_name, - const std::string& name, - const std::string& value, - const std::string& indent, - bool comma = true); + const std::string& name, const std::string& value, + const std::string& indent, bool comma = true); static void OutputJsonKey(std::ostream* stream, const std::string& element_name, - const std::string& name, - int value, - const std::string& indent, - bool comma = true); + const std::string& name, int value, + const std::string& indent, bool comma = true); // Streams a test suite JSON stanza containing the given test result. // @@ -4511,7 +4487,9 @@ class JsonUnitTestResultPrinter : public EmptyTestEventListener { // The output file. const std::string output_file_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(JsonUnitTestResultPrinter); + JsonUnitTestResultPrinter(const JsonUnitTestResultPrinter&) = delete; + JsonUnitTestResultPrinter& operator=(const JsonUnitTestResultPrinter&) = + delete; }; // Creates a new JsonUnitTestResultPrinter. @@ -4523,7 +4501,7 @@ JsonUnitTestResultPrinter::JsonUnitTestResultPrinter(const char* output_file) } void JsonUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test, - int /*iteration*/) { + int /*iteration*/) { FILE* jsonout = OpenFileForWriting(output_file_); std::stringstream stream; PrintJsonUnitTest(&stream, unit_test); @@ -4589,55 +4567,48 @@ static std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) { return ""; // YYYY-MM-DDThh:mm:ss return StreamableToString(time_struct.tm_year + 1900) + "-" + - String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" + - String::FormatIntWidth2(time_struct.tm_mday) + "T" + - String::FormatIntWidth2(time_struct.tm_hour) + ":" + - String::FormatIntWidth2(time_struct.tm_min) + ":" + - String::FormatIntWidth2(time_struct.tm_sec) + "Z"; + String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" + + String::FormatIntWidth2(time_struct.tm_mday) + "T" + + String::FormatIntWidth2(time_struct.tm_hour) + ":" + + String::FormatIntWidth2(time_struct.tm_min) + ":" + + String::FormatIntWidth2(time_struct.tm_sec) + "Z"; } static inline std::string Indent(size_t width) { return std::string(width, ' '); } -void JsonUnitTestResultPrinter::OutputJsonKey( - std::ostream* stream, - const std::string& element_name, - const std::string& name, - const std::string& value, - const std::string& indent, - bool comma) { +void JsonUnitTestResultPrinter::OutputJsonKey(std::ostream* stream, + const std::string& element_name, + const std::string& name, + const std::string& value, + const std::string& indent, + bool comma) { const std::vector& allowed_names = GetReservedOutputAttributesForElement(element_name); GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != - allowed_names.end()) + allowed_names.end()) << "Key \"" << name << "\" is not allowed for value \"" << element_name << "\"."; *stream << indent << "\"" << name << "\": \"" << EscapeJson(value) << "\""; - if (comma) - *stream << ",\n"; + if (comma) *stream << ",\n"; } void JsonUnitTestResultPrinter::OutputJsonKey( - std::ostream* stream, - const std::string& element_name, - const std::string& name, - int value, - const std::string& indent, - bool comma) { + std::ostream* stream, const std::string& element_name, + const std::string& name, int value, const std::string& indent, bool comma) { const std::vector& allowed_names = GetReservedOutputAttributesForElement(element_name); GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) != - allowed_names.end()) + 
allowed_names.end()) << "Key \"" << name << "\" is not allowed for value \"" << element_name << "\"."; *stream << indent << "\"" << name << "\": " << StreamableToString(value); - if (comma) - *stream << ",\n"; + if (comma) *stream << ",\n"; } // Streams a test suite JSON stanza containing the given test result. @@ -4701,11 +4672,14 @@ void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream, OutputJsonKey(stream, kTestsuite, "type_param", test_info.type_param(), kIndent); } + + OutputJsonKey(stream, kTestsuite, "file", test_info.file(), kIndent); + OutputJsonKey(stream, kTestsuite, "line", test_info.line(), kIndent, false); if (GTEST_FLAG_GET(list_tests)) { - OutputJsonKey(stream, kTestsuite, "file", test_info.file(), kIndent); - OutputJsonKey(stream, kTestsuite, "line", test_info.line(), kIndent, false); *stream << "\n" << Indent(8) << "}"; return; + } else { + *stream << ",\n"; } OutputJsonKey(stream, kTestsuite, "status", @@ -4737,7 +4711,9 @@ void JsonUnitTestResultPrinter::OutputJsonTestResult(::std::ostream* stream, if (part.failed()) { *stream << ",\n"; if (++failures == 1) { - *stream << kIndent << "\"" << "failures" << "\": [\n"; + *stream << kIndent << "\"" + << "failures" + << "\": [\n"; } const std::string location = internal::FormatCompilerIndependentFileLocation(part.file_name(), @@ -4750,8 +4726,7 @@ void JsonUnitTestResultPrinter::OutputJsonTestResult(::std::ostream* stream, } } - if (failures > 0) - *stream << "\n" << kIndent << "]"; + if (failures > 0) *stream << "\n" << kIndent << "]"; *stream << "\n" << Indent(8) << "}"; } @@ -4844,10 +4819,15 @@ void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream, // If there was a test failure outside of one of the test suites (like in a // test environment) include that in the output. if (unit_test.ad_hoc_test_result().Failed()) { + if (comma) { + *stream << ",\n"; + } OutputJsonTestSuiteForTestResult(stream, unit_test.ad_hoc_test_result()); } - *stream << "\n" << kIndent << "]\n" << "}\n"; + *stream << "\n" + << kIndent << "]\n" + << "}\n"; } void JsonUnitTestResultPrinter::PrintJsonTestList( @@ -4882,13 +4862,15 @@ std::string JsonUnitTestResultPrinter::TestPropertiesAsJson( Message attributes; for (int i = 0; i < result.test_property_count(); ++i) { const TestProperty& property = result.GetTestProperty(i); - attributes << ",\n" << indent << "\"" << property.key() << "\": " + attributes << ",\n" + << indent << "\"" << property.key() << "\": " << "\"" << EscapeJson(property.value()) << "\""; } return attributes.GetString(); } // End JsonUnitTestResultPrinter +#endif // GTEST_HAS_FILE_SYSTEM #if GTEST_CAN_STREAM_RESULTS_ @@ -4906,7 +4888,8 @@ std::string StreamingListener::UrlEncode(const char* str) { case '=': case '&': case '\n': - result.append("%" + String::FormatByte(static_cast(ch))); + result.push_back('%'); + result.append(String::FormatByte(static_cast(ch))); break; default: result.push_back(ch); @@ -4922,14 +4905,14 @@ void StreamingListener::SocketWriter::MakeConnection() { addrinfo hints; memset(&hints, 0, sizeof(hints)); - hints.ai_family = AF_UNSPEC; // To allow both IPv4 and IPv6 addresses. + hints.ai_family = AF_UNSPEC; // To allow both IPv4 and IPv6 addresses. hints.ai_socktype = SOCK_STREAM; addrinfo* servinfo = nullptr; // Use the getaddrinfo() to get a linked list of IP addresses for // the given host name. 
- const int error_num = getaddrinfo( - host_name_.c_str(), port_num_.c_str(), &hints, &servinfo); + const int error_num = + getaddrinfo(host_name_.c_str(), port_num_.c_str(), &hints, &servinfo); if (error_num != 0) { GTEST_LOG_(WARNING) << "stream_result_to: getaddrinfo() failed: " << gai_strerror(error_num); @@ -4938,8 +4921,8 @@ void StreamingListener::SocketWriter::MakeConnection() { // Loop through all the results and connect to the first we can. for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != nullptr; cur_addr = cur_addr->ai_next) { - sockfd_ = socket( - cur_addr->ai_family, cur_addr->ai_socktype, cur_addr->ai_protocol); + sockfd_ = socket(cur_addr->ai_family, cur_addr->ai_socktype, + cur_addr->ai_protocol); if (sockfd_ != -1) { // Connect the client socket to the server socket. if (connect(sockfd_, cur_addr->ai_addr, cur_addr->ai_addrlen) == -1) { @@ -5008,7 +4991,7 @@ std::string OsStackTraceGetter::CurrentStackTrace(int max_depth, int skip_count) return result; -#else // !GTEST_HAS_ABSL +#else // !GTEST_HAS_ABSL static_cast(max_depth); static_cast(skip_count); return ""; @@ -5027,26 +5010,27 @@ void OsStackTraceGetter::UponLeavingGTest() GTEST_LOCK_EXCLUDED_(mutex_) { #endif // GTEST_HAS_ABSL } +#if GTEST_HAS_DEATH_TEST // A helper class that creates the premature-exit file in its // constructor and deletes the file in its destructor. class ScopedPrematureExitFile { public: explicit ScopedPrematureExitFile(const char* premature_exit_filepath) - : premature_exit_filepath_(premature_exit_filepath ? - premature_exit_filepath : "") { + : premature_exit_filepath_( + premature_exit_filepath ? premature_exit_filepath : "") { // If a path to the premature-exit file is specified... if (!premature_exit_filepath_.empty()) { // create the file with a single "0" character in it. I/O // errors are ignored as there's nothing better we can do and we // don't want to fail the test because of this. - FILE* pfile = posix::FOpen(premature_exit_filepath, "w"); + FILE* pfile = posix::FOpen(premature_exit_filepath_.c_str(), "w"); fwrite("0", 1, 1, pfile); fclose(pfile); } } ~ScopedPrematureExitFile() { -#if !defined GTEST_OS_ESP8266 +#if !GTEST_OS_ESP8266 if (!premature_exit_filepath_.empty()) { int retval = remove(premature_exit_filepath_.c_str()); if (retval) { @@ -5061,8 +5045,10 @@ class ScopedPrematureExitFile { private: const std::string premature_exit_filepath_; - GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedPrematureExitFile); + ScopedPrematureExitFile(const ScopedPrematureExitFile&) = delete; + ScopedPrematureExitFile& operator=(const ScopedPrematureExitFile&) = delete; }; +#endif // GTEST_HAS_DEATH_TEST } // namespace internal @@ -5235,7 +5221,7 @@ int UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); } // Gets the time of the test program start, in ms from the start of the // UNIX epoch. internal::TimeInMillis UnitTest::start_timestamp() const { - return impl()->start_timestamp(); + return impl()->start_timestamp(); } // Gets the elapsed time, in milliseconds. @@ -5278,9 +5264,7 @@ TestSuite* UnitTest::GetMutableTestSuite(int i) { // Returns the list of event listeners that can be used to track events // inside Google Test. -TestEventListeners& UnitTest::listeners() { - return *impl()->listeners(); -} +TestEventListeners& UnitTest::listeners() { return *impl()->listeners(); } // Registers and returns a global test environment. 
When a test // program is run, all global test environments will be set-up in the @@ -5305,12 +5289,11 @@ Environment* UnitTest::AddEnvironment(Environment* env) { // assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call // this to report their results. The user code should use the // assertion macros instead of calling this directly. -void UnitTest::AddTestPartResult( - TestPartResult::Type result_type, - const char* file_name, - int line_number, - const std::string& message, - const std::string& os_stack_trace) GTEST_LOCK_EXCLUDED_(mutex_) { +void UnitTest::AddTestPartResult(TestPartResult::Type result_type, + const char* file_name, int line_number, + const std::string& message, + const std::string& os_stack_trace) + GTEST_LOCK_EXCLUDED_(mutex_) { Message msg; msg << message; @@ -5320,8 +5303,9 @@ void UnitTest::AddTestPartResult( for (size_t i = impl_->gtest_trace_stack().size(); i > 0; --i) { const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1]; - msg << "\n" << internal::FormatFileLocation(trace.file, trace.line) - << " " << trace.message; + msg << "\n" + << internal::FormatFileLocation(trace.file, trace.line) << " " + << trace.message; } } @@ -5331,8 +5315,8 @@ void UnitTest::AddTestPartResult( const TestPartResult result = TestPartResult( result_type, file_name, line_number, msg.GetString().c_str()); - impl_->GetTestPartResultReporterForCurrentThread()-> - ReportTestPartResult(result); + impl_->GetTestPartResultReporterForCurrentThread()->ReportTestPartResult( + result); if (result_type != TestPartResult::kSuccess && result_type != TestPartResult::kSkip) { @@ -5352,6 +5336,10 @@ void UnitTest::AddTestPartResult( (defined(__x86_64__) || defined(__i386__))) // with clang/gcc we can achieve the same effect on x86 by invoking int3 asm("int3"); +#elif GTEST_HAS_BUILTIN(__builtin_trap) + __builtin_trap(); +#elif defined(SIGTRAP) + raise(SIGTRAP); #else // Dereference nullptr through a volatile pointer to prevent the compiler // from removing. We use this rather than abort() or __builtin_trap() for @@ -5386,6 +5374,7 @@ void UnitTest::RecordProperty(const std::string& key, // We don't protect this under mutex_, as we only support calling it // from the main thread. int UnitTest::Run() { +#if GTEST_HAS_DEATH_TEST const bool in_death_test_child_process = GTEST_FLAG_GET(internal_run_death_test).length() > 0; @@ -5414,6 +5403,7 @@ int UnitTest::Run() { in_death_test_child_process ? nullptr : internal::posix::GetEnv("TEST_PREMATURE_EXIT_FILE")); +#endif // GTEST_HAS_DEATH_TEST // Captures the value of GTEST_FLAG(catch_exceptions). This value will be // used for the duration of the program. @@ -5425,20 +5415,20 @@ int UnitTest::Run() { // process. In either case the user does not want to see pop-up dialogs // about crashes - they are expected. if (impl()->catch_exceptions() || in_death_test_child_process) { -# if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT +#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT // SetErrorMode doesn't exist on CE. SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT | SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX); -# endif // !GTEST_OS_WINDOWS_MOBILE +#endif // !GTEST_OS_WINDOWS_MOBILE -# if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE +#if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE // Death test children can be terminated with _abort(). On Windows, // _abort() can show a dialog with a warning message. 
This forces the // abort message to go to stderr instead. _set_error_mode(_OUT_TO_STDERR); -# endif +#endif -# if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE +#if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE // In the debug version, Visual Studio pops up a separate dialog // offering a choice to debug the aborted program. We need to suppress // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement @@ -5458,21 +5448,24 @@ int UnitTest::Run() { _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG); (void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR); } -# endif +#endif } #endif // GTEST_OS_WINDOWS return internal::HandleExceptionsInMethodIfSupported( - impl(), - &internal::UnitTestImpl::RunAllTests, - "auxiliary test code (environments or event listeners)") ? 0 : 1; + impl(), &internal::UnitTestImpl::RunAllTests, + "auxiliary test code (environments or event listeners)") + ? 0 + : 1; } +#if GTEST_HAS_FILE_SYSTEM // Returns the working directory when the first TEST() or TEST_F() was // executed. const char* UnitTest::original_working_dir() const { return impl_->original_working_dir_.c_str(); } +#endif // GTEST_HAS_FILE_SYSTEM // Returns the TestSuite object for the test that's currently running, // or NULL if no test is running. @@ -5510,14 +5503,10 @@ UnitTest::parameterized_test_registry() GTEST_LOCK_EXCLUDED_(mutex_) { } // Creates an empty UnitTest. -UnitTest::UnitTest() { - impl_ = new internal::UnitTestImpl(this); -} +UnitTest::UnitTest() { impl_ = new internal::UnitTestImpl(this); } // Destructor of UnitTest. -UnitTest::~UnitTest() { - delete impl_; -} +UnitTest::~UnitTest() { delete impl_; } // Pushes a trace defined by SCOPED_TRACE() on to the per-thread // Google Test trace stack. @@ -5528,8 +5517,7 @@ void UnitTest::PushGTestTrace(const internal::TraceInfo& trace) } // Pops a trace from the per-thread Google Test trace stack. -void UnitTest::PopGTestTrace() - GTEST_LOCK_EXCLUDED_(mutex_) { +void UnitTest::PopGTestTrace() GTEST_LOCK_EXCLUDED_(mutex_) { internal::MutexLock lock(&mutex_); impl_->gtest_trace_stack().pop_back(); } @@ -5541,7 +5529,7 @@ UnitTestImpl::UnitTestImpl(UnitTest* parent) GTEST_DISABLE_MSC_WARNINGS_PUSH_(4355 /* using this in initializer */) default_global_test_part_result_reporter_(this), default_per_thread_test_part_result_reporter_(this), - GTEST_DISABLE_MSC_WARNINGS_POP_() global_test_part_result_repoter_( + GTEST_DISABLE_MSC_WARNINGS_POP_() global_test_part_result_reporter_( &default_global_test_part_result_reporter_), per_thread_test_part_result_reporter_( &default_per_thread_test_part_result_reporter_), @@ -5610,6 +5598,7 @@ void UnitTestImpl::SuppressTestEventsIfInSubprocess() { // UnitTestOptions. Must not be called before InitGoogleTest. 
void UnitTestImpl::ConfigureXmlOutput() { const std::string& output_format = UnitTestOptions::GetOutputFormat(); +#if GTEST_HAS_FILE_SYSTEM if (output_format == "xml") { listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter( UnitTestOptions::GetAbsolutePathToOutputFile().c_str())); @@ -5620,6 +5609,10 @@ void UnitTestImpl::ConfigureXmlOutput() { GTEST_LOG_(WARNING) << "WARNING: unrecognized output format \"" << output_format << "\" ignored."; } +#else + GTEST_LOG_(ERROR) << "ERROR: alternative output formats require " + << "GTEST_HAS_FILE_SYSTEM to be enabled"; +#endif // GTEST_HAS_FILE_SYSTEM } #if GTEST_CAN_STREAM_RESULTS_ @@ -5630,8 +5623,8 @@ void UnitTestImpl::ConfigureStreamingOutput() { if (!target.empty()) { const size_t pos = target.find(':'); if (pos != std::string::npos) { - listeners()->Append(new StreamingListener(target.substr(0, pos), - target.substr(pos+1))); + listeners()->Append( + new StreamingListener(target.substr(0, pos), target.substr(pos + 1))); } else { GTEST_LOG_(WARNING) << "unrecognized streaming target \"" << target << "\" ignored."; @@ -5737,9 +5730,9 @@ TestSuite* UnitTestImpl::GetTestSuite( auto* const new_test_suite = new TestSuite(test_suite_name, type_param, set_up_tc, tear_down_tc); + const UnitTestFilter death_test_suite_filter(kDeathTestSuiteFilter); // Is this a death test suite? - if (internal::UnitTestOptions::MatchesFilter(test_suite_name, - kDeathTestSuiteFilter)) { + if (death_test_suite_filter.MatchesName(test_suite_name)) { // Yes. Inserts the test suite after the last death test suite // defined so far. This only works when the test suites haven't // been shuffled. Otherwise we may end up running a death test @@ -5776,17 +5769,18 @@ bool UnitTestImpl::RunAllTests() { const bool gtest_is_initialized_before_run_all_tests = GTestIsInitialized(); // Do not run any test if the --help flag was specified. - if (g_help_flag) - return true; + if (g_help_flag) return true; // Repeats the call to the post-flag parsing initialization in case the // user didn't call InitGoogleTest. PostFlagParsingInit(); +#if GTEST_HAS_FILE_SYSTEM // Even if sharding is not on, test runners may want to use the // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding // protocol. internal::WriteToShardStatusFileIfNeeded(); +#endif // GTEST_HAS_FILE_SYSTEM // True if and only if we are in a subprocess for running a thread-safe-style // death test. @@ -5795,11 +5789,11 @@ bool UnitTestImpl::RunAllTests() { #if GTEST_HAS_DEATH_TEST in_subprocess_for_death_test = (internal_run_death_test_flag_.get() != nullptr); -# if defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) +#if defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) if (in_subprocess_for_death_test) { GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_(); } -# endif // defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) +#endif // defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_) #endif // GTEST_HAS_DEATH_TEST const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex, @@ -5807,9 +5801,9 @@ bool UnitTestImpl::RunAllTests() { // Compares the full test names with the filter to decide which // tests to run. - const bool has_tests_to_run = FilterTests(should_shard - ? HONOR_SHARDING_PROTOCOL - : IGNORE_SHARDING_PROTOCOL) > 0; + const bool has_tests_to_run = + FilterTests(should_shard ? HONOR_SHARDING_PROTOCOL + : IGNORE_SHARDING_PROTOCOL) > 0; // Lists the tests and exits if the --gtest_list_tests flag was specified. 
if (GTEST_FLAG_GET(list_tests)) { @@ -5818,9 +5812,7 @@ bool UnitTestImpl::RunAllTests() { return true; } - random_seed_ = GTEST_FLAG_GET(shuffle) - ? GetRandomSeedFromFlag(GTEST_FLAG_GET(random_seed)) - : 0; + random_seed_ = GetRandomSeedFromFlag(GTEST_FLAG_GET(random_seed)); // True if and only if at least one test has failed. bool failed = false; @@ -5968,6 +5960,7 @@ bool UnitTestImpl::RunAllTests() { return !failed; } +#if GTEST_HAS_FILE_SYSTEM // Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file // if the variable is present. If a file already exists at this location, this // function will write over it. If the variable is present, but the file cannot @@ -5987,6 +5980,7 @@ void WriteToShardStatusFileIfNeeded() { fclose(file); } } +#endif // GTEST_HAS_FILE_SYSTEM // Checks whether sharding is enabled by examining the relevant // environment variable values. If the variables are present, @@ -5994,8 +5988,7 @@ void WriteToShardStatusFileIfNeeded() { // an error and exits. If in_subprocess_for_death_test, sharding is // disabled because it must only be applied to the original test // process. Otherwise, we could filter out death tests we intended to execute. -bool ShouldShard(const char* total_shards_env, - const char* shard_index_env, +bool ShouldShard(const char* total_shards_env, const char* shard_index_env, bool in_subprocess_for_death_test) { if (in_subprocess_for_death_test) { return false; @@ -6007,27 +6000,27 @@ bool ShouldShard(const char* total_shards_env, if (total_shards == -1 && shard_index == -1) { return false; } else if (total_shards == -1 && shard_index != -1) { - const Message msg = Message() - << "Invalid environment variables: you have " - << kTestShardIndex << " = " << shard_index - << ", but have left " << kTestTotalShards << " unset.\n"; + const Message msg = Message() << "Invalid environment variables: you have " + << kTestShardIndex << " = " << shard_index + << ", but have left " << kTestTotalShards + << " unset.\n"; ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); } else if (total_shards != -1 && shard_index == -1) { const Message msg = Message() - << "Invalid environment variables: you have " - << kTestTotalShards << " = " << total_shards - << ", but have left " << kTestShardIndex << " unset.\n"; + << "Invalid environment variables: you have " + << kTestTotalShards << " = " << total_shards + << ", but have left " << kTestShardIndex << " unset.\n"; ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); } else if (shard_index < 0 || shard_index >= total_shards) { - const Message msg = Message() - << "Invalid environment variables: we require 0 <= " - << kTestShardIndex << " < " << kTestTotalShards - << ", but you have " << kTestShardIndex << "=" << shard_index - << ", " << kTestTotalShards << "=" << total_shards << ".\n"; + const Message msg = + Message() << "Invalid environment variables: we require 0 <= " + << kTestShardIndex << " < " << kTestTotalShards + << ", but you have " << kTestShardIndex << "=" << shard_index + << ", " << kTestTotalShards << "=" << total_shards << ".\n"; ColoredPrintf(GTestColor::kRed, "%s", msg.GetString().c_str()); fflush(stdout); exit(EXIT_FAILURE); @@ -6066,14 +6059,19 @@ bool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) { // each TestSuite and TestInfo object. 
// If shard_tests == true, further filters tests based on sharding // variables in the environment - see -// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md +// https://github.com/google/googletest/blob/main/docs/advanced.md // . Returns the number of tests that should run. int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) { - const int32_t total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ? - Int32FromEnvOrDie(kTestTotalShards, -1) : -1; - const int32_t shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ? - Int32FromEnvOrDie(kTestShardIndex, -1) : -1; - + const int32_t total_shards = shard_tests == HONOR_SHARDING_PROTOCOL + ? Int32FromEnvOrDie(kTestTotalShards, -1) + : -1; + const int32_t shard_index = shard_tests == HONOR_SHARDING_PROTOCOL + ? Int32FromEnvOrDie(kTestShardIndex, -1) + : -1; + + const PositiveAndNegativeUnitTestFilter gtest_flag_filter( + GTEST_FLAG_GET(filter)); + const UnitTestFilter disable_test_filter(kDisableTestFilter); // num_runnable_tests are the number of tests that will // run across all shards (i.e., match filter and are not disabled). // num_selected_tests are the number of tests to be run on @@ -6089,14 +6087,13 @@ int UnitTestImpl::FilterTests(ReactionToSharding shard_tests) { const std::string test_name(test_info->name()); // A test is disabled if test suite name or test name matches // kDisableTestFilter. - const bool is_disabled = internal::UnitTestOptions::MatchesFilter( - test_suite_name, kDisableTestFilter) || - internal::UnitTestOptions::MatchesFilter( - test_name, kDisableTestFilter); + const bool is_disabled = + disable_test_filter.MatchesName(test_suite_name) || + disable_test_filter.MatchesName(test_name); test_info->is_disabled_ = is_disabled; - const bool matches_filter = internal::UnitTestOptions::FilterMatchesTest( - test_suite_name, test_name); + const bool matches_filter = + gtest_flag_filter.MatchesTest(test_suite_name, test_name); test_info->matches_filter_ = matches_filter; const bool is_runnable = @@ -6175,6 +6172,7 @@ void UnitTestImpl::ListTestsMatchingFilter() { } } fflush(stdout); + #if GTEST_HAS_FILE_SYSTEM const std::string& output_format = UnitTestOptions::GetOutputFormat(); if (output_format == "xml" || output_format == "json") { FILE* fileout = OpenFileForWriting( @@ -6192,6 +6190,7 @@ void UnitTestImpl::ListTestsMatchingFilter() { fprintf(fileout, "%s", StringStreamToString(&stream).c_str()); fclose(fileout); } +#endif // GTEST_HAS_FILE_SYSTEM } // Sets the OS stack trace getter. @@ -6269,8 +6268,8 @@ void UnitTestImpl::UnshuffleTests() { // For example, if Foo() calls Bar(), which in turn calls // GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in // the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't. -std::string GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/, - int skip_count) { +GTEST_NO_INLINE_ GTEST_NO_TAIL_CALL_ std::string +GetCurrentOsStackTraceExceptTop(int skip_count) { // We pass skip_count + 1 to skip this wrapper function in addition // to what the user really wants to skip. return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1); @@ -6280,7 +6279,7 @@ std::string GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/, // suppress unreachable code warnings. 
namespace { class ClassUniqueToAlwaysTrue {}; -} +} // namespace bool IsTrue(bool condition) { return condition; } @@ -6288,8 +6287,7 @@ bool AlwaysTrue() { #if GTEST_HAS_EXCEPTIONS // This condition is always false so AlwaysTrue() never actually throws, // but it makes the compiler think that it may throw. - if (IsTrue(false)) - throw ClassUniqueToAlwaysTrue(); + if (IsTrue(false)) throw ClassUniqueToAlwaysTrue(); #endif // GTEST_HAS_EXCEPTIONS return true; } @@ -6401,8 +6399,7 @@ static bool ParseFlag(const char* str, const char* flag_name, String* value) { // GTEST_INTERNAL_PREFIX_ followed by "internal_" are considered Google Test // internal flags and do not trigger the help message. static bool HasGoogleTestFlagPrefix(const char* str) { - return (SkipPrefix("--", &str) || - SkipPrefix("-", &str) || + return (SkipPrefix("--", &str) || SkipPrefix("-", &str) || SkipPrefix("/", &str)) && !SkipPrefix(GTEST_FLAG_PREFIX_ "internal_", &str) && (SkipPrefix(GTEST_FLAG_PREFIX_, &str) || @@ -6506,18 +6503,18 @@ static const char kColorEncodedHelpMessage[] = " Generate a JSON or XML report in the given directory or with the " "given\n" " file name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n" -# if GTEST_CAN_STREAM_RESULTS_ +#if GTEST_CAN_STREAM_RESULTS_ " @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST@G:@YPORT@D\n" " Stream test results to the given server.\n" -# endif // GTEST_CAN_STREAM_RESULTS_ +#endif // GTEST_CAN_STREAM_RESULTS_ "\n" "Assertion Behavior:\n" -# if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS +#if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS " @G--" GTEST_FLAG_PREFIX_ "death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\n" " Set the default death test style.\n" -# endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS +#endif // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS " @G--" GTEST_FLAG_PREFIX_ "break_on_failure@D\n" " Turn assertion failures into debugger break-points.\n" @@ -6582,7 +6579,7 @@ static bool ParseGoogleTestFlag(const char* const arg) { return false; } -#if GTEST_USE_OWN_FLAGFILE_FLAG_ +#if GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM static void LoadFlagsFromFile(const std::string& path) { FILE* flagfile = posix::FOpen(path.c_str(), "r"); if (!flagfile) { @@ -6594,13 +6591,11 @@ static void LoadFlagsFromFile(const std::string& path) { std::vector lines; SplitString(contents, '\n', &lines); for (size_t i = 0; i < lines.size(); ++i) { - if (lines[i].empty()) - continue; - if (!ParseGoogleTestFlag(lines[i].c_str())) - g_help_flag = true; + if (lines[i].empty()) continue; + if (!ParseGoogleTestFlag(lines[i].c_str())) g_help_flag = true; } } -#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ +#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM // Parses the command line for Google Test flags, without initializing // other parts of Google Test. The type parameter CharType can be @@ -6617,15 +6612,13 @@ void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) { bool remove_flag = false; if (ParseGoogleTestFlag(arg)) { remove_flag = true; -#if GTEST_USE_OWN_FLAGFILE_FLAG_ +#if GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM } else if (ParseFlag(arg, "flagfile", &flagfile_value)) { GTEST_FLAG_SET(flagfile, flagfile_value); LoadFlagsFromFile(flagfile_value); remove_flag = true; -#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ - } else if (arg_string == "--help" || arg_string == "-h" || - arg_string == "-?" || arg_string == "/?" 
|| - HasGoogleTestFlagPrefix(arg)) { +#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ && GTEST_HAS_FILE_SYSTEM + } else if (arg_string == "--help" || HasGoogleTestFlagPrefix(arg)) { // Both help flag and unrecognized Google Test flags (excluding // internal ones) trigger help display. g_help_flag = true; @@ -6660,7 +6653,27 @@ void ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) { // Parses the command line for Google Test flags, without initializing // other parts of Google Test. void ParseGoogleTestFlagsOnly(int* argc, char** argv) { +#if GTEST_HAS_ABSL + if (*argc > 0) { + // absl::ParseCommandLine() requires *argc > 0. + auto positional_args = absl::flags_internal::ParseCommandLineImpl( + *argc, argv, absl::flags_internal::ArgvListAction::kRemoveParsedArgs, + absl::flags_internal::UsageFlagsAction::kHandleUsage, + absl::flags_internal::OnUndefinedFlag::kReportUndefined); + // Any command-line positional arguments not part of any command-line flag + // (or arguments to a flag) are copied back out to argv, with the program + // invocation name at position 0, and argc is resized. This includes + // positional arguments after the flag-terminating delimiter '--'. + // See https://abseil.io/docs/cpp/guides/flags. + std::copy(positional_args.begin(), positional_args.end(), argv); + if (static_cast(positional_args.size()) < *argc) { + argv[positional_args.size()] = nullptr; + *argc = static_cast(positional_args.size()); + } + } +#else ParseGoogleTestFlagsOnlyImpl(argc, argv); +#endif // Fix the value of *_NSGetArgc() on macOS, but if and only if // *_NSGetArgv() == argv @@ -6695,6 +6708,12 @@ void InitGoogleTestImpl(int* argc, CharType** argv) { #if GTEST_HAS_ABSL absl::InitializeSymbolizer(g_argvs[0].c_str()); + + // When using the Abseil Flags library, set the program usage message to the + // help message, but remove the color-encoding from the message first. 
+ absl::SetProgramUsageMessage(absl::StrReplaceAll( + kColorEncodedHelpMessage, + {{"@D", ""}, {"@R", ""}, {"@G", ""}, {"@Y", ""}, {"@@", "@"}})); #endif // GTEST_HAS_ABSL ParseGoogleTestFlagsOnly(argc, argv); @@ -6715,7 +6734,7 @@ void InitGoogleTestImpl(int* argc, CharType** argv) { void InitGoogleTest(int* argc, char** argv) { #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv); -#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) +#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) internal::InitGoogleTestImpl(argc, argv); #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) } @@ -6725,7 +6744,7 @@ void InitGoogleTest(int* argc, char** argv) { void InitGoogleTest(int* argc, wchar_t** argv) { #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv); -#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) +#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) internal::InitGoogleTestImpl(argc, argv); #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) } @@ -6741,42 +6760,67 @@ void InitGoogleTest() { #if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv); -#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) +#else // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) internal::InitGoogleTestImpl(&argc, argv); #endif // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_) } +#if !defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_) || \ + !defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) +// Returns the value of the first environment variable that is set and contains +// a non-empty string. If there are none, returns the "fallback" string. Adds +// the director-separator character as a suffix if not provided in the +// environment variable value. 
+static std::string GetDirFromEnv( + std::initializer_list environment_variables, + const char* fallback, char separator) { + for (const char* variable_name : environment_variables) { + const char* value = internal::posix::GetEnv(variable_name); + if (value != nullptr && value[0] != '\0') { + if (value[strlen(value) - 1] != separator) { + return std::string(value).append(1, separator); + } + return value; + } + } + return fallback; +} +#endif + std::string TempDir() { #if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_) return GTEST_CUSTOM_TEMPDIR_FUNCTION_(); -#elif GTEST_OS_WINDOWS_MOBILE - return "\\temp\\"; -#elif GTEST_OS_WINDOWS - const char* temp_dir = internal::posix::GetEnv("TEMP"); - if (temp_dir == nullptr || temp_dir[0] == '\0') { - return "\\temp\\"; - } else if (temp_dir[strlen(temp_dir) - 1] == '\\') { - return temp_dir; - } else { - return std::string(temp_dir) + "\\"; - } +#elif GTEST_OS_WINDOWS || GTEST_OS_WINDOWS_MOBILE + return GetDirFromEnv({"TEST_TMPDIR", "TEMP"}, "\\temp\\", '\\'); #elif GTEST_OS_LINUX_ANDROID - const char* temp_dir = internal::posix::GetEnv("TEST_TMPDIR"); - if (temp_dir == nullptr || temp_dir[0] == '\0') { - return "/data/local/tmp/"; - } else { - return temp_dir; - } -#elif GTEST_OS_LINUX - const char* temp_dir = internal::posix::GetEnv("TEST_TMPDIR"); - if (temp_dir == nullptr || temp_dir[0] == '\0') { - return "/tmp/"; - } else { - return temp_dir; - } + return GetDirFromEnv({"TEST_TMPDIR", "TMPDIR"}, "/data/local/tmp/", '/'); #else - return "/tmp/"; -#endif // GTEST_OS_WINDOWS_MOBILE + return GetDirFromEnv({"TEST_TMPDIR", "TMPDIR"}, "/tmp/", '/'); +#endif +} + +#if !defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) +// Returns the directory path (including terminating separator) of the current +// executable as derived from argv[0]. +static std::string GetCurrentExecutableDirectory() { + internal::FilePath argv_0(internal::GetArgvs()[0]); + return argv_0.RemoveFileName().string(); +} +#endif + +std::string SrcDir() { +#if defined(GTEST_CUSTOM_SRCDIR_FUNCTION_) + return GTEST_CUSTOM_SRCDIR_FUNCTION_(); +#elif GTEST_OS_WINDOWS || GTEST_OS_WINDOWS_MOBILE + return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(), + '\\'); +#elif GTEST_OS_LINUX_ANDROID + return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(), + '/'); +#else + return GetDirFromEnv({"TEST_SRCDIR"}, GetCurrentExecutableDirectory().c_str(), + '/'); +#endif } // Class ScopedTrace @@ -6793,8 +6837,7 @@ void ScopedTrace::PushTrace(const char* file, int line, std::string message) { } // Pops the info pushed by the c'tor. -ScopedTrace::~ScopedTrace() - GTEST_LOCK_EXCLUDED_(&UnitTest::mutex_) { +ScopedTrace::~ScopedTrace() GTEST_LOCK_EXCLUDED_(&UnitTest::mutex_) { UnitTest::GetInstance()->PopGTestTrace(); } diff --git a/deps/googletest/src/gtest_main.cc b/deps/googletest/src/gtest_main.cc index 46b27c3d7d5654..5abaa29fa1142c 100644 --- a/deps/googletest/src/gtest_main.cc +++ b/deps/googletest/src/gtest_main.cc @@ -28,15 +28,17 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include + #include "gtest/gtest.h" #if GTEST_OS_ESP8266 || GTEST_OS_ESP32 +// Arduino-like platforms: program entry points are setup/loop instead of main. + #if GTEST_OS_ESP8266 extern "C" { #endif -void setup() { - testing::InitGoogleTest(); -} + +void setup() { testing::InitGoogleTest(); } void loop() { RUN_ALL_TESTS(); } @@ -44,7 +46,16 @@ void loop() { RUN_ALL_TESTS(); } } #endif +#elif GTEST_OS_QURT +// QuRT: program entry point is main, but argc/argv are unusable. 
+ +GTEST_API_ int main() { + printf("Running main() from %s\n", __FILE__); + testing::InitGoogleTest(); + return RUN_ALL_TESTS(); +} #else +// Normal platforms: program entry point is main, argc/argv are initialized. GTEST_API_ int main(int argc, char **argv) { printf("Running main() from %s\n", __FILE__); From 09adb86c19de75aa3e90a081bd234f2e5dee3983 Mon Sep 17 00:00:00 2001 From: Almeida Date: Sun, 15 Jan 2023 13:12:12 +0000 Subject: [PATCH 158/191] tools: fix macro name in update-undici PR-URL: https://github.com/nodejs/node/pull/46217 Reviewed-By: Luigi Pinca Reviewed-By: Mohammed Keyvanzadeh --- tools/update-undici.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/update-undici.sh b/tools/update-undici.sh index d3642088c5a14e..ead449bcd1185f 100755 --- a/tools/update-undici.sh +++ b/tools/update-undici.sh @@ -32,10 +32,10 @@ rm -f deps/undici/undici.js FILE_PATH="$ROOT/src/undici_version.h" echo "// This is an auto generated file, please do not edit." > "$FILE_PATH" echo "// Refer to tools/update-undici.sh" >> "$FILE_PATH" - echo "#ifndef SRC_ACORN_VERSION_H_" >> "$FILE_PATH" - echo "#define SRC_ACORN_VERSION_H_" >> "$FILE_PATH" + echo "#ifndef SRC_UNDICI_VERSION_H_" >> "$FILE_PATH" + echo "#define SRC_UNDICI_VERSION_H_" >> "$FILE_PATH" echo "#define UNDICI_VERSION \"$UNDICI_VERSION\"" >> "$FILE_PATH" - echo "#endif // SRC_ACORN_VERSION_H_" >> "$FILE_PATH" + echo "#endif // SRC_UNDICI_VERSION_H_" >> "$FILE_PATH" ) mv undici-tmp/node_modules/undici deps/undici/src From fcca2d5ea6726a12126eec1adf5bb6e2b18effcc Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Sun, 15 Jan 2023 10:30:36 -0500 Subject: [PATCH 159/191] tools: update lint-md-dependencies Update to remark-preset-lint-node@3.4.0 rollup@3.10.0. 
PR-URL: https://github.com/nodejs/node/pull/46214 Reviewed-By: Rich Trott Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Colin Ihrig --- tools/lint-md/lint-md.mjs | 1 + tools/lint-md/package-lock.json | 52 ++++++++++++++++----------------- tools/lint-md/package.json | 4 +-- 3 files changed, 29 insertions(+), 28 deletions(-) diff --git a/tools/lint-md/lint-md.mjs b/tools/lint-md/lint-md.mjs index e9542ce5b66fd2..d3bcaafcefc131 100644 --- a/tools/lint-md/lint-md.mjs +++ b/tools/lint-md/lint-md.mjs @@ -20781,6 +20781,7 @@ const plugins = [ remarkLintProhibitedStrings, [ { yes: "End-of-Life" }, + { no: "filesystem", yes: "file system" }, { yes: "GitHub" }, { no: "hostname", yes: "host name" }, { yes: "JavaScript" }, diff --git a/tools/lint-md/package-lock.json b/tools/lint-md/package-lock.json index 383dbbfd8ccb6b..3f2e4bf045a111 100644 --- a/tools/lint-md/package-lock.json +++ b/tools/lint-md/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "dependencies": { "remark-parse": "^10.0.1", - "remark-preset-lint-node": "^3.3.1", + "remark-preset-lint-node": "^3.4.0", "remark-stringify": "^10.0.2", "to-vfile": "^7.2.3", "unified": "^10.1.2", @@ -18,7 +18,7 @@ "devDependencies": { "@rollup/plugin-commonjs": "^24.0.0", "@rollup/plugin-node-resolve": "^15.0.1", - "rollup": "^3.9.1", + "rollup": "^3.10.0", "rollup-plugin-cleanup": "^3.2.1" } }, @@ -351,9 +351,9 @@ "dev": true }, "node_modules/glob": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", - "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -1299,9 +1299,9 @@ ] }, "node_modules/minimatch": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.2.tgz", - "integrity": "sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.4.tgz", + "integrity": "sha512-U0iNYXt9wALljzfnGkhFSy5sAC6/SCR3JrHrlsdJz4kF8MvhTRQNiC59iUi1iqsitV7abrNAJWElVL9pdnoUgw==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -2123,9 +2123,9 @@ } }, "node_modules/remark-preset-lint-node": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.1.tgz", - "integrity": "sha512-TEkyYgmaiWd/oKy0i5Nyn/CW1nfufqtWna4WaO689bsE7fbzPxsmiHY7Q8hRq9KAkneknrJOKJKgcRBGKMuivQ==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.4.0.tgz", + "integrity": "sha512-8y2zZMwME1f7WGJSTAJGpAH6QRCQUV0Q3d8w3ecGoK/veRWX1gNpsRB3TH4JLDFF3v3zEOL7bs9Sexq47mT+MQ==", "dependencies": { "js-yaml": "^4.1.0", "remark-gfm": "^3.0.1", @@ -2227,9 +2227,9 @@ } }, "node_modules/rollup": { - "version": "3.9.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.9.1.tgz", - "integrity": "sha512-GswCYHXftN8ZKGVgQhTFUJB/NBXxrRGgO2NCy6E8s1rwEJ4Q9/VttNqcYfEvx4dTo4j58YqdC3OVztPzlKSX8w==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.10.0.tgz", + "integrity": "sha512-JmRYz44NjC1MjVF2VKxc0M1a97vn+cDxeqWmnwyAF4FvpjK8YFdHpaqvQB+3IxCvX05vJxKZkoMDU8TShhmJVA==", "dev": true, "bin": { "rollup": "dist/bin/rollup" @@ 
-2908,9 +2908,9 @@ "dev": true }, "glob": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", - "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3500,9 +3500,9 @@ "integrity": "sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w==" }, "minimatch": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.2.tgz", - "integrity": "sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==", + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.4.tgz", + "integrity": "sha512-U0iNYXt9wALljzfnGkhFSy5sAC6/SCR3JrHrlsdJz4kF8MvhTRQNiC59iUi1iqsitV7abrNAJWElVL9pdnoUgw==", "dev": true, "requires": { "brace-expansion": "^2.0.1" @@ -4128,9 +4128,9 @@ } }, "remark-preset-lint-node": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.3.1.tgz", - "integrity": "sha512-TEkyYgmaiWd/oKy0i5Nyn/CW1nfufqtWna4WaO689bsE7fbzPxsmiHY7Q8hRq9KAkneknrJOKJKgcRBGKMuivQ==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/remark-preset-lint-node/-/remark-preset-lint-node-3.4.0.tgz", + "integrity": "sha512-8y2zZMwME1f7WGJSTAJGpAH6QRCQUV0Q3d8w3ecGoK/veRWX1gNpsRB3TH4JLDFF3v3zEOL7bs9Sexq47mT+MQ==", "requires": { "js-yaml": "^4.1.0", "remark-gfm": "^3.0.1", @@ -4215,9 +4215,9 @@ } }, "rollup": { - "version": "3.9.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.9.1.tgz", - "integrity": "sha512-GswCYHXftN8ZKGVgQhTFUJB/NBXxrRGgO2NCy6E8s1rwEJ4Q9/VttNqcYfEvx4dTo4j58YqdC3OVztPzlKSX8w==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.10.0.tgz", + "integrity": "sha512-JmRYz44NjC1MjVF2VKxc0M1a97vn+cDxeqWmnwyAF4FvpjK8YFdHpaqvQB+3IxCvX05vJxKZkoMDU8TShhmJVA==", "dev": true, "requires": { "fsevents": "~2.3.2" diff --git a/tools/lint-md/package.json b/tools/lint-md/package.json index 88f9ec7a9d40e7..bb400770707877 100644 --- a/tools/lint-md/package.json +++ b/tools/lint-md/package.json @@ -7,7 +7,7 @@ }, "dependencies": { "remark-parse": "^10.0.1", - "remark-preset-lint-node": "^3.3.1", + "remark-preset-lint-node": "^3.4.0", "remark-stringify": "^10.0.2", "to-vfile": "^7.2.3", "unified": "^10.1.2", @@ -16,7 +16,7 @@ "devDependencies": { "@rollup/plugin-commonjs": "^24.0.0", "@rollup/plugin-node-resolve": "^15.0.1", - "rollup": "^3.9.1", + "rollup": "^3.10.0", "rollup-plugin-cleanup": "^3.2.1" } } From db617222da1cac86b80f30689d4f40bec3bf8b1d Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Sun, 15 Jan 2023 15:24:24 -0500 Subject: [PATCH 160/191] meta: update AUTHORS PR-URL: https://github.com/nodejs/node/pull/46215 Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott --- AUTHORS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index ea7b1393b1c623..0ac475b8289dc0 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3593,5 +3593,7 @@ Eric Mutta Vadim Aaron Friel Vaishno Chaitanya +Jonathan Diaz <50384299+jdiaz-dev@users.noreply.github.com> +Mike Roth # Generated by tools/update-authors.mjs From 4f2d9ea6da645859dbe65aa93b4e64734518d114 Mon Sep 17 00:00:00 2001 From: 
Kevin Eady <8634912+KevinEady@users.noreply.github.com> Date: Sun, 15 Jan 2023 22:08:45 +0100 Subject: [PATCH 161/191] doc: add Node-API media link PR-URL: https://github.com/nodejs/node/pull/46189 Reviewed-By: Chengzhong Wu Reviewed-By: Michael Dawson Reviewed-By: Luigi Pinca --- doc/api/n-api.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 004121ee601b49..d31d72783761f2 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -80,7 +80,8 @@ for `node-addon-api`. The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an excellent orientation and tips for developers just getting started with -Node-API and `node-addon-api`. +Node-API and `node-addon-api`. Additional media resources can be found on the +[Node-API Media][] page. ## Implications of ABI stability @@ -6329,6 +6330,7 @@ the add-on's file name during loading. [GitHub releases]: https://help.github.com/en/github/administering-a-repository/about-releases [LLVM]: https://llvm.org [Native Abstractions for Node.js]: https://github.com/nodejs/nan +[Node-API Media]: https://github.com/nodejs/abi-stable-node/blob/HEAD/node-api-media.md [Object lifetime management]: #object-lifetime-management [Object wrap]: #object-wrap [Section 12.10.4]: https://tc39.github.io/ecma262/#sec-instanceofoperator From edcd4fc5760fccd2baeb22fb0c3fca6da6e12ff3 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Mon, 16 Jan 2023 07:10:42 +0900 Subject: [PATCH 162/191] lib: use kEmptyObject and update JSDoc in webstreams Use kEmptyObject as default value of strategy. Plus, make reason and chunk as optional. And refactor to use validateBuffer. Refs: https://github.com/nodejs/node/blob/main/doc/api/webstreams.md#transformstreamdefaultcontrollerenqueuechunk Refs: https://github.com/nodejs/node/blob/main/doc/api/webstreams.md#transformstreamdefaultcontrollererrorreason Refs: https://github.com/nodejs/node/blob/main/doc/api/webstreams.md#writablestreamdefaultwriterabortreason Refs: https://github.com/nodejs/node/blob/main/doc/api/webstreams.md#writablestreamdefaultwriterwritechunk PR-URL: https://github.com/nodejs/node/pull/46183 Reviewed-By: Antoine du Hamel Reviewed-By: Yagiz Nizipli Reviewed-By: James M Snell Reviewed-By: Minwoo Jung --- lib/internal/webstreams/readablestream.js | 11 +---------- lib/internal/webstreams/transformstream.js | 9 +++++---- lib/internal/webstreams/writablestream.js | 7 ++++--- 3 files changed, 10 insertions(+), 17 deletions(-) diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index 1f1622551c9559..e94f8fbcf156c4 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -1132,16 +1132,7 @@ class ReadableByteStreamController { enqueue(chunk) { if (!isReadableByteStreamController(this)) throw new ERR_INVALID_THIS('ReadableByteStreamController'); - if (!isArrayBufferView(chunk)) { - throw new ERR_INVALID_ARG_TYPE( - 'chunk', - [ - 'Buffer', - 'TypedArray', - 'DataView', - ], - chunk); - } + validateBuffer(chunk); const chunkByteLength = ArrayBufferViewGetByteLength(chunk); const chunkBuffer = ArrayBufferViewGetBuffer(chunk); const chunkBufferByteLength = ArrayBufferPrototypeGetByteLength(chunkBuffer); diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js index 38342b41804385..6e119bde63bb2f 100644 --- a/lib/internal/webstreams/transformstream.js +++ 
b/lib/internal/webstreams/transformstream.js @@ -26,6 +26,7 @@ const { const { createDeferredPromise, customInspectSymbol: kInspect, + kEmptyObject, kEnumerableProperty, } = require('internal/util'); @@ -117,8 +118,8 @@ class TransformStream { */ constructor( transformer = null, - writableStrategy = {}, - readableStrategy = {}) { + writableStrategy = kEmptyObject, + readableStrategy = kEmptyObject) { const readableType = transformer?.readableType; const writableType = transformer?.writableType; const start = transformer?.start; @@ -292,7 +293,7 @@ class TransformStreamDefaultController { } /** - * @param {any} chunk + * @param {any} [chunk] */ enqueue(chunk = undefined) { if (!isTransformStreamDefaultController(this)) @@ -301,7 +302,7 @@ class TransformStreamDefaultController { } /** - * @param {any} reason + * @param {any} [reason] */ error(reason = undefined) { if (!isTransformStreamDefaultController(this)) diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js index b9b1dfedd04659..3d5851753057a9 100644 --- a/lib/internal/webstreams/writablestream.js +++ b/lib/internal/webstreams/writablestream.js @@ -33,6 +33,7 @@ const { const { createDeferredPromise, customInspectSymbol: kInspect, + kEmptyObject, kEnumerableProperty, SideEffectFreeRegExpPrototypeSymbolReplace, } = require('internal/util'); @@ -148,7 +149,7 @@ class WritableStream { * @param {UnderlyingSink} [sink] * @param {QueuingStrategy} [strategy] */ - constructor(sink = null, strategy = {}) { + constructor(sink = null, strategy = kEmptyObject) { const type = sink?.type; if (type !== undefined) throw new ERR_INVALID_ARG_VALUE.RangeError('type', type); @@ -217,7 +218,7 @@ class WritableStream { } /** - * @param {any} reason + * @param {any} [reason] * @returns {Promise} */ abort(reason = undefined) { @@ -475,7 +476,7 @@ class WritableStreamDefaultWriter { } /** - * @param {any} chunk + * @param {any} [chunk] * @returns {Promise} */ write(chunk = undefined) { From 3ce39bbcb7f171ec2f2797e3328fd5d1e7ac5b4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Mon, 16 Jan 2023 03:44:00 +0100 Subject: [PATCH 163/191] src: hide kMaxDigestMultiplier outside HKDF impl There is no reason to expose this constant outside of the HKDF implementation, especially with such a generic name. PR-URL: https://github.com/nodejs/node/pull/46206 Reviewed-By: Ben Noordhuis Reviewed-By: Filip Skokan Reviewed-By: Yagiz Nizipli --- src/crypto/crypto_hkdf.cc | 4 ++++ src/crypto/crypto_hkdf.h | 2 -- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/crypto/crypto_hkdf.cc b/src/crypto/crypto_hkdf.cc index 43bf8a93505bb7..7663dd69374db7 100644 --- a/src/crypto/crypto_hkdf.cc +++ b/src/crypto/crypto_hkdf.cc @@ -87,6 +87,10 @@ Maybe HKDFTraits::AdditionalConfig( : info.ToByteSource(); params->length = args[offset + 4].As()->Value(); + // HKDF-Expand computes up to 255 HMAC blocks, each having as many bits as the + // output of the hash function. 255 is a hard limit because HKDF appends an + // 8-bit counter to each HMAC'd message, starting at 1. 
+ constexpr size_t kMaxDigestMultiplier = 255; size_t max_length = EVP_MD_size(params->digest) * kMaxDigestMultiplier; if (params->length > max_length) { THROW_ERR_CRYPTO_INVALID_KEYLEN(env); diff --git a/src/crypto/crypto_hkdf.h b/src/crypto/crypto_hkdf.h index ef2d03c2091595..c4a537cef8a792 100644 --- a/src/crypto/crypto_hkdf.h +++ b/src/crypto/crypto_hkdf.h @@ -11,8 +11,6 @@ namespace node { namespace crypto { -static constexpr size_t kMaxDigestMultiplier = 255; - struct HKDFConfig final : public MemoryRetainer { CryptoJobMode mode; size_t length; From 08a6a61575ea6dc6055bd1166f23326aee285aee Mon Sep 17 00:00:00 2001 From: theanarkh Date: Mon, 16 Jan 2023 21:57:39 +0800 Subject: [PATCH 164/191] src,lib: the handle keeps loop alive in cluster rr mode PR-URL: https://github.com/nodejs/node/pull/46161 Reviewed-By: Ben Noordhuis Reviewed-By: Minwoo Jung --- lib/internal/cluster/child.js | 34 +++++++++++++++---- test/parallel/test-cluster-rr-handle-close.js | 18 ++++++++++ .../test-cluster-rr-handle-keep-loop-alive.js | 23 +++++++++++++ .../test-cluster-rr-handle-ref-unref.js | 20 +++++++++++ 4 files changed, 88 insertions(+), 7 deletions(-) create mode 100644 test/parallel/test-cluster-rr-handle-close.js create mode 100644 test/parallel/test-cluster-rr-handle-keep-loop-alive.js create mode 100644 test/parallel/test-cluster-rr-handle-ref-unref.js diff --git a/lib/internal/cluster/child.js b/lib/internal/cluster/child.js index f960878a70aca3..5078b4d701edba 100644 --- a/lib/internal/cluster/child.js +++ b/lib/internal/cluster/child.js @@ -15,6 +15,9 @@ const EventEmitter = require('events'); const { owner_symbol } = require('internal/async_hooks').symbols; const Worker = require('internal/cluster/worker'); const { internal, sendHelper } = require('internal/cluster/utils'); +const { TIMEOUT_MAX } = require('internal/timers'); +const { setInterval, clearInterval } = require('timers'); + const cluster = new EventEmitter(); const handles = new SafeMap(); const indexes = new SafeMap(); @@ -160,6 +163,21 @@ function rr(message, { indexesKey, index }, cb) { let key = message.key; + let fakeHandle = null; + + function ref() { + if (!fakeHandle) { + fakeHandle = setInterval(noop, TIMEOUT_MAX); + } + } + + function unref() { + if (fakeHandle) { + clearInterval(fakeHandle); + fakeHandle = null; + } + } + function listen(backlog) { // TODO(bnoordhuis) Send a message to the primary that tells it to // update the backlog size. The actual backlog should probably be @@ -175,7 +193,11 @@ function rr(message, { indexesKey, index }, cb) { // the primary. if (key === undefined) return; - + unref(); + // If the handle is the last handle in process, + // the parent process will delete the handle when worker process exits. + // So it is ok if the close message get lost. + // See the comments of https://github.com/nodejs/node/pull/46161 send({ act: 'close', key }); handles.delete(key); removeIndexesKey(indexesKey, index); @@ -189,12 +211,10 @@ function rr(message, { indexesKey, index }, cb) { return 0; } - // Faux handle. Mimics a TCPWrap with just enough fidelity to get away - // with it. Fools net.Server into thinking that it's backed by a real - // handle. Use a noop function for ref() and unref() because the control - // channel is going to keep the worker alive anyway. - const handle = { close, listen, ref: noop, unref: noop }; - + // Faux handle. net.Server is not associated with handle, + // so we control its state(ref or unref) by setInterval. 
+ const handle = { close, listen, ref, unref }; + handle.ref(); if (message.sockname) { handle.getsockname = getsockname; // TCP handles only. } diff --git a/test/parallel/test-cluster-rr-handle-close.js b/test/parallel/test-cluster-rr-handle-close.js new file mode 100644 index 00000000000000..fb8e9740d665b7 --- /dev/null +++ b/test/parallel/test-cluster-rr-handle-close.js @@ -0,0 +1,18 @@ +'use strict'; + +const common = require('../common'); +const cluster = require('cluster'); +const net = require('net'); + +cluster.schedulingPolicy = cluster.SCHED_RR; + +if (cluster.isPrimary) { + const worker = cluster.fork(); + worker.on('exit', common.mustCall()); +} else { + const server = net.createServer(common.mustNotCall()); + server.listen(0, common.mustCall(() => { + process.channel.unref(); + server.close(); + })); +} diff --git a/test/parallel/test-cluster-rr-handle-keep-loop-alive.js b/test/parallel/test-cluster-rr-handle-keep-loop-alive.js new file mode 100644 index 00000000000000..0b18408a192ba1 --- /dev/null +++ b/test/parallel/test-cluster-rr-handle-keep-loop-alive.js @@ -0,0 +1,23 @@ +'use strict'; + +const common = require('../common'); +const cluster = require('cluster'); +const net = require('net'); +const assert = require('assert'); + +cluster.schedulingPolicy = cluster.SCHED_RR; + +if (cluster.isPrimary) { + let exited = false; + const worker = cluster.fork(); + worker.on('exit', () => { + exited = true; + }); + setTimeout(() => { + assert.ok(!exited); + worker.kill(); + }, 3000); +} else { + const server = net.createServer(common.mustNotCall()); + server.listen(0, common.mustCall(() => process.channel.unref())); +} diff --git a/test/parallel/test-cluster-rr-handle-ref-unref.js b/test/parallel/test-cluster-rr-handle-ref-unref.js new file mode 100644 index 00000000000000..403bbefd4dd69b --- /dev/null +++ b/test/parallel/test-cluster-rr-handle-ref-unref.js @@ -0,0 +1,20 @@ +'use strict'; + +const common = require('../common'); +const cluster = require('cluster'); +const net = require('net'); + +cluster.schedulingPolicy = cluster.SCHED_RR; + +if (cluster.isPrimary) { + const worker = cluster.fork(); + worker.on('exit', common.mustCall()); +} else { + const server = net.createServer(common.mustNotCall()); + server.listen(0, common.mustCall(() => { + server.ref(); + server.unref(); + process.channel.unref(); + })); + server.unref(); +} From be85d5a6eb42514dfecff231c0bf72b993b23c90 Mon Sep 17 00:00:00 2001 From: Mordy Tikotzky Date: Mon, 16 Jan 2023 18:56:07 -0500 Subject: [PATCH 165/191] doc: update to match changed `--dns-result-order` default Update documentation to match the changed `--dns-result-order` default value in Node.js 17.0.0. PR-URL: https://github.com/nodejs/node/pull/46148 Reviewed-By: Luigi Pinca Reviewed-By: Michael Dawson Reviewed-By: Matteo Collina Reviewed-By: Paolo Insogna Reviewed-By: Colin Ihrig --- doc/api/dns.md | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/doc/api/dns.md b/doc/api/dns.md index 5c7a9f16bb1239..59a56c7d1c2aa8 100644 --- a/doc/api/dns.md +++ b/doc/api/dns.md @@ -773,6 +773,10 @@ one of the [DNS error codes][]. added: - v16.4.0 - v14.18.0 +changes: + - version: v17.0.0 + pr-url: https://github.com/nodejs/node/pull/39987 + description: Changed default value to `verbatim`. --> * `order` {string} must be `'ipv4first'` or `'verbatim'`. @@ -783,7 +787,7 @@ Set the default value of `verbatim` in [`dns.lookup()`][] and * `ipv4first`: sets default `verbatim` `false`. 
* `verbatim`: sets default `verbatim` `true`. -The default is `ipv4first` and [`dns.setDefaultResultOrder()`][] have higher +The default is `verbatim` and [`dns.setDefaultResultOrder()`][] have higher priority than [`--dns-result-order`][]. When using [worker threads][], [`dns.setDefaultResultOrder()`][] from the main thread won't affect the default dns orders in workers. @@ -1328,6 +1332,10 @@ is one of the [DNS error codes][]. added: - v16.4.0 - v14.18.0 +changes: + - version: v17.0.0 + pr-url: https://github.com/nodejs/node/pull/39987 + description: Changed default value to `verbatim`. --> * `order` {string} must be `'ipv4first'` or `'verbatim'`. @@ -1338,7 +1346,7 @@ Set the default value of `verbatim` in [`dns.lookup()`][] and * `ipv4first`: sets default `verbatim` `false`. * `verbatim`: sets default `verbatim` `true`. -The default is `ipv4first` and [`dnsPromises.setDefaultResultOrder()`][] have +The default is `verbatim` and [`dnsPromises.setDefaultResultOrder()`][] have higher priority than [`--dns-result-order`][]. When using [worker threads][], [`dnsPromises.setDefaultResultOrder()`][] from the main thread won't affect the default dns orders in workers. From 3bf2371a5758ac2f775ee0ee66975565d86ea209 Mon Sep 17 00:00:00 2001 From: Jiawen Geng Date: Fri, 13 Jan 2023 01:41:46 +0000 Subject: [PATCH 166/191] build: add extra semi check MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/46194 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig Reviewed-By: Darshan Sen --- node.gyp | 1 + node.gypi | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/node.gyp b/node.gyp index 21e3ed50befd58..cec24aed034362 100644 --- a/node.gyp +++ b/node.gyp @@ -98,6 +98,7 @@ '-W', '-Wno-unused-parameter', '-Werror=undefined-inline', + '-Werror=extra-semi', ], }, diff --git a/node.gypi b/node.gypi index ecd2ea6ea87deb..e8dd7fe2be3100 100644 --- a/node.gypi +++ b/node.gypi @@ -27,7 +27,7 @@ 'conditions': [ [ 'clang==1', { - 'cflags': [ '-Werror=undefined-inline', ] + 'cflags': [ '-Werror=undefined-inline', '-Werror=extra-semi'] }], [ '"<(_type)"=="executable"', { 'msvs_settings': { From 340d76accbc0adb668cc9983cd36e076fe106bc0 Mon Sep 17 00:00:00 2001 From: Jiawen Geng Date: Mon, 16 Jan 2023 06:53:51 +0000 Subject: [PATCH 167/191] deps: cherrypick simdutf patch MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/46194 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig Reviewed-By: Darshan Sen --- deps/simdutf/simdutf.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/simdutf/simdutf.h b/deps/simdutf/simdutf.h index 3514f117dd2360..f3f63e78d94d7b 100644 --- a/deps/simdutf/simdutf.h +++ b/deps/simdutf/simdutf.h @@ -1599,7 +1599,7 @@ class implementation { * * @return a mask of all required `internal::instruction_set::` values */ - virtual uint32_t required_instruction_sets() const { return _required_instruction_sets; }; + virtual uint32_t required_instruction_sets() const { return _required_instruction_sets; } /** From 7c03a3d676c065f2f1a9e95d8d4689431bd48b59 Mon Sep 17 00:00:00 2001 From: npm CLI robot Date: Mon, 16 Jan 2023 22:38:23 -0500 Subject: [PATCH 168/191] deps: upgrade npm to 9.3.0 MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/46193 Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Jiawen Geng Reviewed-By: Colin Ihrig Reviewed-By: Myles Borins Reviewed-By: Rich Trott --- Makefile | 2 +- deps/npm/docs/content/commands/npm-access.md | 2 + deps/npm/docs/content/commands/npm-adduser.md | 3 +- deps/npm/docs/content/commands/npm-ci.md | 4 +- deps/npm/docs/content/commands/npm-dedupe.md | 4 +- .../docs/content/commands/npm-find-dupes.md | 4 +- deps/npm/docs/content/commands/npm-init.md | 2 +- .../content/commands/npm-install-ci-test.md | 4 +- .../docs/content/commands/npm-install-test.md | 4 +- deps/npm/docs/content/commands/npm-install.md | 4 +- deps/npm/docs/content/commands/npm-link.md | 4 +- deps/npm/docs/content/commands/npm-login.md | 3 +- deps/npm/docs/content/commands/npm-ls.md | 2 +- deps/npm/docs/content/commands/npm-owner.md | 2 - deps/npm/docs/content/commands/npm-publish.md | 4 +- deps/npm/docs/content/commands/npm-root.md | 2 + deps/npm/docs/content/commands/npm-update.md | 4 +- deps/npm/docs/content/commands/npm.md | 6 +- .../docs/content/configuring-npm/install.md | 6 +- deps/npm/docs/content/using-npm/config.md | 11 +- deps/npm/docs/content/using-npm/registry.md | 2 +- deps/npm/docs/content/using-npm/removal.md | 4 +- deps/npm/docs/content/using-npm/scripts.md | 2 +- deps/npm/docs/output/commands/npm-access.html | 1 + .../npm/docs/output/commands/npm-adduser.html | 3 +- deps/npm/docs/output/commands/npm-ci.html | 4 +- deps/npm/docs/output/commands/npm-dedupe.html | 4 +- .../docs/output/commands/npm-find-dupes.html | 4 +- deps/npm/docs/output/commands/npm-init.html | 2 +- .../output/commands/npm-install-ci-test.html | 4 +- .../output/commands/npm-install-test.html | 4 +- .../npm/docs/output/commands/npm-install.html | 4 +- deps/npm/docs/output/commands/npm-link.html | 4 +- deps/npm/docs/output/commands/npm-login.html | 3 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm-owner.html | 1 - .../npm/docs/output/commands/npm-publish.html | 4 +- deps/npm/docs/output/commands/npm-root.html | 1 + deps/npm/docs/output/commands/npm-update.html | 4 +- deps/npm/docs/output/commands/npm.html | 5 +- .../docs/output/configuring-npm/install.html | 6 +- deps/npm/docs/output/using-npm/config.html | 11 +- deps/npm/docs/output/using-npm/registry.html | 2 +- deps/npm/docs/output/using-npm/removal.html | 4 +- deps/npm/docs/output/using-npm/scripts.html | 2 +- deps/npm/lib/arborist-cmd.js | 31 +- deps/npm/lib/base-command.js | 81 +- deps/npm/lib/cli.js | 13 +- deps/npm/lib/commands/access.js | 2 - deps/npm/lib/commands/adduser.js | 2 - deps/npm/lib/commands/audit.js | 2 +- deps/npm/lib/commands/cache.js | 15 +- deps/npm/lib/commands/ci.js | 9 +- deps/npm/lib/commands/completion.js | 25 +- deps/npm/lib/commands/config.js | 11 +- deps/npm/lib/commands/diff.js | 5 +- deps/npm/lib/commands/dist-tag.js | 17 +- deps/npm/lib/commands/edit.js | 16 +- deps/npm/lib/commands/exec.js | 37 +- deps/npm/lib/commands/find-dupes.js | 2 +- deps/npm/lib/commands/fund.js | 95 +- deps/npm/lib/commands/help-search.js | 9 +- deps/npm/lib/commands/help.js | 99 +- deps/npm/lib/commands/hook.js | 18 +- deps/npm/lib/commands/init.js | 39 +- deps/npm/lib/commands/install-ci-test.js | 2 +- deps/npm/lib/commands/install-test.js | 2 +- deps/npm/lib/commands/login.js | 2 - deps/npm/lib/commands/logout.js | 2 - deps/npm/lib/commands/ls.js | 10 +- deps/npm/lib/commands/org.js | 11 +- 
deps/npm/lib/commands/outdated.js | 4 +- deps/npm/lib/commands/owner.js | 7 +- deps/npm/lib/commands/pack.js | 5 +- deps/npm/lib/commands/ping.js | 7 +- deps/npm/lib/commands/pkg.js | 7 +- deps/npm/lib/commands/prefix.js | 1 - deps/npm/lib/commands/profile.js | 4 +- deps/npm/lib/commands/publish.js | 7 +- deps/npm/lib/commands/query.js | 5 +- deps/npm/lib/commands/restart.js | 3 +- deps/npm/lib/commands/root.js | 1 - deps/npm/lib/commands/run-script.js | 17 +- deps/npm/lib/commands/search.js | 1 - deps/npm/lib/commands/start.js | 3 +- deps/npm/lib/commands/stop.js | 3 +- deps/npm/lib/commands/test.js | 3 +- deps/npm/lib/commands/token.js | 13 +- deps/npm/lib/commands/uninstall.js | 17 +- deps/npm/lib/commands/unpublish.js | 7 +- deps/npm/lib/commands/update.js | 4 +- deps/npm/lib/commands/version.js | 15 +- deps/npm/lib/commands/view.js | 89 +- deps/npm/lib/commands/whoami.js | 1 - deps/npm/lib/lifecycle-cmd.js | 6 +- deps/npm/lib/npm.js | 201 +- deps/npm/lib/package-url-cmd.js | 7 +- deps/npm/lib/utils/config/definitions.js | 16 +- deps/npm/lib/utils/error-message.js | 22 +- deps/npm/lib/utils/exit-handler.js | 20 +- deps/npm/lib/utils/explain-dep.js | 6 +- deps/npm/lib/utils/log-file.js | 8 +- deps/npm/lib/utils/npm-usage.js | 3 +- deps/npm/lib/utils/open-url.js | 2 +- deps/npm/lib/utils/queryable.js | 15 +- deps/npm/lib/utils/read-user-info.js | 6 +- deps/npm/lib/utils/reify-output.js | 4 +- deps/npm/lib/workspaces/get-workspaces.js | 2 +- deps/npm/man/man1/npm-access.1 | 4 +- deps/npm/man/man1/npm-adduser.1 | 4 +- deps/npm/man/man1/npm-audit.1 | 2 +- deps/npm/man/man1/npm-bugs.1 | 2 +- deps/npm/man/man1/npm-cache.1 | 2 +- deps/npm/man/man1/npm-ci.1 | 6 +- deps/npm/man/man1/npm-completion.1 | 2 +- deps/npm/man/man1/npm-config.1 | 2 +- deps/npm/man/man1/npm-dedupe.1 | 6 +- deps/npm/man/man1/npm-deprecate.1 | 2 +- deps/npm/man/man1/npm-diff.1 | 2 +- deps/npm/man/man1/npm-dist-tag.1 | 2 +- deps/npm/man/man1/npm-docs.1 | 2 +- deps/npm/man/man1/npm-doctor.1 | 2 +- deps/npm/man/man1/npm-edit.1 | 2 +- deps/npm/man/man1/npm-exec.1 | 2 +- deps/npm/man/man1/npm-explain.1 | 2 +- deps/npm/man/man1/npm-explore.1 | 2 +- deps/npm/man/man1/npm-find-dupes.1 | 6 +- deps/npm/man/man1/npm-fund.1 | 2 +- deps/npm/man/man1/npm-help-search.1 | 2 +- deps/npm/man/man1/npm-help.1 | 2 +- deps/npm/man/man1/npm-hook.1 | 2 +- deps/npm/man/man1/npm-init.1 | 4 +- deps/npm/man/man1/npm-install-ci-test.1 | 6 +- deps/npm/man/man1/npm-install-test.1 | 6 +- deps/npm/man/man1/npm-install.1 | 6 +- deps/npm/man/man1/npm-link.1 | 6 +- deps/npm/man/man1/npm-login.1 | 4 +- deps/npm/man/man1/npm-logout.1 | 2 +- deps/npm/man/man1/npm-ls.1 | 4 +- deps/npm/man/man1/npm-org.1 | 2 +- deps/npm/man/man1/npm-outdated.1 | 2 +- deps/npm/man/man1/npm-owner.1 | 4 +- deps/npm/man/man1/npm-pack.1 | 2 +- deps/npm/man/man1/npm-ping.1 | 2 +- deps/npm/man/man1/npm-pkg.1 | 2 +- deps/npm/man/man1/npm-prefix.1 | 2 +- deps/npm/man/man1/npm-profile.1 | 2 +- deps/npm/man/man1/npm-prune.1 | 2 +- deps/npm/man/man1/npm-publish.1 | 4 +- deps/npm/man/man1/npm-query.1 | 2 +- deps/npm/man/man1/npm-rebuild.1 | 2 +- deps/npm/man/man1/npm-repo.1 | 2 +- deps/npm/man/man1/npm-restart.1 | 2 +- deps/npm/man/man1/npm-root.1 | 4 +- deps/npm/man/man1/npm-run-script.1 | 2 +- deps/npm/man/man1/npm-search.1 | 2 +- deps/npm/man/man1/npm-shrinkwrap.1 | 2 +- deps/npm/man/man1/npm-star.1 | 2 +- deps/npm/man/man1/npm-stars.1 | 2 +- deps/npm/man/man1/npm-start.1 | 2 +- deps/npm/man/man1/npm-stop.1 | 2 +- deps/npm/man/man1/npm-team.1 | 2 +- deps/npm/man/man1/npm-test.1 | 2 +- 
deps/npm/man/man1/npm-token.1 | 2 +- deps/npm/man/man1/npm-uninstall.1 | 2 +- deps/npm/man/man1/npm-unpublish.1 | 2 +- deps/npm/man/man1/npm-unstar.1 | 2 +- deps/npm/man/man1/npm-update.1 | 6 +- deps/npm/man/man1/npm-version.1 | 2 +- deps/npm/man/man1/npm-view.1 | 2 +- deps/npm/man/man1/npm-whoami.1 | 2 +- deps/npm/man/man1/npm.1 | 8 +- deps/npm/man/man1/npx.1 | 2 +- deps/npm/man/man5/folders.5 | 2 +- deps/npm/man/man5/install.5 | 8 +- deps/npm/man/man5/npm-global.5 | 2 +- deps/npm/man/man5/npm-json.5 | 2 +- deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +- deps/npm/man/man5/npmrc.5 | 2 +- deps/npm/man/man5/package-json.5 | 2 +- deps/npm/man/man5/package-lock-json.5 | 2 +- deps/npm/man/man7/config.7 | 10 +- deps/npm/man/man7/dependency-selectors.7 | 2 +- deps/npm/man/man7/developers.7 | 2 +- deps/npm/man/man7/logging.7 | 2 +- deps/npm/man/man7/orgs.7 | 2 +- deps/npm/man/man7/package-spec.7 | 2 +- deps/npm/man/man7/registry.7 | 4 +- deps/npm/man/man7/removal.7 | 4 +- deps/npm/man/man7/scope.7 | 2 +- deps/npm/man/man7/scripts.7 | 4 +- deps/npm/man/man7/workspaces.7 | 2 +- .../arborist/lib/arborist/build-ideal-tree.js | 4 +- .../arborist/lib/arborist/load-virtual.js | 4 +- .../arborist/lib/arborist/load-workspaces.js | 22 +- .../@npmcli/arborist/lib/override-resolves.js | 2 +- .../@npmcli/arborist/lib/override-set.js | 29 +- .../@npmcli/arborist/lib/place-dep.js | 3 +- .../@npmcli/arborist/package.json | 7 +- .../node_modules/@npmcli/config/lib/index.js | 49 +- .../node_modules/@npmcli/config/package.json | 4 +- .../node_modules/libnpmaccess/package.json | 2 +- deps/npm/node_modules/libnpmdiff/package.json | 6 +- deps/npm/node_modules/libnpmexec/package.json | 8 +- deps/npm/node_modules/libnpmfund/package.json | 6 +- deps/npm/node_modules/libnpmhook/package.json | 2 +- deps/npm/node_modules/libnpmorg/package.json | 2 +- deps/npm/node_modules/libnpmpack/package.json | 6 +- .../node_modules/libnpmpublish/package.json | 4 +- .../node_modules/libnpmsearch/package.json | 2 +- deps/npm/node_modules/libnpmteam/package.json | 2 +- .../node_modules/libnpmversion/package.json | 2 +- .../node_modules/minipass/LICENSE | 15 - .../node_modules/minipass/index.d.ts | 155 - .../node_modules/minipass/index.js | 649 -- .../node_modules/minipass/package.json | 56 - .../node_modules/minipass-fetch/package.json | 11 +- deps/npm/package.json | 20 +- .../test/lib/commands/audit.js.test.cjs | 4 +- .../test/lib/commands/config.js.test.cjs | 1 - .../test/lib/commands/diff.js.test.cjs | 88 + .../test/lib/commands/dist-tag.js.test.cjs | 25 +- .../test/lib/commands/doctor.js.test.cjs | 96 +- .../test/lib/commands/fund.js.test.cjs | 44 +- .../test/lib/commands/init.js.test.cjs | 55 +- .../test/lib/commands/link.js.test.cjs | 34 +- .../test/lib/commands/ls.js.test.cjs | 388 +- .../test/lib/commands/outdated.js.test.cjs | 161 +- .../test/lib/commands/profile.js.test.cjs | 3 +- .../test/lib/commands/query.js.test.cjs | 44 +- .../test/lib/commands/stars.js.test.cjs | 1 - .../test/lib/commands/team.js.test.cjs | 15 +- .../tap-snapshots/test/lib/docs.js.test.cjs | 77 +- .../test/lib/utils/error-message.js.test.cjs | 89 +- .../test/lib/utils/exit-handler.js.test.cjs | 26 +- .../test/lib/utils/log-file.js.test.cjs | 2 +- .../test/lib/utils/reify-output.js.test.cjs | 438 +- deps/npm/test/bin/npm-cli.js | 6 +- deps/npm/test/bin/npx-cli.js | 72 +- deps/npm/test/fixtures/clean-snapshot.js | 30 +- deps/npm/test/fixtures/merge-conflict.json | 36 + deps/npm/test/fixtures/mock-globals.js | 34 +- deps/npm/test/fixtures/mock-npm.js | 353 +- 
deps/npm/test/fixtures/sandbox.js | 6 +- deps/npm/test/fixtures/tmock.js | 27 + deps/npm/test/index.js | 32 +- deps/npm/test/lib/arborist-cmd.js | 201 +- deps/npm/test/lib/cli.js | 43 +- deps/npm/test/lib/commands/audit.js | 49 +- deps/npm/test/lib/commands/bugs.js | 142 +- deps/npm/test/lib/commands/config.js | 8 +- deps/npm/test/lib/commands/diff.js | 1691 +++-- deps/npm/test/lib/commands/dist-tag.js | 336 +- deps/npm/test/lib/commands/docs.js | 172 +- deps/npm/test/lib/commands/doctor.js | 40 +- deps/npm/test/lib/commands/edit.js | 8 +- deps/npm/test/lib/commands/exec.js | 9 - deps/npm/test/lib/commands/explain.js | 416 +- deps/npm/test/lib/commands/explore.js | 352 +- deps/npm/test/lib/commands/fund.js | 700 +- deps/npm/test/lib/commands/help-search.js | 145 +- deps/npm/test/lib/commands/help.js | 420 +- deps/npm/test/lib/commands/hook.js | 419 +- deps/npm/test/lib/commands/init.js | 521 +- deps/npm/test/lib/commands/install.js | 361 +- deps/npm/test/lib/commands/link.js | 544 +- deps/npm/test/lib/commands/ll.js | 5 +- deps/npm/test/lib/commands/logout.js | 255 +- deps/npm/test/lib/commands/ls.js | 6140 +++++++++-------- deps/npm/test/lib/commands/org.js | 342 +- deps/npm/test/lib/commands/outdated.js | 610 +- deps/npm/test/lib/commands/owner.js | 38 +- deps/npm/test/lib/commands/pack.js | 16 - deps/npm/test/lib/commands/pkg.js | 383 +- deps/npm/test/lib/commands/profile.js | 694 +- deps/npm/test/lib/commands/prune.js | 2 +- deps/npm/test/lib/commands/publish.js | 93 +- deps/npm/test/lib/commands/query.js | 32 +- deps/npm/test/lib/commands/rebuild.js | 231 +- deps/npm/test/lib/commands/repo.js | 110 +- deps/npm/test/lib/commands/restart.js | 4 +- deps/npm/test/lib/commands/run-script.js | 761 +- deps/npm/test/lib/commands/set.js | 99 +- deps/npm/test/lib/commands/stars.js | 105 +- deps/npm/test/lib/commands/start.js | 4 +- deps/npm/test/lib/commands/stop.js | 4 +- deps/npm/test/lib/commands/team.js | 294 +- deps/npm/test/lib/commands/test.js | 4 +- deps/npm/test/lib/commands/token.js | 752 +- deps/npm/test/lib/commands/uninstall.js | 298 +- deps/npm/test/lib/commands/update.js | 201 +- deps/npm/test/lib/commands/version.js | 489 +- deps/npm/test/lib/commands/view.js | 196 +- deps/npm/test/lib/docs.js | 2 +- deps/npm/test/lib/fixtures/mock-globals.js | 14 +- deps/npm/test/lib/lifecycle-cmd.js | 23 +- deps/npm/test/lib/load-all-commands.js | 35 +- deps/npm/test/lib/npm.js | 58 +- deps/npm/test/lib/utils/audit-error.js | 136 +- .../lib/utils/completion/installed-deep.js | 3 +- .../lib/utils/completion/installed-shallow.js | 7 +- deps/npm/test/lib/utils/config/definitions.js | 14 +- deps/npm/test/lib/utils/display.js | 7 +- deps/npm/test/lib/utils/error-message.js | 196 +- deps/npm/test/lib/utils/exit-handler.js | 61 +- deps/npm/test/lib/utils/explain-dep.js | 11 +- deps/npm/test/lib/utils/log-file.js | 20 +- deps/npm/test/lib/utils/log-shim.js | 3 +- deps/npm/test/lib/utils/open-url-prompt.js | 19 +- deps/npm/test/lib/utils/open-url.js | 3 +- deps/npm/test/lib/utils/otplease.js | 159 +- deps/npm/test/lib/utils/pulse-till-done.js | 3 +- deps/npm/test/lib/utils/read-user-info.js | 3 +- deps/npm/test/lib/utils/reify-finish.js | 8 +- deps/npm/test/lib/utils/reify-output.js | 231 +- deps/npm/test/lib/utils/tar.js | 3 +- deps/npm/test/lib/utils/timers.js | 3 +- deps/npm/test/lib/utils/update-notifier.js | 12 +- deps/npm/test/lib/utils/web-auth.js | 3 +- 319 files changed, 11263 insertions(+), 13277 deletions(-) delete mode 100644 
deps/npm/node_modules/minipass-fetch/node_modules/minipass/LICENSE delete mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.d.ts delete mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.js delete mode 100644 deps/npm/node_modules/minipass-fetch/node_modules/minipass/package.json create mode 100644 deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs create mode 100644 deps/npm/test/fixtures/merge-conflict.json create mode 100644 deps/npm/test/fixtures/tmock.js diff --git a/Makefile b/Makefile index ab696b45b645ec..94013466239e9c 100644 --- a/Makefile +++ b/Makefile @@ -1550,7 +1550,7 @@ CONFLICT_RE=^>>>>>>> [[:xdigit:]]+|^<<<<<<< [[:alpha:]]+ # Related CI job: node-test-linter lint-ci: lint-js-ci lint-cpp lint-py lint-md lint-addon-docs lint-yaml-build lint-yaml - @if ! ( grep -IEqrs "$(CONFLICT_RE)" --exclude="error-message.js" benchmark deps doc lib src test tools ) \ + @if ! ( grep -IEqrs "$(CONFLICT_RE)" --exclude="error-message.js" --exclude="merge-conflict.json" benchmark deps doc lib src test tools ) \ && ! ( $(FIND) . -maxdepth 1 -type f | xargs grep -IEqs "$(CONFLICT_RE)" ); then \ exit 0 ; \ else \ diff --git a/deps/npm/docs/content/commands/npm-access.md b/deps/npm/docs/content/commands/npm-access.md index f2078e1c9c38dd..7c7e1ffdcc22e5 100644 --- a/deps/npm/docs/content/commands/npm-access.md +++ b/deps/npm/docs/content/commands/npm-access.md @@ -16,6 +16,8 @@ npm access grant [] npm access revoke [] ``` +Note: This command is unaware of workspaces. + ### Description Used to set access controls on private packages. diff --git a/deps/npm/docs/content/commands/npm-adduser.md b/deps/npm/docs/content/commands/npm-adduser.md index f0cd57be25a2b6..bc7d888a2f3d2d 100644 --- a/deps/npm/docs/content/commands/npm-adduser.md +++ b/deps/npm/docs/content/commands/npm-adduser.md @@ -67,7 +67,8 @@ npm init --scope=@foo --yes * Default: "web" * Type: "legacy" or "web" -What authentication strategy to use with `login`. +What authentication strategy to use with `login`. Note that if an `otp` +config is given, this value will always be set to `legacy`. ### See Also diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index 4a5caf7d0c691f..1e9220b6a63a76 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -138,7 +138,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `omit` @@ -173,7 +173,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm-dedupe.md b/deps/npm/docs/content/commands/npm-dedupe.md index 80353bad5d0d7a..c32b50fbc5fb13 100644 --- a/deps/npm/docs/content/commands/npm-dedupe.md +++ b/deps/npm/docs/content/commands/npm-dedupe.md @@ -109,7 +109,7 @@ de-duplicating. Sets `--install-strategy=nested`. 
`--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `strict-peer-deps` @@ -126,7 +126,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm-find-dupes.md b/deps/npm/docs/content/commands/npm-find-dupes.md index a3ef44eb5b7fb1..e80f338239a84a 100644 --- a/deps/npm/docs/content/commands/npm-find-dupes.md +++ b/deps/npm/docs/content/commands/npm-find-dupes.md @@ -49,7 +49,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `strict-peer-deps` @@ -66,7 +66,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm-init.md b/deps/npm/docs/content/commands/npm-init.md index cf8bb3936acea5..d8d7acee77f018 100644 --- a/deps/npm/docs/content/commands/npm-init.md +++ b/deps/npm/docs/content/commands/npm-init.md @@ -7,7 +7,7 @@ description: Create a package.json file ### Synopsis ```bash -npm init (same as `npx ) +npm init (same as `npx `) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/docs/content/commands/npm-install-ci-test.md b/deps/npm/docs/content/commands/npm-install-ci-test.md index 9fd7c267b102f5..c7a75510232857 100644 --- a/deps/npm/docs/content/commands/npm-install-ci-test.md +++ b/deps/npm/docs/content/commands/npm-install-ci-test.md @@ -84,7 +84,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `omit` @@ -119,7 +119,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. 
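Taken together, the strategy renames repeated across these pages map onto three concrete flag values. A minimal sketch of each, assuming any existing project directory and using only the flag values named in the hunks above:

```bash
# Default strategy: hoist shared packages to the top-level node_modules.
npm install --install-strategy=hoisted

# Direct dependencies at the top level only; deeper dependencies still hoist.
# (The deprecated --global-style flag now maps to this.)
npm install --install-strategy=shallow

# No hoisting: packages install in the same shape they are depended on.
# (The deprecated --legacy-bundling flag now maps to this.)
npm install --install-strategy=nested
```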
diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md index 5642472630b530..464a4487481747 100644 --- a/deps/npm/docs/content/commands/npm-install-test.md +++ b/deps/npm/docs/content/commands/npm-install-test.md @@ -85,7 +85,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `omit` @@ -120,7 +120,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 3604aab4b940f2..8353ea25a93b15 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -475,7 +475,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `omit` @@ -510,7 +510,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm-link.md b/deps/npm/docs/content/commands/npm-link.md index 09459cc0ca8a50..9de0ff2c0a59de 100644 --- a/deps/npm/docs/content/commands/npm-link.md +++ b/deps/npm/docs/content/commands/npm-link.md @@ -176,7 +176,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `strict-peer-deps` @@ -193,7 +193,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. 
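The `auth-type` clarification added to `npm-adduser.md` above, and repeated in `npm-login.md` just below, is straightforward to exercise from the command line. A brief sketch; the registry URL is a placeholder:

```bash
# Default: web-based browser login flow.
npm login

# Explicitly request the legacy username/password/OTP prompts.
npm login --auth-type=legacy

# Per the doc note, supplying an otp config forces auth-type to legacy.
npm login --otp=123456 --registry=https://registry.example.test
```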
diff --git a/deps/npm/docs/content/commands/npm-login.md b/deps/npm/docs/content/commands/npm-login.md index 4feb7150400268..00f10ad95eeb52 100644 --- a/deps/npm/docs/content/commands/npm-login.md +++ b/deps/npm/docs/content/commands/npm-login.md @@ -74,7 +74,8 @@ npm init --scope=@foo --yes * Default: "web" * Type: "legacy" or "web" -What authentication strategy to use with `login`. +What authentication strategy to use with `login`. Note that if an `otp` +config is given, this value will always be set to `legacy`. ### See Also diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 2ae99e7e7c16e3..d8b6f4a7de63dc 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@9.2.0 /path/to/npm +npm@9.3.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/deps/npm/docs/content/commands/npm-owner.md b/deps/npm/docs/content/commands/npm-owner.md index c5bace6b2bcc94..2b04f635b1372b 100644 --- a/deps/npm/docs/content/commands/npm-owner.md +++ b/deps/npm/docs/content/commands/npm-owner.md @@ -14,8 +14,6 @@ npm owner ls alias: author ``` -Note: This command is unaware of workspaces. - ### Description Manage ownership of published packages. diff --git a/deps/npm/docs/content/commands/npm-publish.md b/deps/npm/docs/content/commands/npm-publish.md index 09756aedf02724..b23d9ad8a1fb2a 100644 --- a/deps/npm/docs/content/commands/npm-publish.md +++ b/deps/npm/docs/content/commands/npm-publish.md @@ -107,8 +107,8 @@ tarball that will be compared with the local files by default. current level * Type: null, "restricted", or "public" -If do not want your scoped package to be publicly viewable (and installable) -set `--access=restricted`. +If you do not want your scoped package to be publicly viewable (and +installable) set `--access=restricted`. Unscoped packages can not be set to `restricted`. diff --git a/deps/npm/docs/content/commands/npm-root.md b/deps/npm/docs/content/commands/npm-root.md index 89195744c9d18d..b34321eb961394 100644 --- a/deps/npm/docs/content/commands/npm-root.md +++ b/deps/npm/docs/content/commands/npm-root.md @@ -10,6 +10,8 @@ description: Display npm root npm root ``` +Note: This command is unaware of workspaces. + ### Description Print the effective `node_modules` folder to standard out. diff --git a/deps/npm/docs/content/commands/npm-update.md b/deps/npm/docs/content/commands/npm-update.md index cdd3190828a2cb..16c8e4df66d571 100644 --- a/deps/npm/docs/content/commands/npm-update.md +++ b/deps/npm/docs/content/commands/npm-update.md @@ -215,7 +215,7 @@ de-duplicating. Sets `--install-strategy=nested`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. #### `omit` @@ -250,7 +250,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. 
If `--strict-peer-deps` is set, then this warning is treated as a failure. diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md index be17431b58f360..2396b40cf5e8c7 100644 --- a/deps/npm/docs/content/commands/npm.md +++ b/deps/npm/docs/content/commands/npm.md @@ -10,9 +10,11 @@ description: javascript package manager npm ``` +Note: This command is unaware of workspaces. + ### Version -9.2.0 +9.3.0 ### Description @@ -132,7 +134,7 @@ npm is extremely configurable. It reads its configuration options from in the cli, env, or user config, then that file is parsed instead. * Defaults: npm's default configuration options are defined in - lib/utils/config-defs.js. These must not be changed. + `lib/utils/config/definitions.js`. These must not be changed. See [`config`](/using-npm/config) for much much more information. diff --git a/deps/npm/docs/content/configuring-npm/install.md b/deps/npm/docs/content/configuring-npm/install.md index 43fce4868ba973..18b4421687ba9a 100644 --- a/deps/npm/docs/content/configuring-npm/install.md +++ b/deps/npm/docs/content/configuring-npm/install.md @@ -17,11 +17,11 @@ run npm packages globally. ### Overview - [Checking your version of npm and - Node.js](#checking-your-version-of-npm-and-node-js) + Node.js](#checking-your-version-of-npm-and-nodejs) - [Using a Node version manager to install Node.js and - npm](#using-a-node-version-manager-to-install-node-js-and-npm) + npm](#using-a-node-version-manager-to-install-nodejs-and-npm) - [Using a Node installer to install Node.js and - npm](#using-a-node-installer-to-install-node-js-and-npm) + npm](#using-a-node-installer-to-install-nodejs-and-npm) ### Checking your version of npm and Node.js diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md index 0eda3ec5367316..c70f51d13bd381 100644 --- a/deps/npm/docs/content/using-npm/config.md +++ b/deps/npm/docs/content/using-npm/config.md @@ -142,8 +142,8 @@ safer to use a registry-provided authentication bearer token stored in the current level * Type: null, "restricted", or "public" -If do not want your scoped package to be publicly viewable (and installable) -set `--access=restricted`. +If you do not want your scoped package to be publicly viewable (and +installable) set `--access=restricted`. Unscoped packages can not be set to `restricted`. @@ -192,7 +192,8 @@ exit code. * Default: "web" * Type: "legacy" or "web" -What authentication strategy to use with `login`. +What authentication strategy to use with `login`. Note that if an `otp` +config is given, this value will always be set to `legacy`. #### `before` @@ -1240,7 +1241,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's `peerDependencies` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If `--strict-peer-deps` is set, then this warning is treated as a failure. @@ -1521,7 +1522,7 @@ Alias for `--include=dev`. `--install-strategy=shallow` Only install direct dependencies in the top level `node_modules`, but hoist -on deeper dependendencies. Sets `--install-strategy=shallow`. +on deeper dependencies. Sets `--install-strategy=shallow`. 
#### `init.author.email` diff --git a/deps/npm/docs/content/using-npm/registry.md b/deps/npm/docs/content/using-npm/registry.md index 8d5ac94160b33f..035ede5b32a3a1 100644 --- a/deps/npm/docs/content/using-npm/registry.md +++ b/deps/npm/docs/content/using-npm/registry.md @@ -35,7 +35,7 @@ Authentication configuration such as auth tokens and certificates are configured specifically scoped to an individual registry. See [Auth Related Configuration](/configuring-npm/npmrc#auth-related-configuration) -When the default registry is used in a package-lock or shrinkwrap is has the +When the default registry is used in a package-lock or shrinkwrap it has the special meaning of "the currently configured registry". If you create a lock file while using the default registry you can switch to another registry and npm will install packages from the new registry, but if you create a lock diff --git a/deps/npm/docs/content/using-npm/removal.md b/deps/npm/docs/content/using-npm/removal.md index c5e13b6741b6de..25dbb80baa7a95 100644 --- a/deps/npm/docs/content/using-npm/removal.md +++ b/deps/npm/docs/content/using-npm/removal.md @@ -28,8 +28,8 @@ continue reading. Note that this is only necessary for globally-installed packages. Local installs are completely contained within a project's `node_modules` -folder. Delete that folder, and everything is gone less a package's -install script is particularly ill-behaved). +folder. Delete that folder, and everything is gone unless a package's +install script is particularly ill-behaved. This assumes that you installed node and npm in the default place. If you configured node with a different `--prefix`, or installed npm with a diff --git a/deps/npm/docs/content/using-npm/scripts.md b/deps/npm/docs/content/using-npm/scripts.md index 9bc2bf32fe4ff7..bf212c5db24f77 100644 --- a/deps/npm/docs/content/using-npm/scripts.md +++ b/deps/npm/docs/content/using-npm/scripts.md @@ -63,7 +63,7 @@ situations. These scripts happen in addition to the `pre`, `post`, * Runs BEFORE the package is prepared and packed, ONLY on `npm publish`. **prepack** -* Runs BEFORE a tarball is packed (on "`npm pack`", "`npm publish`", and when installing a git dependencies). +* Runs BEFORE a tarball is packed (on "`npm pack`", "`npm publish`", and when installing a git dependency). * NOTE: "`npm run pack`" is NOT the same as "`npm pack`". "`npm run pack`" is an arbitrary user defined script name, where as, "`npm pack`" is a CLI defined command. **postpack** diff --git a/deps/npm/docs/output/commands/npm-access.html b/deps/npm/docs/output/commands/npm-access.html index 25569aabc1abd5..9a0bb09a9cd966 100644 --- a/deps/npm/docs/output/commands/npm-access.html +++ b/deps/npm/docs/output/commands/npm-access.html @@ -154,6 +154,7 @@

Table of contents

npm access grant <read-only|read-write> <scope:team> [<package>] npm access revoke <scope:team> [<package>] +

Note: This command is unaware of workspaces.

Description

Used to set access controls on private packages.

For all of the subcommands, npm access will perform actions on the packages diff --git a/deps/npm/docs/output/commands/npm-adduser.html b/deps/npm/docs/output/commands/npm-adduser.html index 77cde53f2fd276..e515eaa5adabda 100644 --- a/deps/npm/docs/output/commands/npm-adduser.html +++ b/deps/npm/docs/output/commands/npm-adduser.html @@ -190,7 +190,8 @@

auth-type

  • Default: "web"
  • Type: "legacy" or "web"
  • -

    What authentication strategy to use with login.

    +

    What authentication strategy to use with login. Note that if an otp +config is given, this value will always be set to legacy.

    See Also

    • npm registry
    • diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index 76223e5b47d669..07f07b5630562e 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -256,7 +256,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    omit

    • Default: 'dev' if the NODE_ENV environment variable is set to @@ -284,7 +284,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html index fb457d7d41e58b..143398d8d7a27b 100644 --- a/deps/npm/docs/output/commands/npm-dedupe.html +++ b/deps/npm/docs/output/commands/npm-dedupe.html @@ -225,7 +225,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    strict-peer-deps

    • Default: false
    • @@ -239,7 +239,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html index d527c5b4f686ba..0dd2bc375069ee 100644 --- a/deps/npm/docs/output/commands/npm-find-dupes.html +++ b/deps/npm/docs/output/commands/npm-find-dupes.html @@ -182,7 +182,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    strict-peer-deps

    • Default: false
    • @@ -196,7 +196,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html index 0f539a6ad44785..e515d36c85d2ea 100644 --- a/deps/npm/docs/output/commands/npm-init.html +++ b/deps/npm/docs/output/commands/npm-init.html @@ -146,7 +146,7 @@

      Table of contents

      Synopsis

      -
      npm init <package-spec> (same as `npx <package-spec>)
      +
      npm init <package-spec> (same as `npx <package-spec>`)
       npm init <@scope> (same as `npx <@scope>/create`)
       
       aliases: create, innit
      diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
      index 634d933dc62219..0d3dea5b4cc530 100644
      --- a/deps/npm/docs/output/commands/npm-install-ci-test.html
      +++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
      @@ -213,7 +213,7 @@ 

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    omit

    • Default: 'dev' if the NODE_ENV environment variable is set to @@ -241,7 +241,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html index 282d7755f12329..ff53148d4fb09d 100644 --- a/deps/npm/docs/output/commands/npm-install-test.html +++ b/deps/npm/docs/output/commands/npm-install-test.html @@ -214,7 +214,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    omit

    • Default: 'dev' if the NODE_ENV environment variable is set to @@ -242,7 +242,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html index 83be6e751e6c34..40dc5d32b3d746 100644 --- a/deps/npm/docs/output/commands/npm-install.html +++ b/deps/npm/docs/output/commands/npm-install.html @@ -540,7 +540,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    omit

    • Default: 'dev' if the NODE_ENV environment variable is set to @@ -568,7 +568,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

      -

      When such and override is performed, a warning is printed, explaining the +

      When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html index bf6e7f9a2f9348..d23f83826aa9d7 100644 --- a/deps/npm/docs/output/commands/npm-link.html +++ b/deps/npm/docs/output/commands/npm-link.html @@ -277,7 +277,7 @@

      global-style

      --install-strategy=shallow

    Only install direct dependencies in the top level node_modules, but hoist -on deeper dependendencies. Sets --install-strategy=shallow.

    +on deeper dependencies. Sets --install-strategy=shallow.

    strict-peer-deps

    • Default: false
    • @@ -291,7 +291,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

-When such and override is performed, a warning is printed, explaining the
+When such an override is performed, a warning is printed, explaining the
 conflict and the packages involved. If --strict-peer-deps is set, then this
 warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm-login.html b/deps/npm/docs/output/commands/npm-login.html index 8b47a64c8e8cbb..5ffff8aa627168 100644 --- a/deps/npm/docs/output/commands/npm-login.html +++ b/deps/npm/docs/output/commands/npm-login.html @@ -194,7 +194,8 @@

      auth-type

    • Default: "web"
    • Type: "legacy" or "web"
-What authentication strategy to use with login.
+What authentication strategy to use with login. Note that if an otp
+config is given, this value will always be set to legacy.
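The otp rule above can be summarized in a short sketch (a stand-in for illustration, not npm's implementation; `config` is a hypothetical config object):

  // Sketch: a pre-supplied one-time password forces the legacy login flow,
  // since the browser-based web flow has no way to consume it.
  function effectiveAuthType (config) {
    return config.get('otp') ? 'legacy' : config.get('auth-type')  // "web" by default
  }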

    See Also

    • npm registry
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 6ec045612fa513..ffe72db7c54b8c 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -160,7 +160,7 @@

      Description

      the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@9.2.0 /path/to/npm
+npm@9.3.0 /path/to/npm
       └─┬ init-package-json@0.0.4
         └── promzard@0.1.5
       
      diff --git a/deps/npm/docs/output/commands/npm-owner.html b/deps/npm/docs/output/commands/npm-owner.html index 13cf1d2209dfb3..3566602a011109 100644 --- a/deps/npm/docs/output/commands/npm-owner.html +++ b/deps/npm/docs/output/commands/npm-owner.html @@ -152,7 +152,6 @@

      Table of contents

      alias: author
-Note: This command is unaware of workspaces.

      Description

      Manage ownership of published packages.

        diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html index 864a03a5cbb0a8..80a136308a19c5 100644 --- a/deps/npm/docs/output/commands/npm-publish.html +++ b/deps/npm/docs/output/commands/npm-publish.html @@ -235,8 +235,8 @@

        access

        current level
      • Type: null, "restricted", or "public"
-If do not want your scoped package to be publicly viewable (and installable)
-set --access=restricted.
+If you do not want your scoped package to be publicly viewable (and
+installable) set --access=restricted.

      Unscoped packages can not be set to restricted.
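A hypothetical validation sketch of those two rules (not npm's code; the package names are made up):

  // Sketch: only scoped packages (@scope/name) can be restricted.
  function checkAccess (name, access) {
    const scoped = name.startsWith('@') && name.includes('/')
    if (access === 'restricted' && !scoped) {
      throw new Error(`only scoped packages can be restricted: ${name}`)
    }
  }

  checkAccess('@myorg/private-lib', 'restricted')  // ok
  checkAccess('plain-lib', 'restricted')           // throws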

Note: This defaults to not changing the current access level for existing packages. Specifying a value of restricted or public during publish will

diff --git a/deps/npm/docs/output/commands/npm-root.html b/deps/npm/docs/output/commands/npm-root.html index 292376737f4ba9..24436056c97676 100644 --- a/deps/npm/docs/output/commands/npm-root.html +++ b/deps/npm/docs/output/commands/npm-root.html @@ -148,6 +148,7 @@

      Table of contents

      Synopsis

      npm root
       
+Note: This command is unaware of workspaces.

      Description

      Print the effective node_modules folder to standard out.

Useful for using npm in shell scripts that do things with the

diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html index fbb1e8c133054b..162eb4fe54b99e 100644 --- a/deps/npm/docs/output/commands/npm-update.html +++ b/deps/npm/docs/output/commands/npm-update.html @@ -304,7 +304,7 @@

      global-style

      --install-strategy=shallow

 Only install direct dependencies in the top level node_modules, but hoist
-on deeper dependendencies. Sets --install-strategy=shallow.
+on deeper dependencies. Sets --install-strategy=shallow.

    omit

• Default: 'dev' if the NODE_ENV environment variable is set to

@@ -332,7 +332,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

-When such and override is performed, a warning is printed, explaining the
+When such an override is performed, a warning is printed, explaining the
 conflict and the packages involved. If --strict-peer-deps is set, then this
 warning is treated as a failure.

      package-lock

      diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index 94fb3e216804cd..cb6d4ea3c7b81e 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -148,8 +148,9 @@

      Table of contents

      Synopsis

      npm
       
+Note: This command is unaware of workspaces.

      Version

-9.2.0
+9.3.0

      Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency

@@ -247,7 +248,7 @@

      Configuration

      in the cli, env, or user config, then that file is parsed instead.
• Defaults: npm's default configuration options are defined in
-lib/utils/config-defs.js. These must not be changed.
+lib/utils/config/definitions.js. These must not be changed.

    See config for much much more information.

    Contributions

    diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html index b5aabb23b11ae5..7acebd7fda9950 100644 --- a/deps/npm/docs/output/configuring-npm/install.html +++ b/deps/npm/docs/output/configuring-npm/install.html @@ -155,11 +155,11 @@

    Table of contents

    run npm packages globally.

    Overview

    Checking your version of npm and Node.js

    diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html index 0a31adb6c46a0d..35bc3529efaeef 100644 --- a/deps/npm/docs/output/using-npm/config.html +++ b/deps/npm/docs/output/using-npm/config.html @@ -261,8 +261,8 @@

    access

    current level
  • Type: null, "restricted", or "public"
-If do not want your scoped package to be publicly viewable (and installable)
-set --access=restricted.
+If you do not want your scoped package to be publicly viewable (and
+installable) set --access=restricted.

    Unscoped packages can not be set to restricted.

Note: This defaults to not changing the current access level for existing packages. Specifying a value of restricted or public during publish will

@@ -303,7 +303,8 @@

    auth-type

  • Default: "web"
  • Type: "legacy" or "web"
-What authentication strategy to use with login.
+What authentication strategy to use with login. Note that if an otp
+config is given, this value will always be set to legacy.

    before

    • Default: null
@@ -1181,7 +1182,7 @@

      strict-peer-deps

      be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

-When such and override is performed, a warning is printed, explaining the
+When such an override is performed, a warning is printed, explaining the
 conflict and the packages involved. If --strict-peer-deps is set, then this
 warning is treated as a failure.

      strict-ssl

      @@ -1420,7 +1421,7 @@

      global-style

      --install-strategy=shallow

 Only install direct dependencies in the top level node_modules, but hoist
-on deeper dependendencies. Sets --install-strategy=shallow.
+on deeper dependencies. Sets --install-strategy=shallow.

    init.author.email

    • Default: ""
diff --git a/deps/npm/docs/output/using-npm/registry.html b/deps/npm/docs/output/using-npm/registry.html index 0d04f4dd821065..8ff40109fe0a64 100644 --- a/deps/npm/docs/output/using-npm/registry.html +++ b/deps/npm/docs/output/using-npm/registry.html @@ -169,7 +169,7 @@

      Table of contents

      Authentication configuration such as auth tokens and certificates are configured specifically scoped to an individual registry. See Auth Related Configuration

-When the default registry is used in a package-lock or shrinkwrap is has the
+When the default registry is used in a package-lock or shrinkwrap it has the
 special meaning of "the currently configured registry". If you create a lock
 file while using the default registry you can switch to another registry and
 npm will install packages from the new registry, but if you create a lock

diff --git a/deps/npm/docs/output/using-npm/removal.html b/deps/npm/docs/output/using-npm/removal.html index 7e9af7040bb2bf..e54187122e1f71 100644 --- a/deps/npm/docs/output/using-npm/removal.html +++ b/deps/npm/docs/output/using-npm/removal.html @@ -159,8 +159,8 @@

      More Severe Uninstalling

      continue reading.

 Note that this is only necessary for globally-installed packages. Local
 installs are completely contained within a project's node_modules
-folder. Delete that folder, and everything is gone less a package's
-install script is particularly ill-behaved).
+folder. Delete that folder, and everything is gone unless a package's
+install script is particularly ill-behaved.
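The same cleanup can be scripted with Node's built-in fs/promises API, which this patch itself adopts in place of rimraf (a sketch; the project path is an example):

  // Sketch: mirrors the fs.rm(..., { recursive: true, force: true })
  // calls this patch introduces in lib/commands/cache.js and ci.js.
  const fs = require('fs/promises')
  const { join } = require('path')

  async function wipeNodeModules (projectDir) {
    await fs.rm(join(projectDir, 'node_modules'), { recursive: true, force: true })
  }

  wipeNodeModules(process.cwd()).catch(console.error)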

This assumes that you installed node and npm in the default place. If you configured node with a different --prefix, or installed npm with a different prefix setting, then adjust the paths accordingly, replacing

diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html index 79e14dc027ead0..6cc14bff7aeabe 100644 --- a/deps/npm/docs/output/using-npm/scripts.html +++ b/deps/npm/docs/output/using-npm/scripts.html @@ -208,7 +208,7 @@

      Life Cycle Scripts

    prepack

-Runs BEFORE a tarball is packed (on "npm pack", "npm publish", and when installing a git dependencies).
+Runs BEFORE a tarball is packed (on "npm pack", "npm publish", and when installing a git dependency).
    • NOTE: "npm run pack" is NOT the same as "npm pack". "npm run pack" is an arbitrary user defined script name, where as, "npm pack" is a CLI defined command.

    postpack

    diff --git a/deps/npm/lib/arborist-cmd.js b/deps/npm/lib/arborist-cmd.js index 29efe984d9b26d..42699ece364ad1 100644 --- a/deps/npm/lib/arborist-cmd.js +++ b/deps/npm/lib/arborist-cmd.js @@ -17,22 +17,35 @@ class ArboristCmd extends BaseCommand { 'install-links', ] + static workspaces = true static ignoreImplicitWorkspace = false constructor (npm) { super(npm) - if (this.npm.config.isDefault('audit') - && (this.npm.global || this.npm.config.get('location') !== 'project') - ) { - this.npm.config.set('audit', false) - } else if (this.npm.global && this.npm.config.get('audit')) { - log.warn('config', - 'includes both --global and --audit, which is currently unsupported.') + + const { config } = this.npm + + // when location isn't set and global isn't true check for a package.json at + // the localPrefix and set the location to project if found + const locationProject = config.get('location') === 'project' || ( + config.isDefault('location') + // this is different then `npm.global` which falls back to checking + // location which we do not want to use here + && !config.get('global') + && npm.localPackage + ) + + // if audit is not set and we are in global mode and location is not project + // and we assume its not a project related context, then we set audit=false + if (config.isDefault('audit') && (this.npm.global || !locationProject)) { + config.set('audit', false) + } else if (this.npm.global && config.get('audit')) { + log.warn('config', 'includes both --global and --audit, which is currently unsupported.') } } - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) + async execWorkspaces (args) { + await this.setWorkspaces() return this.exec(args) } } diff --git a/deps/npm/lib/base-command.js b/deps/npm/lib/base-command.js index b57b7474a5efb0..0adff8e5d95ea8 100644 --- a/deps/npm/lib/base-command.js +++ b/deps/npm/lib/base-command.js @@ -8,12 +8,21 @@ const getWorkspaces = require('./workspaces/get-workspaces.js') const cmdAliases = require('./utils/cmd-list').aliases class BaseCommand { + static workspaces = false + static ignoreImplicitWorkspace = true + constructor (npm) { this.wrapWidth = 80 this.npm = npm - if (!this.skipConfigValidation) { - this.npm.config.validate() + const { config } = this.npm + + if (!this.constructor.skipConfigValidation) { + config.validate() + } + + if (config.get('workspaces') === false && config.get('workspace').length) { + throw new Error('Can not use --no-workspaces and --workspace at the same time') } } @@ -25,35 +34,31 @@ class BaseCommand { return this.constructor.description } - get ignoreImplicitWorkspace () { - return this.constructor.ignoreImplicitWorkspace - } - - get skipConfigValidation () { - return this.constructor.skipConfigValidation + get params () { + return this.constructor.params } get usage () { const usage = [ - `${this.constructor.description}`, + `${this.description}`, '', 'Usage:', ] if (!this.constructor.usage) { - usage.push(`npm ${this.constructor.name}`) + usage.push(`npm ${this.name}`) } else { - usage.push(...this.constructor.usage.map(u => `npm ${this.constructor.name} ${u}`)) + usage.push(...this.constructor.usage.map(u => `npm ${this.name} ${u}`)) } - if (this.constructor.params) { + if (this.params) { usage.push('') usage.push('Options:') usage.push(this.wrappedParams) } const aliases = Object.keys(cmdAliases).reduce((p, c) => { - if (cmdAliases[c] === this.constructor.name) { + if (cmdAliases[c] === this.name) { p.push(c) } return p @@ -68,7 +73,7 @@ class BaseCommand { } usage.push('') - 
usage.push(`Run "npm help ${this.constructor.name}" for more info`) + usage.push(`Run "npm help ${this.name}" for more info`) return usage.join('\n') } @@ -77,7 +82,7 @@ class BaseCommand { let results = '' let line = '' - for (const param of this.constructor.params) { + for (const param of this.params) { const usage = `[${ConfigDefinitions[param].usage}]` if (line.length && line.length + usage.length > this.wrapWidth) { results = [results, line].filter(Boolean).join('\n') @@ -98,26 +103,48 @@ class BaseCommand { }) } - async execWorkspaces (args, filters) { - throw Object.assign(new Error('This command does not support workspaces.'), { - code: 'ENOWORKSPACES', - }) - } + async cmdExec (args) { + const { config } = this.npm - async setWorkspaces (filters) { - if (this.isArboristCmd) { - this.includeWorkspaceRoot = false + if (config.get('usage')) { + return this.npm.output(this.usage) } - const relativeFrom = relative(this.npm.localPrefix, process.cwd()).startsWith('..') - ? this.npm.localPrefix - : process.cwd() + const hasWsConfig = config.get('workspaces') || config.get('workspace').length + // if cwd is a workspace, the default is set to [that workspace] + const implicitWs = config.get('workspace', 'default').length + // (-ws || -w foo) && (cwd is not a workspace || command is not ignoring implicit workspaces) + if (hasWsConfig && (!implicitWs || !this.constructor.ignoreImplicitWorkspace)) { + if (this.npm.global) { + throw new Error('Workspaces not supported for global packages') + } + if (!this.constructor.workspaces) { + throw Object.assign(new Error('This command does not support workspaces.'), { + code: 'ENOWORKSPACES', + }) + } + return this.execWorkspaces(args) + } + + return this.exec(args) + } + + async setWorkspaces () { + const includeWorkspaceRoot = this.isArboristCmd + ? false + : this.npm.config.get('include-workspace-root') + + const prefixInsideCwd = relative(this.npm.localPrefix, process.cwd()).startsWith('..') + const relativeFrom = prefixInsideCwd ? this.npm.localPrefix : process.cwd() + + const filters = this.npm.config.get('workspace') const ws = await getWorkspaces(filters, { path: this.npm.localPrefix, - includeWorkspaceRoot: this.includeWorkspaceRoot, + includeWorkspaceRoot, relativeFrom, }) + this.workspaces = ws this.workspaceNames = [...ws.keys()] this.workspacePaths = [...ws.values()] diff --git a/deps/npm/lib/cli.js b/deps/npm/lib/cli.js index 9aaf6c593675af..007778aa4b9866 100644 --- a/deps/npm/lib/cli.js +++ b/deps/npm/lib/cli.js @@ -68,6 +68,11 @@ module.exports = async process => { // leak any private CLI configs to other programs process.title = 'npm' + // if npm is called as "npmg" or "npm_g", then run in global mode. + if (process.argv[1][process.argv[1].length - 1] === 'g') { + process.argv.splice(1, 1, 'npm', '-g') + } + // Nothing should happen before this line if we can't guarantee it will // not have syntax errors in some version of node const validateEngines = createEnginesValidation() @@ -78,11 +83,6 @@ module.exports = async process => { const npm = new Npm() exitHandler.setNpm(npm) - // if npm is called as "npmg" or "npm_g", then run in global mode. - if (process.argv[1][process.argv[1].length - 1] === 'g') { - process.argv.splice(1, 1, 'npm', '-g') - } - // only log node and npm paths in argv initially since argv can contain // sensitive info. 
a cleaned version will be logged later const log = require('./utils/log-shim.js') @@ -112,6 +112,7 @@ module.exports = async process => { // this is how to use npm programmatically: try { await npm.load() + if (npm.config.get('version', 'cli')) { npm.output(npm.version) return exitHandler() @@ -130,7 +131,7 @@ module.exports = async process => { return exitHandler() } - await npm.exec(cmd, npm.argv) + await npm.exec(cmd) return exitHandler() } catch (err) { if (err.code === 'EUNKNOWNCOMMAND') { diff --git a/deps/npm/lib/commands/access.js b/deps/npm/lib/commands/access.js index d5ac5bb2f008e7..23e51f071b1124 100644 --- a/deps/npm/lib/commands/access.js +++ b/deps/npm/lib/commands/access.js @@ -37,8 +37,6 @@ class Access extends BaseCommand { 'registry', ] - static ignoreImplicitWorkspace = true - static usage = [ 'list packages [|| []', 'list collaborators [ []]', diff --git a/deps/npm/lib/commands/adduser.js b/deps/npm/lib/commands/adduser.js index 1e92b35f4a6623..cd4cba60511cb6 100644 --- a/deps/npm/lib/commands/adduser.js +++ b/deps/npm/lib/commands/adduser.js @@ -13,8 +13,6 @@ class AddUser extends BaseCommand { 'auth-type', ] - static ignoreImplicitWorkspace = true - async exec (args) { const scope = this.npm.config.get('scope') let registry = this.npm.config.get('registry') diff --git a/deps/npm/lib/commands/audit.js b/deps/npm/lib/commands/audit.js index feccefda0c9049..13886ea6350b66 100644 --- a/deps/npm/lib/commands/audit.js +++ b/deps/npm/lib/commands/audit.js @@ -152,7 +152,7 @@ class VerifySignatures { const keys = await fetch.json('/-/npm/v1/keys', { ...this.npm.flatOptions, registry, - }).then(({ keys }) => keys.map((key) => ({ + }).then(({ keys: ks }) => ks.map((key) => ({ ...key, pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`, }))).catch(err => { diff --git a/deps/npm/lib/commands/cache.js b/deps/npm/lib/commands/cache.js index a2e6434b34cab5..0ab40b9ed44a9f 100644 --- a/deps/npm/lib/commands/cache.js +++ b/deps/npm/lib/commands/cache.js @@ -1,9 +1,8 @@ const cacache = require('cacache') const Arborist = require('@npmcli/arborist') -const { promisify } = require('util') const pacote = require('pacote') -const path = require('path') -const rimraf = promisify(require('rimraf')) +const fs = require('fs/promises') +const { join } = require('path') const semver = require('semver') const BaseCommand = require('../base-command.js') const npa = require('npm-package-arg') @@ -75,8 +74,6 @@ class Cache extends BaseCommand { 'verify', ] - static ignoreImplicitWorkspace = true - async completion (opts) { const argv = opts.conf.argv.remain if (argv.length === 2) { @@ -112,7 +109,7 @@ class Cache extends BaseCommand { // npm cache clean [pkg]* async clean (args) { - const cachePath = path.join(this.npm.cache, '_cacache') + const cachePath = join(this.npm.cache, '_cacache') if (args.length === 0) { if (!this.npm.config.get('force')) { throw new Error(`As of npm@5, the npm cache self-heals from corruption issues @@ -130,7 +127,7 @@ class Cache extends BaseCommand { If you're sure you want to delete the entire cache, rerun this command with --force.`) } - return rimraf(cachePath) + return fs.rm(cachePath, { recursive: true, force: true }) } for (const key of args) { let entry @@ -170,7 +167,7 @@ class Cache extends BaseCommand { } async verify () { - const cache = path.join(this.npm.cache, '_cacache') + const cache = join(this.npm.cache, '_cacache') const prefix = cache.indexOf(process.env.HOME) === 0 ? 
`~${cache.slice(process.env.HOME.length)}` : cache @@ -193,7 +190,7 @@ class Cache extends BaseCommand { // npm cache ls [--package ...] async ls (specs) { - const cachePath = path.join(this.npm.cache, '_cacache') + const cachePath = join(this.npm.cache, '_cacache') const cacheKeys = Object.keys(await cacache.ls(cachePath)) if (specs.length > 0) { // get results for each package spec specified diff --git a/deps/npm/lib/commands/ci.js b/deps/npm/lib/commands/ci.js index 38ee1426d982ab..a2c61044eb96ee 100644 --- a/deps/npm/lib/commands/ci.js +++ b/deps/npm/lib/commands/ci.js @@ -1,10 +1,7 @@ -const util = require('util') const Arborist = require('@npmcli/arborist') -const rimraf = util.promisify(require('rimraf')) const reifyFinish = require('../utils/reify-finish.js') const runScript = require('@npmcli/run-script') -const fs = require('fs') -const readdir = util.promisify(fs.readdir) +const fs = require('fs/promises') const log = require('../utils/log-shim.js') const validateLockfile = require('../utils/validate-lockfile.js') @@ -69,8 +66,8 @@ class CI extends ArboristWorkspaceCmd { await this.npm.time('npm-ci:rm', async () => { const path = `${where}/node_modules` // get the list of entries so we can skip the glob for performance - const entries = await readdir(path, null).catch(er => []) - return Promise.all(entries.map(f => rimraf(`${path}/${f}`, { glob: false }))) + const entries = await fs.readdir(path, null).catch(er => []) + return Promise.all(entries.map(f => fs.rm(`${path}/${f}`, { force: true }))) }) await arb.reify(opts) diff --git a/deps/npm/lib/commands/completion.js b/deps/npm/lib/commands/completion.js index 8fc05b2e82313d..f5604e099f9a27 100644 --- a/deps/npm/lib/commands/completion.js +++ b/deps/npm/lib/commands/completion.js @@ -31,6 +31,7 @@ const fs = require('fs/promises') const nopt = require('nopt') +const { resolve } = require('path') const { definitions, shorthands } = require('../utils/config/index.js') const { aliases, commands, plumbing } = require('../utils/cmd-list.js') @@ -40,21 +41,13 @@ const configNames = Object.keys(definitions) const shorthandNames = Object.keys(shorthands) const allConfs = configNames.concat(shorthandNames) const { isWindowsShell } = require('../utils/is-windows.js') -const fileExists = async (file) => { - try { - const stat = await fs.stat(file) - return stat.isFile() - } catch { - return false - } -} +const fileExists = (file) => fs.stat(file).then(s => s.isFile()).catch(() => false) const BaseCommand = require('../base-command.js') class Completion extends BaseCommand { static description = 'Tab Completion for npm' static name = 'completion' - static ignoreImplicitWorkspace = true // completion for the completion command async completion (opts) { @@ -62,7 +55,6 @@ class Completion extends BaseCommand { return } - const { resolve } = require('path') const [bashExists, zshExists] = await Promise.all([ fileExists(resolve(process.env.HOME, '.bashrc')), fileExists(resolve(process.env.HOME, '.zshrc')), @@ -93,7 +85,7 @@ class Completion extends BaseCommand { if (COMP_CWORD === undefined || COMP_LINE === undefined || COMP_POINT === undefined) { - return dumpScript() + return dumpScript(resolve(this.npm.npmRoot, 'lib', 'utils', 'completion.sh')) } // ok we're actually looking at the envs and outputting the suggestions @@ -150,9 +142,9 @@ class Completion extends BaseCommand { // take a little shortcut and use npm's arg parsing logic. 
// don't have to worry about the last arg being implicitly // boolean'ed, since the last block will catch that. - const types = Object.entries(definitions).reduce((types, [key, def]) => { - types[key] = def.type - return types + const types = Object.entries(definitions).reduce((acc, [key, def]) => { + acc[key] = def.type + return acc }, {}) const parsed = opts.conf = nopt(types, shorthands, partialWords.slice(0, -1), 0) @@ -196,10 +188,7 @@ class Completion extends BaseCommand { } } -const dumpScript = async () => { - const { resolve } = require('path') - const p = resolve(__dirname, '..', 'utils', 'completion.sh') - +const dumpScript = async (p) => { const d = (await fs.readFile(p, 'utf8')).replace(/^#!.*?\n/, '') await new Promise((res, rej) => { let done = false diff --git a/deps/npm/lib/commands/config.js b/deps/npm/lib/commands/config.js index 103fbb554e5d1d..ac5a74d01f7de2 100644 --- a/deps/npm/lib/commands/config.js +++ b/deps/npm/lib/commands/config.js @@ -112,11 +112,6 @@ class Config extends BaseCommand { } } - async execWorkspaces (args, filters) { - log.warn('config', 'This command does not support workspaces.') - return this.exec(args) - } - async exec ([action, ...args]) { log.disableProgress() try { @@ -251,14 +246,14 @@ ${defData} `.split('\n').join(EOL) await mkdir(dirname(file), { recursive: true }) await writeFile(file, tmpData, 'utf8') - await new Promise((resolve, reject) => { + await new Promise((res, rej) => { const [bin, ...args] = e.split(/\s+/) const editor = spawn(bin, [...args, file], { stdio: 'inherit' }) editor.on('exit', (code) => { if (code) { - return reject(new Error(`editor process exited with code: ${code}`)) + return rej(new Error(`editor process exited with code: ${code}`)) } - return resolve() + return res() }) }) } diff --git a/deps/npm/lib/commands/diff.js b/deps/npm/lib/commands/diff.js index c8fd734918d75e..1f4bfd3eb11512 100644 --- a/deps/npm/lib/commands/diff.js +++ b/deps/npm/lib/commands/diff.js @@ -32,6 +32,7 @@ class Diff extends BaseCommand { 'include-workspace-root', ] + static workspaces = true static ignoreImplicitWorkspace = false async exec (args) { @@ -67,8 +68,8 @@ class Diff extends BaseCommand { return this.npm.output(res) } - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) + async execWorkspaces (args) { + await this.setWorkspaces() for (const workspacePath of this.workspacePaths) { this.top = workspacePath this.prefix = workspacePath diff --git a/deps/npm/lib/commands/dist-tag.js b/deps/npm/lib/commands/dist-tag.js index 8052e4f7e4e38c..bc61a4691e55a0 100644 --- a/deps/npm/lib/commands/dist-tag.js +++ b/deps/npm/lib/commands/dist-tag.js @@ -17,6 +17,7 @@ class DistTag extends BaseCommand { 'ls []', ] + static workspaces = true static ignoreImplicitWorkspace = false async completion (opts) { @@ -57,14 +58,14 @@ class DistTag extends BaseCommand { } } - async execWorkspaces ([cmdName, pkg, tag], filters) { + async execWorkspaces ([cmdName, pkg, tag]) { // cmdName is some form of list // pkg is one of: // - unset // - . // - .@version if (['ls', 'l', 'sl', 'list'].includes(cmdName) && (!pkg || pkg === '.' || /^\.@/.test(pkg))) { - return this.listWorkspaces(filters) + return this.listWorkspaces() } // pkg is unset @@ -73,12 +74,12 @@ class DistTag extends BaseCommand { // - . // - .@version if (!pkg && (!cmdName || cmdName === '.' 
|| /^\.@/.test(cmdName))) { - return this.listWorkspaces(filters) + return this.listWorkspaces() } // anything else is just a regular dist-tag command // so we fallback to the non-workspaces implementation - log.warn('Ignoring workspaces for specified package') + log.warn('dist-tag', 'Ignoring workspaces for specified package') return this.exec([cmdName, pkg, tag]) } @@ -116,7 +117,7 @@ class DistTag extends BaseCommand { }, spec, } - await otplease(this.npm, reqOpts, reqOpts => regFetch(url, reqOpts)) + await otplease(this.npm, reqOpts, o => regFetch(url, o)) this.npm.output(`+${t}: ${spec.name}@${version}`) } @@ -142,7 +143,7 @@ class DistTag extends BaseCommand { method: 'DELETE', spec, } - await otplease(this.npm, reqOpts, reqOpts => regFetch(url, reqOpts)) + await otplease(this.npm, reqOpts, o => regFetch(url, o)) this.npm.output(`-${tag}: ${spec.name}@${version}`) } @@ -172,8 +173,8 @@ class DistTag extends BaseCommand { } } - async listWorkspaces (filters) { - await this.setWorkspaces(filters) + async listWorkspaces () { + await this.setWorkspaces() for (const name of this.workspaceNames) { try { diff --git a/deps/npm/lib/commands/edit.js b/deps/npm/lib/commands/edit.js index 67ac32e0171843..a671a5d6bad5d6 100644 --- a/deps/npm/lib/commands/edit.js +++ b/deps/npm/lib/commands/edit.js @@ -51,23 +51,23 @@ class Edit extends BaseCommand { const dir = resolve(this.npm.dir, path) // graceful-fs does not promisify - await new Promise((resolve, reject) => { + await new Promise((res, rej) => { fs.lstat(dir, (err) => { if (err) { - return reject(err) + return rej(err) } - const [bin, ...args] = this.npm.config.get('editor').split(/\s+/) - const editor = cp.spawn(bin, [...args, dir], { stdio: 'inherit' }) + const [bin, ...spawnArgs] = this.npm.config.get('editor').split(/\s+/) + const editor = cp.spawn(bin, [...spawnArgs, dir], { stdio: 'inherit' }) editor.on('exit', async (code) => { if (code) { - return reject(new Error(`editor process exited with code: ${code}`)) + return rej(new Error(`editor process exited with code: ${code}`)) } try { await this.npm.exec('rebuild', [dir]) - } catch (err) { - reject(err) + } catch (execErr) { + rej(execErr) } - resolve() + res() }) }) }) diff --git a/deps/npm/lib/commands/exec.js b/deps/npm/lib/commands/exec.js index a77a6326c00f28..a5235c7845851b 100644 --- a/deps/npm/lib/commands/exec.js +++ b/deps/npm/lib/commands/exec.js @@ -1,4 +1,4 @@ -const path = require('path') +const { resolve } = require('path') const libexec = require('libnpmexec') const BaseCommand = require('../base-command.js') @@ -20,10 +20,25 @@ class Exec extends BaseCommand { '--package=foo -c \' [args...]\'', ] + static workspaces = true static ignoreImplicitWorkspace = false static isShellout = true - async exec (_args, { locationMsg, runPath } = {}) { + async exec (args) { + return this.callExec(args) + } + + async execWorkspaces (args) { + await this.setWorkspaces() + + for (const [name, path] of this.workspaces) { + const locationMsg = + `in workspace ${this.npm.chalk.green(name)} at location:\n${this.npm.chalk.dim(path)}` + await this.callExec(args, { locationMsg, runPath: path }) + } + } + + async callExec (args, { locationMsg, runPath } = {}) { // This is where libnpmexec will look for locally installed packages const localPrefix = this.npm.localPrefix @@ -32,7 +47,6 @@ class Exec extends BaseCommand { runPath = process.cwd() } - const args = [..._args] const call = this.npm.config.get('call') let globalPath const { @@ -49,10 +63,10 @@ class Exec extends BaseCommand { // is 
invalid (i.e. no lib/node_modules). This is not a trivial thing to // untangle and fix so we work around it here. if (this.npm.localPrefix !== this.npm.globalPrefix) { - globalPath = path.resolve(globalDir, '..') + globalPath = resolve(globalDir, '..') } - if (call && _args.length) { + if (call && args.length) { throw this.usageError() } @@ -61,7 +75,8 @@ class Exec extends BaseCommand { // we explicitly set packageLockOnly to false because if it's true // when we try to install a missing package, we won't actually install it packageLockOnly: false, - args, + // copy args so they dont get mutated + args: [...args], call, localBin, locationMsg, @@ -75,16 +90,6 @@ class Exec extends BaseCommand { yes, }) } - - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) - - for (const [name, path] of this.workspaces) { - const locationMsg = - `in workspace ${this.npm.chalk.green(name)} at location:\n${this.npm.chalk.dim(path)}` - await this.exec(args, { locationMsg, runPath: path }) - } - } } module.exports = Exec diff --git a/deps/npm/lib/commands/find-dupes.js b/deps/npm/lib/commands/find-dupes.js index b99ea7a14eb211..b1a31208603663 100644 --- a/deps/npm/lib/commands/find-dupes.js +++ b/deps/npm/lib/commands/find-dupes.js @@ -18,7 +18,7 @@ class FindDupes extends ArboristWorkspaceCmd { ...super.params, ] - async exec (args, cb) { + async exec (args) { this.npm.config.set('dry-run', true) return this.npm.exec('dedupe', []) } diff --git a/deps/npm/lib/commands/fund.js b/deps/npm/lib/commands/fund.js index 9690cbc32e0792..12762533c123e6 100644 --- a/deps/npm/lib/commands/fund.js +++ b/deps/npm/lib/commands/fund.js @@ -16,12 +16,27 @@ const getPrintableName = ({ name, version }) => { return `${name}${printableVersion}` } +const errCode = (msg, code) => Object.assign(new Error(msg), { code }) + class Fund extends ArboristWorkspaceCmd { static description = 'Retrieve funding information' static name = 'fund' static params = ['json', 'browser', 'unicode', 'workspace', 'which'] static usage = ['[]'] + // XXX: maybe worth making this generic for all commands? 
+ usageMessage (paramsObj = {}) { + let msg = `\`npm ${this.constructor.name}` + const params = Object.entries(paramsObj) + if (params.length) { + msg += ` ${this.constructor.usage}` + } + for (const [key, value] of params) { + msg += ` --${key}=${value}` + } + return `${msg}\`` + } + // TODO /* istanbul ignore next */ async completion (opts) { @@ -30,25 +45,23 @@ class Fund extends ArboristWorkspaceCmd { async exec (args) { const spec = args[0] - const numberArg = this.npm.config.get('which') - const fundingSourceNumber = numberArg && parseInt(numberArg, 10) - - const badFundingSourceNumber = - numberArg !== null && (String(fundingSourceNumber) !== numberArg || fundingSourceNumber < 1) - - if (badFundingSourceNumber) { - const err = new Error( - '`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer' - ) - err.code = 'EFUNDNUMBER' - throw err + let fundingSourceNumber = this.npm.config.get('which') + if (fundingSourceNumber != null) { + fundingSourceNumber = parseInt(fundingSourceNumber, 10) + if (isNaN(fundingSourceNumber) || fundingSourceNumber < 1) { + throw errCode( + `${this.usageMessage({ which: 'fundingSourceNumber' })} must be given a positive integer`, + 'EFUNDNUMBER' + ) + } } if (this.npm.global) { - const err = new Error('`npm fund` does not support global packages') - err.code = 'EFUNDGLOBAL' - throw err + throw errCode( + `${this.usageMessage()} does not support global packages`, + 'EFUNDGLOBAL' + ) } const where = this.npm.prefix @@ -146,6 +159,7 @@ class Fund extends ArboristWorkspaceCmd { async openFundingUrl ({ path, tree, spec, fundingSourceNumber }) { const arg = npa(spec, path) + const retrievePackageMetadata = () => { if (arg.type === 'directory') { if (tree.path === arg.fetchSpec) { @@ -178,32 +192,35 @@ class Fund extends ArboristWorkspaceCmd { const validSources = [].concat(normalizeFunding(funding)).filter(isValidFunding) - const matchesValidSource = - validSources.length === 1 || - (fundingSourceNumber > 0 && fundingSourceNumber <= validSources.length) - - if (matchesValidSource) { - const index = fundingSourceNumber ? fundingSourceNumber - 1 : 0 - const { type, url } = validSources[index] - const typePrefix = type ? `${type} funding` : 'Funding' - const msg = `${typePrefix} available at the following URL` - return openUrl(this.npm, url, msg) - } else if (validSources.length && !(fundingSourceNumber >= 1)) { - validSources.forEach(({ type, url }, i) => { - const typePrefix = type ? `${type} funding` : 'Funding' - const msg = `${typePrefix} available at the following URL` - this.npm.output(`${i + 1}: ${msg}: ${url}`) - }) - this.npm.output( - /* eslint-disable-next-line max-len */ - 'Run `npm fund [<@scope>/] --which=1`, for example, to open the first funding URL listed in that package' - ) - } else { - const noFundingError = new Error(`No valid funding method available for: ${spec}`) - noFundingError.code = 'ENOFUND' + if (!validSources.length) { + throw errCode(`No valid funding method available for: ${spec}`, 'ENOFUND') + } - throw noFundingError + const fundSource = fundingSourceNumber + ? validSources[fundingSourceNumber - 1] + : validSources.length === 1 ? 
validSources[0] + : null + + if (fundSource) { + return openUrl(this.npm, ...this.urlMessage(fundSource)) + } + + const ambiguousUrlMsg = [ + ...validSources.map((s, i) => `${i + 1}: ${this.urlMessage(s).reverse().join(': ')}`), + `Run ${this.usageMessage({ which: '1' })}` + + ', for example, to open the first funding URL listed in that package', + ] + if (fundingSourceNumber) { + ambiguousUrlMsg.unshift(`--which=${fundingSourceNumber} is not a valid index`) } + this.npm.output(ambiguousUrlMsg.join('\n')) + } + + urlMessage (source) { + const { type, url } = source + const typePrefix = type ? `${type} funding` : 'Funding' + const message = `${typePrefix} available at the following URL` + return [url, message] } } module.exports = Fund diff --git a/deps/npm/lib/commands/help-search.js b/deps/npm/lib/commands/help-search.js index 488189bbbc5cd8..afb82bfaca9eea 100644 --- a/deps/npm/lib/commands/help-search.js +++ b/deps/npm/lib/commands/help-search.js @@ -13,14 +13,13 @@ class HelpSearch extends BaseCommand { static name = 'help-search' static usage = [''] static params = ['long'] - static ignoreImplicitWorkspace = true async exec (args) { if (!args.length) { throw this.usageError() } - const docPath = path.resolve(__dirname, '..', '..', 'docs/content') + const docPath = path.resolve(this.npm.npmRoot, 'docs/content') const files = await glob(`${globify(docPath)}/*/*.md`) const data = await this.readFiles(files) const results = await this.searchFiles(args, data, files) @@ -142,7 +141,7 @@ class HelpSearch extends BaseCommand { formatResults (args, results) { const cols = Math.min(process.stdout.columns || Infinity, 80) + 1 - const out = results.map(res => { + const output = results.map(res => { const out = [res.cmd] const r = Object.keys(res.hits) .map(k => `${k}:${res.hits[k]}`) @@ -189,10 +188,10 @@ class HelpSearch extends BaseCommand { const finalOut = results.length && !this.npm.config.get('long') ? 'Top hits for ' + (args.map(JSON.stringify).join(' ')) + '\n' + '—'.repeat(cols - 1) + '\n' + - out + '\n' + + output + '\n' + '—'.repeat(cols - 1) + '\n' + '(run with -l or --long to see more context)' - : out + : output return finalOut.trim() } diff --git a/deps/npm/lib/commands/help.js b/deps/npm/lib/commands/help.js index e7d6395a1b01a6..3ab2c563198680 100644 --- a/deps/npm/lib/commands/help.js +++ b/deps/npm/lib/commands/help.js @@ -1,4 +1,4 @@ -const { spawn } = require('child_process') +const spawn = require('@npmcli/promise-spawn') const path = require('path') const openUrl = require('../utils/open-url.js') const { promisify } = require('util') @@ -14,19 +14,26 @@ const BaseCommand = require('../base-command.js') const manNumberRegex = /\.(\d+)(\.[^/\\]*)?$/ // Searches for the "npm-" prefix in page names, to prefer those. 
const manNpmPrefixRegex = /\/npm-/ +// hardcoded names for mansections +// XXX: these are used in the docs workspace and should be exported +// from npm so section names can changed more easily +const manSectionNames = { + 1: 'commands', + 5: 'configuring-npm', + 7: 'using-npm', +} class Help extends BaseCommand { static description = 'Get help on npm' static name = 'help' static usage = [' []'] static params = ['viewer'] - static ignoreImplicitWorkspace = true async completion (opts) { if (opts.conf.argv.remain.length > 2) { return [] } - const g = path.resolve(__dirname, '../../man/man[0-9]/*.[0-9]') + const g = path.resolve(this.npm.npmRoot, 'man/man[0-9]/*.[0-9]') const files = await glob(globify(g)) return Object.keys(files.reduce(function (acc, file) { @@ -40,10 +47,7 @@ class Help extends BaseCommand { async exec (args) { // By default we search all of our man subdirectories, but if the user has // asked for a specific one we limit the search to just there - let manSearch = 'man*' - if (/^\d+$/.test(args[0])) { - manSearch = `man${args.shift()}` - } + const manSearch = /^\d+$/.test(args[0]) ? `man${args.shift()}` : 'man*' if (!args.length) { return this.npm.output(await this.npm.usage) @@ -54,20 +58,18 @@ class Help extends BaseCommand { return this.helpSearch(args) } - let section = this.npm.deref(args[0]) || args[0] - - // support `npm help package.json` - section = section.replace('.json', '-json') + // `npm help package.json` + const arg = (this.npm.deref(args[0]) || args[0]).replace('.json', '-json') - const manroot = path.resolve(__dirname, '..', '..', 'man') // find either section.n or npm-section.n - const f = `${manroot}/${manSearch}/?(npm-)${section}.[0-9]*` - let mans = await glob(globify(f)) - mans = mans.sort((a, b) => { + const f = globify(path.resolve(this.npm.npmRoot, `man/${manSearch}/?(npm-)${arg}.[0-9]*`)) + + const [man] = await glob(f).then(r => r.sort((a, b) => { // Prefer the page with an npm prefix, if there's only one. const aHasPrefix = manNpmPrefixRegex.test(a) const bHasPrefix = manNpmPrefixRegex.test(b) if (aHasPrefix !== bHasPrefix) { + /* istanbul ignore next */ return aHasPrefix ? -1 : 1 } @@ -76,6 +78,7 @@ class Help extends BaseCommand { const aManNumberMatch = a.match(manNumberRegex) const bManNumberMatch = b.match(manNumberRegex) if (aManNumberMatch) { + /* istanbul ignore next */ if (!bManNumberMatch) { return -1 } @@ -88,14 +91,9 @@ class Help extends BaseCommand { } return localeCompare(a, b) - }) - const man = mans[0] + })) - if (man) { - await this.viewMan(man) - } else { - return this.helpSearch(args) - } + return man ? 
this.viewMan(man) : this.helpSearch(args) } helpSearch (args) { @@ -103,62 +101,31 @@ class Help extends BaseCommand { } async viewMan (man) { - const env = {} - Object.keys(process.env).forEach(function (i) { - env[i] = process.env[i] - }) const viewer = this.npm.config.get('viewer') - const opts = { - env, - stdio: 'inherit', + if (viewer === 'browser') { + return openUrl(this.npm, this.htmlMan(man), 'help available at the following URL', true) } - let bin = 'man' - const args = [] - switch (viewer) { - case 'woman': - bin = 'emacsclient' - args.push('-e', `(woman-find-file '${man}')`) - break - - case 'browser': - await openUrl(this.npm, this.htmlMan(man), 'help available at the following URL', true) - return - - default: - args.push(man) - break + let args = ['man', [man]] + if (viewer === 'woman') { + args = ['emacsclient', ['-e', `(woman-find-file '${man}')`]] } - const proc = spawn(bin, args, opts) - return new Promise((resolve, reject) => { - proc.on('exit', (code) => { - if (code) { - return reject(new Error(`help process exited with code: ${code}`)) - } - - return resolve() - }) + return spawn(...args, { stdio: 'inherit' }).catch(err => { + if (err.code) { + throw new Error(`help process exited with code: ${err.code}`) + } else { + throw err + } }) } // Returns the path to the html version of the man page htmlMan (man) { - let sect = man.match(manNumberRegex)[1] + const sect = manSectionNames[man.match(manNumberRegex)[1]] const f = path.basename(man).replace(manNumberRegex, '') - switch (sect) { - case '1': - sect = 'commands' - break - case '5': - sect = 'configuring-npm' - break - case '7': - sect = 'using-npm' - break - } - return 'file:///' + path.resolve(__dirname, '..', '..', 'docs', 'output', sect, f + '.html') + return 'file:///' + path.resolve(this.npm.npmRoot, `docs/output/${sect}/${f}.html`) } } module.exports = Help diff --git a/deps/npm/lib/commands/hook.js b/deps/npm/lib/commands/hook.js index 084741c0c5eeaf..b0f52a801f5717 100644 --- a/deps/npm/lib/commands/hook.js +++ b/deps/npm/lib/commands/hook.js @@ -19,12 +19,8 @@ class Hook extends BaseCommand { 'update ', ] - static ignoreImplicitWorkspace = true - async exec (args) { - return otplease(this.npm, { - ...this.npm.flatOptions, - }, (opts) => { + return otplease(this.npm, { ...this.npm.flatOptions }, (opts) => { switch (args[0]) { case 'add': return this.add(args[1], args[2], args[3], opts) @@ -49,9 +45,7 @@ class Hook extends BaseCommand { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!this.npm.silent) { - this.npm.output(`+ ${this.hookName(hook)} ${ - opts.unicode ? ' ➜ ' : ' -> ' - } ${hook.endpoint}`) + this.npm.output(`+ ${this.hookName(hook)} ${opts.unicode ? ' ➜ ' : ' -> '} ${hook.endpoint}`) } } @@ -104,9 +98,7 @@ class Hook extends BaseCommand { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!this.npm.silent) { - this.npm.output(`- ${this.hookName(hook)} ${ - opts.unicode ? ' ✘ ' : ' X ' - } ${hook.endpoint}`) + this.npm.output(`- ${this.hookName(hook)} ${opts.unicode ? ' ✘ ' : ' X '} ${hook.endpoint}`) } } @@ -118,9 +110,7 @@ class Hook extends BaseCommand { this.npm.output(Object.keys(hook).join('\t')) this.npm.output(Object.keys(hook).map(k => hook[k]).join('\t')) } else if (!this.npm.silent) { - this.npm.output(`+ ${this.hookName(hook)} ${ - opts.unicode ? ' ➜ ' : ' -> ' - } ${hook.endpoint}`) + this.npm.output(`+ ${this.hookName(hook)} ${opts.unicode ? 
' ➜ ' : ' -> '} ${hook.endpoint}`) } } diff --git a/deps/npm/lib/commands/init.js b/deps/npm/lib/commands/init.js index 02a43b0ef0960f..16ece46589d7cb 100644 --- a/deps/npm/lib/commands/init.js +++ b/deps/npm/lib/commands/init.js @@ -10,6 +10,8 @@ const PackageJson = require('@npmcli/package-json') const log = require('../utils/log-shim.js') const updateWorkspaces = require('../workspaces/update-workspaces.js') +const posixPath = p => p.split('\\').join('/') + const BaseCommand = require('../base-command.js') class Init extends BaseCommand { @@ -26,23 +28,24 @@ class Init extends BaseCommand { static name = 'init' static usage = [ - ' (same as `npx )', + ' (same as `npx `)', '<@scope> (same as `npx <@scope>/create`)', ] + static workspaces = true static ignoreImplicitWorkspace = false async exec (args) { // npm exec style if (args.length) { - return (await this.execCreate({ args, path: process.cwd() })) + return await this.execCreate(args) } // no args, uses classic init-package-json boilerplate await this.template() } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { // if the root package is uninitiated, take care of it first if (this.npm.flatOptions.includeWorkspaceRoot) { await this.exec(args) @@ -51,7 +54,16 @@ class Init extends BaseCommand { // reads package.json for the top-level folder first, by doing this we // ensure the command throw if no package.json is found before trying // to create a workspace package.json file or its folders - const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')) + const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')).catch((err) => { + if (err.code === 'ENOENT') { + log.warn('Missing package.json. Try with `--include-workspace-root`.') + } + throw err + }) + + // these are workspaces that are being created, so we cant use + // this.setWorkspaces() + const filters = this.npm.config.get('workspace') const wPath = filterArg => resolve(this.npm.localPrefix, filterArg) const workspacesPaths = [] @@ -61,8 +73,8 @@ class Init extends BaseCommand { const path = wPath(filterArg) await mkdir(path, { recursive: true }) workspacesPaths.push(path) - await this.execCreate({ args, path }) - await this.setWorkspace({ pkg, workspacePath: path }) + await this.execCreate(args, path) + await this.setWorkspace(pkg, path) } return } @@ -73,14 +85,14 @@ class Init extends BaseCommand { await mkdir(path, { recursive: true }) workspacesPaths.push(path) await this.template(path) - await this.setWorkspace({ pkg, workspacePath: path }) + await this.setWorkspace(pkg, path) } // reify packages once all workspaces have been initialized await this.update(workspacesPaths) } - async execCreate ({ args, path }) { + async execCreate (args, path = process.cwd()) { const [initerName, ...otherArgs] = args let packageName = initerName @@ -95,8 +107,7 @@ class Init extends BaseCommand { const req = npa(initerName) if (req.type === 'git' && req.hosted) { const { user, project } = req.hosted - packageName = initerName - .replace(user + '/' + project, user + '/create-' + project) + packageName = initerName.replace(`${user}/${project}`, `${user}/create-${project}`) } else if (req.registry) { packageName = `${req.name.replace(/^(@[^/]+\/)?/, '$1create-')}@${req.rawSpec}` } else { @@ -174,7 +185,7 @@ class Init extends BaseCommand { }) } - async setWorkspace ({ pkg, workspacePath }) { + async setWorkspace (pkg, workspacePath) { const workspaces = await mapWorkspaces({ cwd: this.npm.localPrefix, pkg }) // skip setting workspace if current 
package.json glob already satisfies it @@ -199,7 +210,7 @@ class Init extends BaseCommand { pkgJson.update({ workspaces: [ ...(pkgJson.content.workspaces || []), - relative(this.npm.localPrefix, workspacePath), + posixPath(relative(this.npm.localPrefix, workspacePath)), ], }) @@ -210,9 +221,7 @@ class Init extends BaseCommand { // translate workspaces paths into an array containing workspaces names const workspaces = [] for (const path of workspacesPaths) { - const pkgPath = resolve(path, 'package.json') - const { name } = await rpj(pkgPath) - .catch(() => ({})) + const { name } = await rpj(resolve(path, 'package.json')).catch(() => ({})) if (name) { workspaces.push(name) diff --git a/deps/npm/lib/commands/install-ci-test.js b/deps/npm/lib/commands/install-ci-test.js index 9977a2edc5641c..f7a357ba6e1246 100644 --- a/deps/npm/lib/commands/install-ci-test.js +++ b/deps/npm/lib/commands/install-ci-test.js @@ -7,7 +7,7 @@ class InstallCITest extends CI { static description = 'Install a project with a clean slate and run tests' static name = 'install-ci-test' - async exec (args, cb) { + async exec (args) { await this.npm.exec('ci', args) return this.npm.exec('test', []) } diff --git a/deps/npm/lib/commands/install-test.js b/deps/npm/lib/commands/install-test.js index 191d70909f9e75..11f22e535403cc 100644 --- a/deps/npm/lib/commands/install-test.js +++ b/deps/npm/lib/commands/install-test.js @@ -7,7 +7,7 @@ class InstallTest extends Install { static description = 'Install package(s) and run tests' static name = 'install-test' - async exec (args, cb) { + async exec (args) { await this.npm.exec('install', args) return this.npm.exec('test', []) } diff --git a/deps/npm/lib/commands/login.js b/deps/npm/lib/commands/login.js index 7f6898d00ba93d..dc4ed8a67acd97 100644 --- a/deps/npm/lib/commands/login.js +++ b/deps/npm/lib/commands/login.js @@ -13,8 +13,6 @@ class Login extends BaseCommand { 'auth-type', ] - static ignoreImplicitWorkspace = true - async exec (args) { const scope = this.npm.config.get('scope') let registry = this.npm.config.get('registry') diff --git a/deps/npm/lib/commands/logout.js b/deps/npm/lib/commands/logout.js index 7c2a7f0b2f830d..aea5e93652b0e8 100644 --- a/deps/npm/lib/commands/logout.js +++ b/deps/npm/lib/commands/logout.js @@ -11,8 +11,6 @@ class Logout extends BaseCommand { 'scope', ] - static ignoreImplicitWorkspace = true - async exec (args) { const registry = this.npm.config.get('registry') const scope = this.npm.config.get('scope') diff --git a/deps/npm/lib/commands/ls.js b/deps/npm/lib/commands/ls.js index 7eebdf691683fb..2213e7937407a9 100644 --- a/deps/npm/lib/commands/ls.js +++ b/deps/npm/lib/commands/ls.js @@ -178,11 +178,9 @@ class LS extends ArboristWorkspaceCmd { e.code === 'EJSONPARSE' && e.path === resolve(path, 'package.json')) this.npm.outputBuffer( - json - ? jsonOutput({ path, problems, result, rootError, seenItems }) - : parseable - ? parseableOutput({ seenNodes, global, long }) - : humanOutput({ color, result, seenItems, unicode }) + json ? jsonOutput({ path, problems, result, rootError, seenItems }) : + parseable ? 
parseableOutput({ seenNodes, global, long }) : + humanOutput({ color, result, seenItems, unicode }) ) // if filtering items, should exit with error code on no results @@ -402,7 +400,7 @@ const getJsonOutputItem = (node, { global, long }) => { return augmentItemWithIncludeMetadata(node, item) } -const filterByEdgesTypes = ({ link, omit = [] }) => (edge) => { +const filterByEdgesTypes = ({ link, omit }) => (edge) => { for (const omitType of omit) { if (edge[omitType]) { return false diff --git a/deps/npm/lib/commands/org.js b/deps/npm/lib/commands/org.js index f49556c8d6a195..575ff75e2a6cf3 100644 --- a/deps/npm/lib/commands/org.js +++ b/deps/npm/lib/commands/org.js @@ -13,7 +13,6 @@ class Org extends BaseCommand { ] static params = ['registry', 'otp', 'json', 'parseable'] - static ignoreImplicitWorkspace = true async completion (opts) { const argv = opts.conf.argv.remain @@ -32,7 +31,7 @@ class Org extends BaseCommand { } } - async exec ([cmd, orgname, username, role], cb) { + async exec ([cmd, orgname, username, role]) { return otplease(this.npm, { ...this.npm.flatOptions, }, opts => { @@ -139,15 +138,15 @@ class Org extends BaseCommand { this.npm.output(JSON.stringify(roster, null, 2)) } else if (opts.parseable) { this.npm.output(['user', 'role'].join('\t')) - Object.keys(roster).forEach(user => { - this.npm.output([user, roster[user]].join('\t')) + Object.keys(roster).forEach(u => { + this.npm.output([u, roster[u]].join('\t')) }) } else if (!this.npm.silent) { const table = new Table({ head: ['user', 'role'] }) Object.keys(roster) .sort() - .forEach(user => { - table.push([user, roster[user]]) + .forEach(u => { + table.push([u, roster[u]]) }) this.npm.output(table.toString()) } diff --git a/deps/npm/lib/commands/outdated.js b/deps/npm/lib/commands/outdated.js index 9e2060658ed72d..5e8a4e0d2168c5 100644 --- a/deps/npm/lib/commands/outdated.js +++ b/deps/npm/lib/commands/outdated.js @@ -1,5 +1,5 @@ const os = require('os') -const path = require('path') +const { resolve } = require('path') const pacote = require('pacote') const table = require('text-table') const chalk = require('chalk') @@ -26,7 +26,7 @@ class Outdated extends ArboristWorkspaceCmd { ] async exec (args) { - const global = path.resolve(this.npm.globalDir, '..') + const global = resolve(this.npm.globalDir, '..') const where = this.npm.global ? 
global : this.npm.prefix diff --git a/deps/npm/lib/commands/owner.js b/deps/npm/lib/commands/owner.js index 824b64e044ecf2..3a997db800db7d 100644 --- a/deps/npm/lib/commands/owner.js +++ b/deps/npm/lib/commands/owner.js @@ -32,6 +32,7 @@ class Owner extends BaseCommand { 'ls ', ] + static workspaces = true static ignoreImplicitWorkspace = false async completion (opts) { @@ -82,8 +83,8 @@ class Owner extends BaseCommand { } } - async execWorkspaces ([action, ...args], filters) { - await this.setWorkspaces(filters) + async execWorkspaces ([action, ...args]) { + await this.setWorkspaces() // ls pkg or owner add/rm package if ((action === 'ls' && args.length > 0) || args.length > 1) { const implicitWorkspaces = this.npm.config.get('workspace', 'default') @@ -119,7 +120,7 @@ class Owner extends BaseCommand { this.npm.output(maintainers.map(m => `${m.name} <${m.email}>`).join('\n')) } } catch (err) { - log.error('owner ls', "Couldn't get owner data", pkg) + log.error('owner ls', "Couldn't get owner data", npmFetch.cleanUrl(pkg)) throw err } } diff --git a/deps/npm/lib/commands/pack.js b/deps/npm/lib/commands/pack.js index c6a74804642f66..74e80e573c2e92 100644 --- a/deps/npm/lib/commands/pack.js +++ b/deps/npm/lib/commands/pack.js @@ -18,6 +18,7 @@ class Pack extends BaseCommand { ] static usage = [''] + static workspaces = true static ignoreImplicitWorkspace = false async exec (args) { @@ -64,7 +65,7 @@ class Pack extends BaseCommand { } } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { // If they either ask for nothing, or explicitly include '.' in the args, // we effectively translate that into each workspace requested @@ -75,7 +76,7 @@ class Pack extends BaseCommand { return this.exec(args) } - await this.setWorkspaces(filters) + await this.setWorkspaces() return this.exec([...this.workspacePaths, ...args.filter(a => a !== '.')]) } } diff --git a/deps/npm/lib/commands/ping.js b/deps/npm/lib/commands/ping.js index 22039214689a96..c79e6a96cee405 100644 --- a/deps/npm/lib/commands/ping.js +++ b/deps/npm/lib/commands/ping.js @@ -1,3 +1,4 @@ +const { cleanUrl } = require('npm-registry-fetch') const log = require('../utils/log-shim') const pingUtil = require('../utils/ping.js') const BaseCommand = require('../base-command.js') @@ -6,17 +7,17 @@ class Ping extends BaseCommand { static description = 'Ping npm registry' static params = ['registry'] static name = 'ping' - static ignoreImplicitWorkspace = true async exec (args) { - log.notice('PING', this.npm.config.get('registry')) + const cleanRegistry = cleanUrl(this.npm.config.get('registry')) + log.notice('PING', cleanRegistry) const start = Date.now() const details = await pingUtil({ ...this.npm.flatOptions }) const time = Date.now() - start log.notice('PONG', `${time}ms`) if (this.npm.config.get('json')) { this.npm.output(JSON.stringify({ - registry: this.npm.config.get('registry'), + registry: cleanRegistry, time, details, }, null, 2)) diff --git a/deps/npm/lib/commands/pkg.js b/deps/npm/lib/commands/pkg.js index 5fac9bfb54683b..5cdcd207887c9e 100644 --- a/deps/npm/lib/commands/pkg.js +++ b/deps/npm/lib/commands/pkg.js @@ -20,6 +20,7 @@ class Pkg extends BaseCommand { 'workspaces', ] + static workspaces = true static ignoreImplicitWorkspace = false async exec (args, { prefix } = {}) { @@ -49,8 +50,8 @@ class Pkg extends BaseCommand { } } - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) + async execWorkspaces (args) { + await this.setWorkspaces() const result = {} for (const [workspaceName, 
workspacePath] of this.workspaces.entries()) { this.prefix = workspacePath @@ -81,7 +82,7 @@ class Pkg extends BaseCommand { // only outputs if not running with workspaces config, // in case you're retrieving info for workspaces the pkgWorkspaces // will handle the output to make sure it get keyed by ws name - if (!this.workspaces) { + if (!this.npm.config.get('workspaces')) { this.npm.output(JSON.stringify(result, null, 2)) } diff --git a/deps/npm/lib/commands/prefix.js b/deps/npm/lib/commands/prefix.js index dd0e34c3d3bd95..264b819fc7692a 100644 --- a/deps/npm/lib/commands/prefix.js +++ b/deps/npm/lib/commands/prefix.js @@ -5,7 +5,6 @@ class Prefix extends BaseCommand { static name = 'prefix' static params = ['global'] static usage = ['[-g]'] - static ignoreImplicitWorkspace = true async exec (args) { return this.npm.output(this.npm.prefix) diff --git a/deps/npm/lib/commands/profile.js b/deps/npm/lib/commands/profile.js index 27060cf73a6502..e42ebb276d202e 100644 --- a/deps/npm/lib/commands/profile.js +++ b/deps/npm/lib/commands/profile.js @@ -54,8 +54,6 @@ class Profile extends BaseCommand { 'otp', ] - static ignoreImplicitWorkspace = true - async completion (opts) { var argv = opts.conf.argv.remain @@ -221,7 +219,7 @@ class Profile extends BaseCommand { newUser[prop] = value - const result = await otplease(this.npm, conf, conf => npmProfile.set(newUser, conf)) + const result = await otplease(this.npm, conf, c => npmProfile.set(newUser, c)) if (this.npm.config.get('json')) { this.npm.output(JSON.stringify({ [prop]: result[prop] }, null, 2)) diff --git a/deps/npm/lib/commands/publish.js b/deps/npm/lib/commands/publish.js index 23323a174ed897..76faea9457f748 100644 --- a/deps/npm/lib/commands/publish.js +++ b/deps/npm/lib/commands/publish.js @@ -38,6 +38,7 @@ class Publish extends BaseCommand { ] static usage = [''] + static workspaces = true static ignoreImplicitWorkspace = false async exec (args) { @@ -123,7 +124,7 @@ class Publish extends BaseCommand { log.notice('', msg) if (!dryRun) { - await otplease(this.npm, opts, opts => libpub(manifest, tarballData, opts)) + await otplease(this.npm, opts, o => libpub(manifest, tarballData, o)) } if (spec.type === 'directory' && !ignoreScripts) { @@ -155,14 +156,14 @@ class Publish extends BaseCommand { return pkgContents } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { // Suppresses JSON output in publish() so we can handle it here this.suppressOutput = true const results = {} const json = this.npm.config.get('json') const { silent } = this.npm - await this.setWorkspaces(filters) + await this.setWorkspaces() for (const [name, workspace] of this.workspaces.entries()) { let pkgContents diff --git a/deps/npm/lib/commands/query.js b/deps/npm/lib/commands/query.js index 5f05ab3164d7c5..b5f4d8e57ddf5e 100644 --- a/deps/npm/lib/commands/query.js +++ b/deps/npm/lib/commands/query.js @@ -41,6 +41,7 @@ class Query extends BaseCommand { static name = 'query' static usage = [''] + static workspaces = true static ignoreImplicitWorkspace = false static params = [ @@ -70,8 +71,8 @@ class Query extends BaseCommand { this.npm.output(this.parsedResponse) } - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) + async execWorkspaces (args) { + await this.setWorkspaces() const opts = { ...this.npm.flatOptions, path: this.npm.prefix, diff --git a/deps/npm/lib/commands/restart.js b/deps/npm/lib/commands/restart.js index 575928b2202cc8..7ca2eb323da3ca 100644 --- a/deps/npm/lib/commands/restart.js +++ 
b/deps/npm/lib/commands/restart.js @@ -8,7 +8,6 @@ class Restart extends LifecycleCmd { 'ignore-scripts', 'script-shell', ] - - static ignoreImplicitWorkspace = false } + module.exports = Restart diff --git a/deps/npm/lib/commands/root.js b/deps/npm/lib/commands/root.js index b814034def5ab6..7749c602456b77 100644 --- a/deps/npm/lib/commands/root.js +++ b/deps/npm/lib/commands/root.js @@ -3,7 +3,6 @@ class Root extends BaseCommand { static description = 'Display npm root' static name = 'root' static params = ['global'] - static ignoreImplicitWorkspace = true async exec () { this.npm.output(this.npm.dir) diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js index 3852f7ba1820f5..51746c5e5285d9 100644 --- a/deps/npm/lib/commands/run-script.js +++ b/deps/npm/lib/commands/run-script.js @@ -41,6 +41,7 @@ class RunScript extends BaseCommand { static name = 'run-script' static usage = [' [-- ]'] + static workspaces = true static ignoreImplicitWorkspace = false static isShellout = true @@ -62,11 +63,11 @@ class RunScript extends BaseCommand { } } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { if (args.length) { - return this.runWorkspaces(args, filters) + return this.runWorkspaces(args) } else { - return this.listWorkspaces(args, filters) + return this.listWorkspaces(args) } } @@ -121,11 +122,11 @@ class RunScript extends BaseCommand { banner: !this.npm.silent, } - for (const [event, args] of events) { + for (const [ev, evArgs] of events) { await runScript({ ...opts, - event, - args, + event: ev, + args: evArgs, }) } } @@ -200,7 +201,7 @@ class RunScript extends BaseCommand { async runWorkspaces (args, filters) { const res = [] - await this.setWorkspaces(filters) + await this.setWorkspaces() for (const workspacePath of this.workspacePaths) { const pkg = await rpj(`${workspacePath}/package.json`) @@ -233,7 +234,7 @@ class RunScript extends BaseCommand { } async listWorkspaces (args, filters) { - await this.setWorkspaces(filters) + await this.setWorkspaces() if (this.npm.silent) { return diff --git a/deps/npm/lib/commands/search.js b/deps/npm/lib/commands/search.js index 8751e9e7d22fd0..7419e974546888 100644 --- a/deps/npm/lib/commands/search.js +++ b/deps/npm/lib/commands/search.js @@ -51,7 +51,6 @@ class Search extends BaseCommand { ] static usage = ['[search terms ...]'] - static ignoreImplicitWorkspace = true async exec (args) { const opts = { diff --git a/deps/npm/lib/commands/start.js b/deps/npm/lib/commands/start.js index d84ad23ebafa63..a16eade24d21ed 100644 --- a/deps/npm/lib/commands/start.js +++ b/deps/npm/lib/commands/start.js @@ -8,7 +8,6 @@ class Start extends LifecycleCmd { 'ignore-scripts', 'script-shell', ] - - static ignoreImplicitWorkspace = false } + module.exports = Start diff --git a/deps/npm/lib/commands/stop.js b/deps/npm/lib/commands/stop.js index db497675a694b6..ae3031f06dd96c 100644 --- a/deps/npm/lib/commands/stop.js +++ b/deps/npm/lib/commands/stop.js @@ -8,7 +8,6 @@ class Stop extends LifecycleCmd { 'ignore-scripts', 'script-shell', ] - - static ignoreImplicitWorkspace = false } + module.exports = Stop diff --git a/deps/npm/lib/commands/test.js b/deps/npm/lib/commands/test.js index 43be934894dd7a..eccc47fc3341c7 100644 --- a/deps/npm/lib/commands/test.js +++ b/deps/npm/lib/commands/test.js @@ -8,7 +8,6 @@ class Test extends LifecycleCmd { 'ignore-scripts', 'script-shell', ] - - static ignoreImplicitWorkspace = false } + module.exports = Test diff --git a/deps/npm/lib/commands/token.js 
b/deps/npm/lib/commands/token.js index de8e61101d8acd..8da83118757144 100644 --- a/deps/npm/lib/commands/token.js +++ b/deps/npm/lib/commands/token.js @@ -14,7 +14,6 @@ class Token extends BaseCommand { static name = 'token' static usage = ['list', 'revoke ', 'create [--read-only] [--cidr=list]'] static params = ['read-only', 'cidr', 'registry', 'otp'] - static ignoreImplicitWorkspace = true async completion (opts) { const argv = opts.conf.argv.remain @@ -30,7 +29,7 @@ class Token extends BaseCommand { throw new Error(argv[2] + ' not recognized') } - async exec (args, cb) { + async exec (args) { log.gauge.show('token') if (args.length === 0) { return this.list() @@ -121,9 +120,7 @@ class Token extends BaseCommand { }) await Promise.all( toRemove.map(key => { - return otplease(this.npm, conf, conf => { - return profile.removeToken(key, conf) - }) + return otplease(this.npm, conf, c => profile.removeToken(key, c)) }) ) if (conf.json) { @@ -144,9 +141,7 @@ class Token extends BaseCommand { const validCIDR = this.validateCIDRList(cidr) log.info('token', 'creating') const result = await pulseTillDone.withPromise( - otplease(this.npm, conf, conf => { - return profile.createToken(password, readonly, validCIDR, conf) - }) + otplease(this.npm, conf, c => profile.createToken(password, readonly, validCIDR, c)) ) delete result.key delete result.updated @@ -216,7 +211,7 @@ class Token extends BaseCommand { } validateCIDRList (cidrs) { - const maybeList = cidrs ? (Array.isArray(cidrs) ? cidrs : [cidrs]) : [] + const maybeList = [].concat(cidrs).filter(Boolean) const list = maybeList.length === 1 ? maybeList[0].split(/,\s*/) : maybeList for (const cidr of list) { if (isCidrV6(cidr)) { diff --git a/deps/npm/lib/commands/uninstall.js b/deps/npm/lib/commands/uninstall.js index e4a193cc5ca4ed..8c44f2e32106c2 100644 --- a/deps/npm/lib/commands/uninstall.js +++ b/deps/npm/lib/commands/uninstall.js @@ -20,19 +20,13 @@ class Uninstall extends ArboristWorkspaceCmd { } async exec (args) { - // the /path/to/node_modules/.. - const path = this.npm.global - ? resolve(this.npm.globalDir, '..') - : this.npm.localPrefix - if (!args.length) { if (!this.npm.global) { throw new Error('Must provide a package name to remove') } else { - let pkg - try { - pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')) + const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')) + args.push(pkg.name) } catch (er) { if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') { throw er @@ -40,11 +34,14 @@ class Uninstall extends ArboristWorkspaceCmd { throw this.usageError() } } - - args.push(pkg.name) } } + // the /path/to/node_modules/.. + const path = this.npm.global + ? 
resolve(this.npm.globalDir, '..') + : this.npm.localPrefix + const opts = { ...this.npm.flatOptions, path, diff --git a/deps/npm/lib/commands/unpublish.js b/deps/npm/lib/commands/unpublish.js index 268c8c3daedbbd..9985e2e39f1405 100644 --- a/deps/npm/lib/commands/unpublish.js +++ b/deps/npm/lib/commands/unpublish.js @@ -21,6 +21,7 @@ class Unpublish extends BaseCommand { static name = 'unpublish' static params = ['dry-run', 'force', 'workspace', 'workspaces'] static usage = ['[]'] + static workspaces = true static ignoreImplicitWorkspace = false async getKeysOfVersions (name, opts) { @@ -130,15 +131,15 @@ class Unpublish extends BaseCommand { } if (!dryRun) { - await otplease(this.npm, opts, opts => libunpub(spec, opts)) + await otplease(this.npm, opts, o => libunpub(spec, o)) } if (!silent) { this.npm.output(`- ${pkgName}${pkgVersion}`) } } - async execWorkspaces (args, filters) { - await this.setWorkspaces(filters) + async execWorkspaces (args) { + await this.setWorkspaces() const force = this.npm.config.get('force') if (!force) { diff --git a/deps/npm/lib/commands/update.js b/deps/npm/lib/commands/update.js index be9d35093d43bf..fd30bcb41e2b3f 100644 --- a/deps/npm/lib/commands/update.js +++ b/deps/npm/lib/commands/update.js @@ -40,9 +40,7 @@ class Update extends ArboristWorkspaceCmd { async exec (args) { const update = args.length === 0 ? true : args const global = path.resolve(this.npm.globalDir, '..') - const where = this.npm.global - ? global - : this.npm.prefix + const where = this.npm.global ? global : this.npm.prefix // In the context of `npm update` the save // config value should default to `false` diff --git a/deps/npm/lib/commands/version.js b/deps/npm/lib/commands/version.js index ab59fff5a308cc..a5232836717917 100644 --- a/deps/npm/lib/commands/version.js +++ b/deps/npm/lib/commands/version.js @@ -22,6 +22,7 @@ class Version extends BaseCommand { 'include-workspace-root', ] + static workspaces = true static ignoreImplicitWorkspace = false /* eslint-disable-next-line max-len */ @@ -60,12 +61,12 @@ class Version extends BaseCommand { } } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { switch (args.length) { case 0: - return this.listWorkspaces(filters) + return this.listWorkspaces() case 1: - return this.changeWorkspaces(args, filters) + return this.changeWorkspaces(args) default: throw this.usageError() } @@ -80,9 +81,9 @@ class Version extends BaseCommand { return this.npm.output(`${prefix}${version}`) } - async changeWorkspaces (args, filters) { + async changeWorkspaces (args) { const prefix = this.npm.config.get('tag-version-prefix') - await this.setWorkspaces(filters) + await this.setWorkspaces() const updatedWorkspaces = [] for (const [name, path] of this.workspaces) { this.npm.output(name) @@ -120,9 +121,9 @@ class Version extends BaseCommand { } } - async listWorkspaces (filters) { + async listWorkspaces () { const results = {} - await this.setWorkspaces(filters) + await this.setWorkspaces() for (const path of this.workspacePaths) { const pj = resolve(path, 'package.json') // setWorkspaces has already parsed package.json so we know it won't error diff --git a/deps/npm/lib/commands/view.js b/deps/npm/lib/commands/view.js index 32b2d0f92a1a68..855b37b81d42f9 100644 --- a/deps/npm/lib/commands/view.js +++ b/deps/npm/lib/commands/view.js @@ -1,8 +1,3 @@ -/* eslint-disable no-console */ -// XXX: remove console.log later - -// npm view [pkg [pkg ...]] - const chalk = require('chalk') const columns = require('cli-columns') const fs = require('fs') @@ 
-31,8 +26,8 @@ class View extends BaseCommand { 'include-workspace-root', ] + static workspaces = true static ignoreImplicitWorkspace = false - static usage = ['[] [[.subfield]...]'] async completion (opts) { @@ -127,12 +122,12 @@ class View extends BaseCommand { const msg = await this.jsonData(reducedData, pckmnt._id) if (msg !== '') { - console.log(msg) + this.npm.output(msg) } } } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { if (!args.length) { args = ['.'] } @@ -150,7 +145,7 @@ class View extends BaseCommand { args = [''] // getData relies on this } const results = {} - await this.setWorkspaces(filters) + await this.setWorkspaces() for (const name of this.workspaceNames) { const wsPkg = `${name}${pkg.slice(1)}` const [pckmnt, data] = await this.getData(wsPkg, args) @@ -166,10 +161,10 @@ class View extends BaseCommand { if (wholePackument) { data.map((v) => this.prettyView(pckmnt, v[Object.keys(v)[0]][''])) } else { - console.log(`${name}:`) + this.npm.output(`${name}:`) const msg = await this.jsonData(reducedData, pckmnt._id) if (msg !== '') { - console.log(msg) + this.npm.output(msg) } } } else { @@ -180,7 +175,7 @@ class View extends BaseCommand { } } if (Object.keys(results).length > 0) { - console.log(JSON.stringify(results, null, 2)) + this.npm.output(JSON.stringify(results, null, 2)) } } @@ -317,13 +312,13 @@ class View extends BaseCommand { return msg.trim() } - prettyView (packument, manifest) { + prettyView (packu, manifest) { // More modern, pretty printing of default view const unicode = this.npm.config.get('unicode') const tags = [] - Object.keys(packument['dist-tags']).forEach((t) => { - const version = packument['dist-tags'][t] + Object.keys(packu['dist-tags']).forEach((t) => { + const version = packu['dist-tags'][t] tags.push(`${chalk.bold.green(t)}: ${version}`) }) const unpackedSize = manifest.dist.unpackedSize && @@ -333,10 +328,10 @@ class View extends BaseCommand { name: chalk.green(manifest.name), version: chalk.green(manifest.version), bins: Object.keys(manifest.bin || {}), - versions: chalk.yellow(packument.versions.length + ''), + versions: chalk.yellow(packu.versions.length + ''), description: manifest.description, deprecated: manifest.deprecated, - keywords: packument.keywords || [], + keywords: packu.keywords || [], license: typeof licenseField === 'string' ? licenseField : (licenseField.type || 'Proprietary'), @@ -347,9 +342,9 @@ class View extends BaseCommand { name: chalk.yellow(manifest._npmUser.name), email: chalk.cyan(manifest._npmUser.email), }), - modified: !packument.time ? undefined - : chalk.yellow(relativeDate(packument.time[manifest.version])), - maintainers: (packument.maintainers || []).map((u) => unparsePerson({ + modified: !packu.time ? undefined + : chalk.yellow(relativeDate(packu.time[manifest.version])), + maintainers: (packu.maintainers || []).map((u) => unparsePerson({ name: chalk.yellow(u.name), email: chalk.cyan(u.email), })), @@ -376,61 +371,61 @@ class View extends BaseCommand { info.license = chalk.green(info.license) } - console.log('') - console.log( + this.npm.output('') + this.npm.output( chalk.underline.bold(`${info.name}@${info.version}`) + ' | ' + info.license + ' | deps: ' + (info.deps.length ? 
chalk.cyan(info.deps.length) : chalk.green('none')) + ' | versions: ' + info.versions ) - info.description && console.log(info.description) + info.description && this.npm.output(info.description) if (info.repo || info.site) { - info.site && console.log(chalk.cyan(info.site)) + info.site && this.npm.output(chalk.cyan(info.site)) } const warningSign = unicode ? ' ⚠️ ' : '!!' - info.deprecated && console.log( + info.deprecated && this.npm.output( `\n${chalk.bold.red('DEPRECATED')}${ warningSign } - ${info.deprecated}` ) if (info.keywords.length) { - console.log('') - console.log('keywords:', chalk.yellow(info.keywords.join(', '))) + this.npm.output('') + this.npm.output('keywords:', chalk.yellow(info.keywords.join(', '))) } if (info.bins.length) { - console.log('') - console.log('bin:', chalk.yellow(info.bins.join(', '))) + this.npm.output('') + this.npm.output('bin:', chalk.yellow(info.bins.join(', '))) } - console.log('') - console.log('dist') - console.log('.tarball:', info.tarball) - console.log('.shasum:', info.shasum) - info.integrity && console.log('.integrity:', info.integrity) - info.unpackedSize && console.log('.unpackedSize:', info.unpackedSize) + this.npm.output('') + this.npm.output('dist') + this.npm.output('.tarball:', info.tarball) + this.npm.output('.shasum:', info.shasum) + info.integrity && this.npm.output('.integrity:', info.integrity) + info.unpackedSize && this.npm.output('.unpackedSize:', info.unpackedSize) const maxDeps = 24 if (info.deps.length) { - console.log('') - console.log('dependencies:') - console.log(columns(info.deps.slice(0, maxDeps), { padding: 1 })) + this.npm.output('') + this.npm.output('dependencies:') + this.npm.output(columns(info.deps.slice(0, maxDeps), { padding: 1 })) if (info.deps.length > maxDeps) { - console.log(`(...and ${info.deps.length - maxDeps} more.)`) + this.npm.output(`(...and ${info.deps.length - maxDeps} more.)`) } } if (info.maintainers && info.maintainers.length) { - console.log('') - console.log('maintainers:') - info.maintainers.forEach((u) => console.log('-', u)) + this.npm.output('') + this.npm.output('maintainers:') + info.maintainers.forEach((u) => this.npm.output('-', u)) } - console.log('') - console.log('dist-tags:') - console.log(columns(info.tags)) + this.npm.output('') + this.npm.output('dist-tags:') + this.npm.output(columns(info.tags)) if (info.publisher || info.modified) { let publishInfo = 'published' @@ -440,8 +435,8 @@ class View extends BaseCommand { if (info.publisher) { publishInfo += ` by ${info.publisher}` } - console.log('') - console.log(publishInfo) + this.npm.output('') + this.npm.output(publishInfo) } } } diff --git a/deps/npm/lib/commands/whoami.js b/deps/npm/lib/commands/whoami.js index 4497f9b3a542d5..154cc870391ba1 100644 --- a/deps/npm/lib/commands/whoami.js +++ b/deps/npm/lib/commands/whoami.js @@ -5,7 +5,6 @@ class Whoami extends BaseCommand { static description = 'Display npm username' static name = 'whoami' static params = ['registry'] - static ignoreImplicitWorkspace = true async exec (args) { const username = await getIdentity(this.npm, { ...this.npm.flatOptions }) diff --git a/deps/npm/lib/lifecycle-cmd.js b/deps/npm/lib/lifecycle-cmd.js index 41633a4ba389c9..848771a38355e5 100644 --- a/deps/npm/lib/lifecycle-cmd.js +++ b/deps/npm/lib/lifecycle-cmd.js @@ -5,12 +5,14 @@ const BaseCommand = require('./base-command.js') class LifecycleCmd extends BaseCommand { static usage = ['[-- ]'] static isShellout = true + static workspaces = true + static ignoreImplicitWorkspace = false - async exec (args, 
cb) { + async exec (args) { return this.npm.exec('run-script', [this.constructor.name, ...args]) } - async execWorkspaces (args, filters, cb) { + async execWorkspaces (args) { return this.npm.exec('run-script', [this.constructor.name, ...args]) } } diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index 0bdbcdb9efd8ba..841d145ddcbad7 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -20,25 +20,24 @@ const updateNotifier = require('./utils/update-notifier.js') const pkg = require('../package.json') const cmdList = require('./utils/cmd-list.js') -let warnedNonDashArg = false -const _load = Symbol('_load') - class Npm extends EventEmitter { static get version () { return pkg.version } - command = null updateNotification = null loadErr = null argv = [] + #command = null #runId = new Date().toISOString().replace(/[.:]/g, '_') #loadPromise = null #tmpFolder = null #title = 'npm' #argvClean = [] #chalk = null + #npmRoot = null + #warnedNonDashArg = false #outputBuffer = [] #logFile = new LogFile() @@ -52,12 +51,30 @@ class Npm extends EventEmitter { }, }) - config = new Config({ - npmPath: dirname(__dirname), - definitions, - flatten, - shorthands, - }) + // all these options are only used by tests in order to make testing more + // closely resemble real world usage. for now, npm has no programmatic API so + // it is ok to add stuff here, but we should not rely on it more than + // necessary. XXX: make these options not necessary by refactoring @npmcli/config + // - npmRoot: this is where npm looks for docs files and the builtin config + // - argv: this allows tests to extend argv in the same way the argv would + // be passed in via a CLI arg. + // - excludeNpmCwd: this is a hack to get @npmcli/config to stop walking up + // dirs to set a local prefix when it encounters the `npmRoot`. this + // allows tests created by tap inside this repo to not set the local + // prefix to `npmRoot` since that is the first dir it would encounter when + // doing implicit detection + constructor ({ npmRoot = dirname(__dirname), argv = [], excludeNpmCwd = false } = {}) { + super() + this.#npmRoot = npmRoot + this.config = new Config({ + npmPath: this.#npmRoot, + definitions, + flatten, + shorthands, + argv: [...process.argv, ...argv], + excludeNpmCwd, + }) + } get version () { return this.constructor.version @@ -89,44 +106,31 @@ class Npm extends EventEmitter { async cmd (cmd) { await this.load() - // when location isn't set and global isn't true - // check for a package.json at the localPrefix - // and set the location to project if found - // TODO: this logic can move to the config module loadLocalPrefix to - // avoid double stat calls and consolidate logic - if (this.config.isDefault('location') && !this.config.get('global')) { - const hasPackageJson = await fs.stat(resolve(this.config.localPrefix, 'package.json')) - .then((st) => st.isFile()) - .catch(() => false) - if (hasPackageJson) { - this.config.set('location', 'project') - } - } - - const command = this.deref(cmd) - if (!command) { + const cmdId = this.deref(cmd) + if (!cmdId) { throw Object.assign(new Error(`Unknown command ${cmd}`), { code: 'EUNKNOWNCOMMAND', }) } - const Impl = require(`./commands/${command}.js`) - const impl = new Impl(this) - return impl - } - // Call an npm command - async exec (cmd, args) { - const command = await this.cmd(cmd) - const timeEnd = this.time(`command:${cmd}`) + const Impl = require(`./commands/${cmdId}.js`) + const command = new Impl(this) // since 'test', 'start', 'stop', etc. 
commands re-enter this function // to call the run-script command, we need to only set it one time. - if (!this.command) { - process.env.npm_command = command.name - this.command = command.name - this.commandInstance = command + if (!this.#command) { + this.#command = command + process.env.npm_command = this.command } + return command + } + + // Call an npm command + async exec (cmd, args = this.argv) { + const command = await this.cmd(cmd) + const timeEnd = this.time(`command:${cmd}`) + // this is async but we dont await it, since its ok if it doesnt // finish before the command finishes running. it uses command and argv // so it must be initiated here, after the command name is set @@ -135,72 +139,27 @@ class Npm extends EventEmitter { // Options are prefixed by a hyphen-minus (-, \u2d). // Other dash-type chars look similar but are invalid. - if (!warnedNonDashArg) { - args - .filter(arg => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(arg)) - .forEach(arg => { - warnedNonDashArg = true - log.error( - 'arg', - 'Argument starts with non-ascii dash, this is probably invalid:', - arg - ) - }) - } - - const workspacesEnabled = this.config.get('workspaces') - // if cwd is a workspace, the default is set to [that workspace] - const implicitWorkspace = this.config.get('workspace', 'default').length > 0 - const workspacesFilters = this.config.get('workspace') - const includeWorkspaceRoot = this.config.get('include-workspace-root') - // only call execWorkspaces when we have workspaces explicitly set - // or when it is implicit and not in our ignore list - const hasWorkspaceFilters = workspacesFilters.length > 0 - const invalidWorkspaceConfig = workspacesEnabled === false && hasWorkspaceFilters - - // (-ws || -w foo) && (cwd is not a workspace || command is not ignoring implicit workspaces) - const filterByWorkspaces = (workspacesEnabled || hasWorkspaceFilters) && - (!implicitWorkspace || !command.ignoreImplicitWorkspace) - // normally this would go in the constructor, but our tests don't - // actually use a real npm object so this.npm.config isn't always - // populated. this is the compromise until we can make that a reality - // and then move this into the constructor. 
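The inline workspace dispatch deleted above is replaced by the single `command.cmdExec(args)` call later in this hunk, with each command class now declaring `static workspaces` and `static ignoreImplicitWorkspace` for itself. A minimal sketch of what such a dispatcher can look like (an illustration of the pattern only; the actual `base-command.js` change is not part of this hunk):

    class BaseCommandSketch {
      // per-command capability flags, as declared throughout this patch
      static workspaces = false             // command supports --workspaces / -w at all
      static ignoreImplicitWorkspace = true // skip "cwd is a workspace" auto-detection

      async cmdExec (args) {
        const filters = this.npm.config.get('workspace')   // values from -w/--workspace
        const enabled = this.npm.config.get('workspaces')  // boolean from --workspaces
        if (this.constructor.workspaces && (enabled || filters.length)) {
          // note: execWorkspaces no longer takes a filters argument;
          // setWorkspaces() reads the config itself
          return this.execWorkspaces(args)
        }
        return this.exec(args)
      }
    }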
- command.workspaces = workspacesEnabled - command.workspacePaths = null - // normally this would be evaluated in base-command#setWorkspaces, see - // above for explanation - command.includeWorkspaceRoot = includeWorkspaceRoot - - let execPromise = Promise.resolve() - if (this.config.get('usage')) { - this.output(command.usage) - } else if (invalidWorkspaceConfig) { - execPromise = Promise.reject( - new Error('Can not use --no-workspaces and --workspace at the same time')) - } else if (filterByWorkspaces) { - if (this.global) { - execPromise = Promise.reject(new Error('Workspaces not supported for global packages')) - } else { - execPromise = command.execWorkspaces(args, workspacesFilters) + if (!this.#warnedNonDashArg) { + const nonDashArgs = args.filter(a => /^[\u2010-\u2015\u2212\uFE58\uFE63\uFF0D]/.test(a)) + if (nonDashArgs.length) { + this.#warnedNonDashArg = true + log.error( + 'arg', + 'Argument starts with non-ascii dash, this is probably invalid:', + nonDashArgs.join(', ') + ) } - } else { - execPromise = command.exec(args) } - return execPromise.finally(timeEnd) + return command.cmdExec(args).finally(timeEnd) } async load () { if (!this.#loadPromise) { - this.#loadPromise = this.time('npm:load', async () => { - await this[_load]().catch((er) => { - this.loadErr = er - throw er - }) - if (this.config.get('force')) { - log.warn('using --force', 'Recommended protections disabled.') - } - }) + this.#loadPromise = this.time('npm:load', () => this.#load().catch((er) => { + this.loadErr = er + throw er + })) } return this.#loadPromise } @@ -240,21 +199,17 @@ class Npm extends EventEmitter { this.#title = t } - async [_load] () { - const node = this.time('npm:load:whichnode', () => { - try { - return which.sync(process.argv[0]) - } catch { - // TODO should we throw here? + async #load () { + await this.time('npm:load:whichnode', async () => { + // TODO should we throw here? 
+ const node = await which(process.argv[0]).catch(() => {}) + if (node && node.toUpperCase() !== process.execPath.toUpperCase()) { + log.verbose('node symlink', node) + process.execPath = node + this.config.execPath = node } }) - if (node && node.toUpperCase() !== process.execPath.toUpperCase()) { - log.verbose('node symlink', node) - process.execPath = node - this.config.execPath = node - } - await this.time('npm:load:configload', () => this.config.load()) // mkdir this separately since the logs dir can be set to @@ -323,6 +278,18 @@ class Npm extends EventEmitter { this.config.set('scope', `@${configScope}`, this.config.find('scope')) } }) + + if (this.config.get('force')) { + log.warn('using --force', 'Recommended protections disabled.') + } + } + + get isShellout () { + return this.#command?.constructor?.isShellout + } + + get command () { + return this.#command?.name } get flatOptions () { @@ -346,6 +313,10 @@ class Npm extends EventEmitter { return this.flatOptions.color } + get logColor () { + return this.flatOptions.logColor + } + get chalk () { if (!this.#chalk) { let level = chalk.level @@ -361,10 +332,6 @@ class Npm extends EventEmitter { return this.config.get('global') || this.config.get('location') === 'global' } - get logColor () { - return this.flatOptions.logColor - } - get silent () { return this.flatOptions.silent } @@ -401,6 +368,10 @@ class Npm extends EventEmitter { return this.#timers.file } + get npmRoot () { + return this.#npmRoot + } + get cache () { return this.config.get('cache') } @@ -425,6 +396,10 @@ class Npm extends EventEmitter { this.config.localPrefix = r } + get localPackage () { + return this.config.localPackage + } + get globalDir () { return process.platform !== 'win32' ? resolve(this.globalPrefix, 'lib', 'node_modules') diff --git a/deps/npm/lib/package-url-cmd.js b/deps/npm/lib/package-url-cmd.js index eac2bbe1b6d516..20e6a16fe15230 100644 --- a/deps/npm/lib/package-url-cmd.js +++ b/deps/npm/lib/package-url-cmd.js @@ -9,7 +9,6 @@ const log = require('./utils/log-shim') const BaseCommand = require('./base-command.js') class PackageUrlCommand extends BaseCommand { - static ignoreImplicitWorkspace = false static params = [ 'browser', 'registry', @@ -18,6 +17,8 @@ class PackageUrlCommand extends BaseCommand { 'include-workspace-root', ] + static workspaces = true + static ignoreImplicitWorkspace = false static usage = ['[ [ ...]]'] async exec (args) { @@ -41,11 +42,11 @@ class PackageUrlCommand extends BaseCommand { } } - async execWorkspaces (args, filters) { + async execWorkspaces (args) { if (args && args.length) { return this.exec(args) } - await this.setWorkspaces(filters) + await this.setWorkspaces() return this.exec(this.workspacePaths) } diff --git a/deps/npm/lib/utils/config/definitions.js b/deps/npm/lib/utils/config/definitions.js index 0f401d6572a59c..9ddbafd46f7bc8 100644 --- a/deps/npm/lib/utils/config/definitions.js +++ b/deps/npm/lib/utils/config/definitions.js @@ -163,7 +163,7 @@ define('access', { `, type: [null, 'restricted', 'public'], description: ` - If do not want your scoped package to be publicly viewable (and + If you do not want your scoped package to be publicly viewable (and installable) set \`--access=restricted\`. Unscoped packages can not be set to \`restricted\`. @@ -238,6 +238,7 @@ define('auth-type', { type: ['legacy', 'web'], description: ` What authentication strategy to use with \`login\`. + Note that if an \`otp\` config is given, this value will always be set to \`legacy\`. 
`, flatten, }) @@ -848,7 +849,7 @@ define('global-style', { type: Boolean, description: ` Only install direct dependencies in the top level \`node_modules\`, - but hoist on deeper dependendencies. + but hoist on deeper dependencies. Sets \`--install-strategy=shallow\`. `, deprecated: ` @@ -1465,7 +1466,13 @@ define('otp', { If not set, and a registry response fails with a challenge for a one-time password, npm will prompt on the command line for one. `, - flatten, + flatten (key, obj, flatOptions) { + flatten(key, obj, flatOptions) + if (obj.otp) { + obj['auth-type'] = 'legacy' + flatten('auth-type', obj, flatOptions) + } + }, }) define('package', { @@ -2021,7 +2028,7 @@ define('strict-peer-deps', { even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \`peerDependencies\` object. - When such and override is performed, a warning is printed, explaining the + When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \`--strict-peer-deps\` is set, then this warning is treated as a failure. `, @@ -2141,6 +2148,7 @@ define('unicode', { When set to true, npm uses unicode characters in the tree output. When false, it uses ascii characters instead of unicode glyphs. `, + flatten, }) define('update-notifier', { diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js index aee376120ba278..72c7b9fe4553fd 100644 --- a/deps/npm/lib/utils/error-message.js +++ b/deps/npm/lib/utils/error-message.js @@ -5,7 +5,21 @@ const replaceInfo = require('./replace-info.js') const { report } = require('./explain-eresolve.js') const log = require('./log-shim') -module.exports = (er, npm) => { +const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n') + +const jsonError = (er, npm, { summary, detail }) => { + if (npm?.config.loaded && npm.config.get('json')) { + return { + error: { + code: er.code, + summary: messageText(summary), + detail: messageText(detail), + }, + } + } +} + +const errorMessage = (er, npm) => { const short = [] const detail = [] const files = [] @@ -329,7 +343,7 @@ module.exports = (er, npm) => { 'Actual: ' + JSON.stringify({ npm: npm.version, - node: npm.config.loaded ? npm.config.get('node-version') : process.version, + node: process.version, }), ].join('\n'), ]) @@ -402,5 +416,7 @@ module.exports = (er, npm) => { break } - return { summary: short, detail, files } + return { summary: short, detail, files, json: jsonError(er, npm, { summary: short, detail }) } } + +module.exports = errorMessage diff --git a/deps/npm/lib/utils/exit-handler.js b/deps/npm/lib/utils/exit-handler.js index a9e061de7a4a53..b5fc7042bd0209 100644 --- a/deps/npm/lib/utils/exit-handler.js +++ b/deps/npm/lib/utils/exit-handler.js @@ -5,7 +5,6 @@ const log = require('./log-shim.js') const errorMessage = require('./error-message.js') const replaceInfo = require('./replace-info.js') -const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n') const indent = (val) => Array.isArray(val) ? val.map(v => indent(v)) : ` ${val}` let npm = null // set by the cli @@ -144,7 +143,7 @@ const exitHandler = err => { // will presumably print its own errors and exit with a proper status // code if there's a problem. If we got an error with a code=0, then... // something else went wrong along the way, so maybe an npm problem? 
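As the new `jsonError` helper above shows, `errorMessage()` now builds the `--json` envelope itself instead of leaving that to the exit handler. A self-contained illustration of the serialization (the sample tuples are made up; real `summary`/`detail` entries are `[prefix, ...parts]` arrays produced per error code):

    const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n')

    const summary = [['ERESOLVE', 'unable to resolve dependency tree']]
    const detail = [['', 'Fix the upstream dependency conflict, or retry with --force']]

    console.log(JSON.stringify({
      error: {
        code: 'ERESOLVE',
        summary: messageText(summary),  // 'unable to resolve dependency tree'
        detail: messageText(detail),    // 'Fix the upstream dependency conflict, ...'
      },
    }, null, 2))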
- const isShellout = npm.commandInstance && npm.commandInstance.constructor.isShellout + const isShellout = npm.isShellout const quietShellout = isShellout && typeof err.code === 'number' && err.code if (quietShellout) { exitCode = err.code @@ -181,7 +180,8 @@ const exitHandler = err => { } } - const { summary, detail, files = [] } = errorMessage(err, npm) + const { summary, detail, json, files = [] } = errorMessage(err, npm) + jsonError = json for (let [file, content] of files) { file = `${npm.logPath}${file}` @@ -189,8 +189,8 @@ const exitHandler = err => { try { fs.writeFileSync(file, content) detail.push(['', `\n\nFor a full report see:\n${file}`]) - } catch (err) { - log.warn('', `Could not write error message to ${file} due to ${err}`) + } catch (logFileErr) { + log.warn('', `Could not write error message to ${file} due to ${logFileErr}`) } } @@ -198,16 +198,6 @@ const exitHandler = err => { log.error(...errline) } - if (hasLoadedNpm && npm.config.get('json')) { - jsonError = { - error: { - code: err.code, - summary: messageText(summary), - detail: messageText(detail), - }, - } - } - if (typeof err.errno === 'number') { exitCode = err.errno } else if (typeof err.code === 'number') { diff --git a/deps/npm/lib/utils/explain-dep.js b/deps/npm/lib/utils/explain-dep.js index cd53a2269640e6..58258026491dc1 100644 --- a/deps/npm/lib/utils/explain-dep.js +++ b/deps/npm/lib/utils/explain-dep.js @@ -103,13 +103,13 @@ const explainDependents = ({ name, dependents }, depth, color) => { const maxLen = 50 const showNames = [] for (let i = max; i < dependents.length; i++) { - const { from: { name = 'the root project' } } = dependents[i] - len += name.length + const { from: { name: depName = 'the root project' } } = dependents[i] + len += depName.length if (len >= maxLen && i < dependents.length - 1) { showNames.push('...') break } - showNames.push(name) + showNames.push(depName) } const show = `(${showNames.join(', ')})` messages.push(`${dependents.length - max} more ${show}`) diff --git a/deps/npm/lib/utils/log-file.js b/deps/npm/lib/utils/log-file.js index 2935697ac98ebc..f663997308ed6b 100644 --- a/deps/npm/lib/utils/log-file.js +++ b/deps/npm/lib/utils/log-file.js @@ -1,10 +1,10 @@ const os = require('os') -const path = require('path') +const { join, dirname, basename } = require('path') const { format, promisify } = require('util') -const rimraf = promisify(require('rimraf')) const glob = promisify(require('glob')) const MiniPass = require('minipass') const fsMiniPass = require('fs-minipass') +const fs = require('fs/promises') const log = require('./log-shim') const padZero = (n, length) => n.toString().padStart(length.toString().length, '0') @@ -197,7 +197,7 @@ class LogFiles { try { const logPath = this.#getLogFilePath() - const logGlob = path.join(path.dirname(logPath), path.basename(logPath) + const logGlob = join(dirname(logPath), basename(logPath) // tell glob to only match digits .replace(/\d/g, '[0123456789]') // Handle the old (prior to 8.2.0) log file names which did not have a @@ -217,7 +217,7 @@ class LogFiles { for (const file of files.slice(0, toDelete)) { try { - await rimraf(file, { glob: false }) + await fs.rm(file, { force: true }) } catch (e) { log.silly('logfile', 'error removing log file', file, e) } diff --git a/deps/npm/lib/utils/npm-usage.js b/deps/npm/lib/utils/npm-usage.js index 947a3073bc5ffe..b04ad33f9dd79f 100644 --- a/deps/npm/lib/utils/npm-usage.js +++ b/deps/npm/lib/utils/npm-usage.js @@ -1,4 +1,3 @@ -const { dirname } = require('path') const { commands } = 
require('./cmd-list') const COL_MAX = 60 @@ -36,7 +35,7 @@ or on the command line via: npm --key=value More configuration info: npm help config Configuration fields: npm help 7 config -npm@${npm.version} ${dirname(dirname(__dirname))}` +npm@${npm.version} ${npm.npmRoot}` } const cmdNames = () => { diff --git a/deps/npm/lib/utils/open-url.js b/deps/npm/lib/utils/open-url.js index 379640773fa6e2..f882d0c9d39341 100644 --- a/deps/npm/lib/utils/open-url.js +++ b/deps/npm/lib/utils/open-url.js @@ -31,7 +31,7 @@ const open = async (npm, url, errMsg, isFile) => { if (!/^https?:$/.test(new URL(url).protocol)) { throw new Error() } - } catch (_) { + } catch { throw new Error('Invalid URL: ' + url) } } diff --git a/deps/npm/lib/utils/queryable.js b/deps/npm/lib/utils/queryable.js index 7c5bb7fe87baff..6acc1758ceea70 100644 --- a/deps/npm/lib/utils/queryable.js +++ b/deps/npm/lib/utils/queryable.js @@ -1,5 +1,4 @@ const util = require('util') -const _data = Symbol('data') const _delete = Symbol('delete') const _append = Symbol('append') @@ -236,6 +235,8 @@ const setter = ({ data, key, value, force }) => { } class Queryable { + #data = null + constructor (obj) { if (!obj || typeof obj !== 'object') { throw Object.assign(new Error('Queryable needs an object to query properties from.'), { @@ -243,7 +244,7 @@ class Queryable { }) } - this[_data] = obj + this.#data = obj } query (queries) { @@ -251,12 +252,12 @@ class Queryable { // with the legacy API lib/view.js is consuming, if at some point // we refactor that command then we can revisit making this nicer if (queries === '') { - return { '': this[_data] } + return { '': this.#data } } const q = query => getter({ - data: this[_data], + data: this.#data, key: query, }) @@ -283,7 +284,7 @@ class Queryable { // and assigns `value` to the last property of the query chain set (query, value, { force } = {}) { setter({ - data: this[_data], + data: this.#data, key: query, value, force, @@ -293,14 +294,14 @@ class Queryable { // deletes the value of the property found at `query` delete (query) { setter({ - data: this[_data], + data: this.#data, key: query, value: _delete, }) } toJSON () { - return this[_data] + return this.#data } [util.inspect.custom] () { diff --git a/deps/npm/lib/utils/read-user-info.js b/deps/npm/lib/utils/read-user-info.js index ac24396c6abb94..26d5b36d55b582 100644 --- a/deps/npm/lib/utils/read-user-info.js +++ b/deps/npm/lib/utils/read-user-info.js @@ -28,7 +28,7 @@ function readOTP (msg = otpPrompt, otp, isRetry) { } return read({ prompt: msg, default: otp || '' }) - .then((otp) => readOTP(msg, otp, true)) + .then((rOtp) => readOTP(msg, rOtp, true)) } function readPassword (msg = passwordPrompt, password, isRetry) { @@ -37,7 +37,7 @@ function readPassword (msg = passwordPrompt, password, isRetry) { } return read({ prompt: msg, silent: true, default: password || '' }) - .then((password) => readPassword(msg, password, true)) + .then((rPassword) => readPassword(msg, rPassword, true)) } function readUsername (msg = usernamePrompt, username, isRetry) { @@ -51,7 +51,7 @@ function readUsername (msg = usernamePrompt, username, isRetry) { } return read({ prompt: msg, default: username || '' }) - .then((username) => readUsername(msg, username, true)) + .then((rUsername) => readUsername(msg, rUsername, true)) } function readEmail (msg = emailPrompt, email, isRetry) { diff --git a/deps/npm/lib/utils/reify-output.js b/deps/npm/lib/utils/reify-output.js index b5c3a593b8db0b..5ac7fa4b01896b 100644 --- a/deps/npm/lib/utils/reify-output.js +++ 
b/deps/npm/lib/utils/reify-output.js @@ -12,7 +12,7 @@ const log = require('./log-shim.js') const { depth } = require('treeverse') const ms = require('ms') -const auditReport = require('npm-audit-report') +const npmAuditReport = require('npm-audit-report') const { readTree: getFundingInfo } = require('libnpmfund') const auditError = require('./audit-error.js') @@ -112,7 +112,7 @@ const getAuditReport = (npm, report) => { const defaultAuditLevel = npm.command !== 'audit' ? 'none' : 'low' const auditLevel = npm.flatOptions.auditLevel || defaultAuditLevel - const res = auditReport(report, { + const res = npmAuditReport(report, { reporter, ...npm.flatOptions, auditLevel, diff --git a/deps/npm/lib/workspaces/get-workspaces.js b/deps/npm/lib/workspaces/get-workspaces.js index 373af1d689cc31..2ac043d5f3943d 100644 --- a/deps/npm/lib/workspaces/get-workspaces.js +++ b/deps/npm/lib/workspaces/get-workspaces.js @@ -42,7 +42,7 @@ const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom let msg = '!' if (filters.length) { msg = `:\n ${filters.reduce( - (res, filterArg) => `${res} --workspace=${filterArg}`, '')}` + (acc, filterArg) => `${acc} --workspace=${filterArg}`, '')}` } throw new Error(`No workspaces found${msg}`) diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index 7cfb97a9bdd3f9..91381c80c2c909 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "December 2022" "" "" +.TH "NPM-ACCESS" "1" "January 2023" "" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" @@ -14,6 +14,8 @@ npm access grant \[lB]\[rB] npm access revoke \[lB]\[rB] .fi .RE +.P +Note: This command is unaware of workspaces. .SS "Description" .P Used to set access controls on private packages. diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index e5de0dff8fb5ea..4e9a687fb6752c 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "December 2022" "" "" +.TH "NPM-ADDUSER" "1" "January 2023" "" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" @@ -71,7 +71,7 @@ Type: "legacy" or "web" .RE 0 .P -What authentication strategy to use with \fBlogin\fR. +What authentication strategy to use with \fBlogin\fR. Note that if an \fBotp\fR config is given, this value will always be set to \fBlegacy\fR. 
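This man-page sentence documents the `otp` flattener added in `definitions.js` earlier in this patch. A small stand-alone sketch of the effect (`flattenOtp` is a stand-in name for that flatten override, not a real npm export):

    // mimics definitions.js: flattening otp also forces auth-type to 'legacy'
    const flattenOtp = (key, obj, flatOptions) => {
      flatOptions[key] = obj[key]
      if (obj.otp) {
        obj['auth-type'] = 'legacy'
        flatOptions['auth-type'] = 'legacy'
      }
    }

    const flatOptions = {}
    flattenOtp('otp', { otp: '123456', 'auth-type': 'web' }, flatOptions)
    // flatOptions => { otp: '123456', 'auth-type': 'legacy' }
    // i.e. passing --otp downgrades a requested web login to the legacy flow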
.SS "See Also" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index e9d4063a5cec8d..f99fa41719b110 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "December 2022" "" "" +.TH "NPM-AUDIT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index 7d057e965a8664..61d2835a912352 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "December 2022" "" "" +.TH "NPM-BUGS" "1" "January 2023" "" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index 1cd91027282fd3..026f08d17d1ca2 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "December 2022" "" "" +.TH "NPM-CACHE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index e5fa670750432b..9335c9aec45f8c 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "December 2022" "" "" +.TH "NPM-CI" "1" "January 2023" "" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" @@ -142,7 +142,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBomit\fR" .RS 0 .IP \(bu 4 @@ -172,7 +172,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. 
.SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index 2d66e7e042b77d..c2f5d2a2506d23 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "December 2022" "" "" +.TH "NPM-COMPLETION" "1" "January 2023" "" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index ea57fe94b002ee..1f4e831af8d63b 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "December 2022" "" "" +.TH "NPM-CONFIG" "1" "January 2023" "" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index bbd1fc413d77c7..ff45a8d0fd0a43 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "December 2022" "" "" +.TH "NPM-DEDUPE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" @@ -98,7 +98,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBstrict-peer-deps\fR" .RS 0 .IP \(bu 4 @@ -112,7 +112,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. 
.SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index e97357b5bcae1b..ad4f34b53c263e 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "December 2022" "" "" +.TH "NPM-DEPRECATE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index b9f11879b79623..410548534a6e0d 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "December 2022" "" "" +.TH "NPM-DIFF" "1" "January 2023" "" "" .SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index bf05b601a81cd6..f87010d6019de8 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "December 2022" "" "" +.TH "NPM-DIST-TAG" "1" "January 2023" "" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index d5901813913f31..c3832ae6f52279 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "December 2022" "" "" +.TH "NPM-DOCS" "1" "January 2023" "" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index 776d50ea8edd6b..a33e7acf628325 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "December 2022" "" "" +.TH "NPM-DOCTOR" "1" "January 2023" "" "" .SH "NAME" \fBnpm-doctor\fR - Check your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index 80f7a55df5ab90..4b933ac01c88a4 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "December 2022" "" "" +.TH "NPM-EDIT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index b13c9f3d21465d..b8b6cc7e96187a 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "December 2022" "" "" +.TH "NPM-EXEC" "1" "January 2023" "" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 6897f448190ff0..b1b5305d0d99e7 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "December 2022" "" "" +.TH "NPM-EXPLAIN" "1" "January 2023" "" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index fed8a1b25a3538..de402db28c648e 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "December 2022" "" "" +.TH "NPM-EXPLORE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index fe29d9dd7fe311..5ec681c48c39ea 100644 --- 
a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "December 2022" "" "" +.TH "NPM-FIND-DUPES" "1" "January 2023" "" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" @@ -45,7 +45,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBstrict-peer-deps\fR" .RS 0 .IP \(bu 4 @@ -59,7 +59,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. .SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index b5f2cbdd58e2fb..7c1fe64f033cd1 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "December 2022" "" "" +.TH "NPM-FUND" "1" "January 2023" "" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 939aa4fd343ced..5d7a40e7ca3cec 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "December 2022" "" "" +.TH "NPM-HELP-SEARCH" "1" "January 2023" "" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 15b73a5ce73164..c0e77e0b5fc83d 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "December 2022" "" "" +.TH "NPM-HELP" "1" "January 2023" "" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index d38ad36171c5a6..8ba80f625cebbb 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "December 2022" "" "" +.TH "NPM-HOOK" "1" "January 2023" "" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 298bdecf3e9fab..f5fc5fbc359195 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,11 +1,11 @@ -.TH "NPM-INIT" "1" "December 2022" "" "" +.TH "NPM-INIT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" .P .RS 2 .nf -npm init (same as `npx ) +npm init (same as `npx `) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/man/man1/npm-install-ci-test.1 
b/deps/npm/man/man1/npm-install-ci-test.1 index 33abfcad8e4630..96b0bf5acc4c55 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "December 2022" "" "" +.TH "NPM-INSTALL-CI-TEST" "1" "January 2023" "" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" @@ -90,7 +90,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBomit\fR" .RS 0 .IP \(bu 4 @@ -120,7 +120,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. .SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 9f3914208a62d7..5b5b6823cf3ca4 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "December 2022" "" "" +.TH "NPM-INSTALL-TEST" "1" "January 2023" "" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" @@ -90,7 +90,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBomit\fR" .RS 0 .IP \(bu 4 @@ -120,7 +120,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. 
.SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 3211b0c6f1e9d5..55e5abe3f888a9 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "December 2022" "" "" +.TH "NPM-INSTALL" "1" "January 2023" "" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" @@ -452,7 +452,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBomit\fR" .RS 0 .IP \(bu 4 @@ -482,7 +482,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. .SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index adbc8b4db5d81d..42ecdee92aa522 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "December 2022" "" "" +.TH "NPM-LINK" "1" "January 2023" "" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" @@ -157,7 +157,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBstrict-peer-deps\fR" .RS 0 .IP \(bu 4 @@ -171,7 +171,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. 
.SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index 1a10ce18cdac9c..3b99aefeecf75a 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "December 2022" "" "" +.TH "NPM-LOGIN" "1" "January 2023" "" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" @@ -75,7 +75,7 @@ Type: "legacy" or "web" .RE 0 .P -What authentication strategy to use with \fBlogin\fR. +What authentication strategy to use with \fBlogin\fR. Note that if an \fBotp\fR config is given, this value will always be set to \fBlegacy\fR. .SS "See Also" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 00201c97807783..3bb5aa671422ed 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "December 2022" "" "" +.TH "NPM-LOGOUT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 114cd16526d327..2458a585b79269 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "December 2022" "" "" +.TH "NPM-LS" "1" "January 2023" "" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@9.2.0 /path/to/npm +npm@9.3.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index d070b6a94181d2..ba4ee6c72a5ebc 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "December 2022" "" "" +.TH "NPM-ORG" "1" "January 2023" "" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index 0893d2d7cc7924..0cd7762886c959 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "December 2022" "" "" +.TH "NPM-OUTDATED" "1" "January 2023" "" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index eb30d26fa6c1be..f2015740b02cd9 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "December 2022" "" "" +.TH "NPM-OWNER" "1" "January 2023" "" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" @@ -12,8 +12,6 @@ npm owner ls alias: author .fi .RE -.P -Note: This command is unaware of workspaces. .SS "Description" .P Manage ownership of published packages. 
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index ccb49505cdc9e3..934bc9cf4ec176 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "December 2022" "" "" +.TH "NPM-PACK" "1" "January 2023" "" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 79b3258e337928..59c5191676dba1 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "December 2022" "" "" +.TH "NPM-PING" "1" "January 2023" "" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index ea1233faeaa44a..4f3437fb2fedb1 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "December 2022" "" "" +.TH "NPM-PKG" "1" "January 2023" "" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index 0d1fcab353d15c..c51646626a4982 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "December 2022" "" "" +.TH "NPM-PREFIX" "1" "January 2023" "" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 4a03f77be08a2e..cd5cba95150b22 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "December 2022" "" "" +.TH "NPM-PROFILE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 958505dee0e08a..928ccd5d806082 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "December 2022" "" "" +.TH "NPM-PRUNE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index e191b644e053ca..d75c7b39eb60aa 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "December 2022" "" "" +.TH "NPM-PUBLISH" "1" "January 2023" "" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" @@ -84,7 +84,7 @@ Type: null, "restricted", or "public" .RE 0 .P -If do not want your scoped package to be publicly viewable (and installable) set \fB--access=restricted\fR. +If you do not want your scoped package to be publicly viewable (and installable) set \fB--access=restricted\fR. .P Unscoped packages can not be set to \fBrestricted\fR. 
.P diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index 657b6a6a37d704..0e6d444e39948b 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "December 2022" "" "" +.TH "NPM-QUERY" "1" "January 2023" "" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 6a5f20689fc3c1..ce56c880b634af 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "December 2022" "" "" +.TH "NPM-REBUILD" "1" "January 2023" "" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 6a7b57ed671f6a..836ae8de258883 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "December 2022" "" "" +.TH "NPM-REPO" "1" "January 2023" "" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index e9e300ebff712a..00b83e280b5cfd 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "December 2022" "" "" +.TH "NPM-RESTART" "1" "January 2023" "" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 5157948d2316da..9333aa4545235b 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "December 2022" "" "" +.TH "NPM-ROOT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" @@ -8,6 +8,8 @@ npm root .fi .RE +.P +Note: This command is unaware of workspaces. .SS "Description" .P Print the effective \fBnode_modules\fR folder to standard out. 
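The npm-publish access option documented a few hunks above encodes one hard constraint: restricted visibility only applies to scoped packages, and unscoped packages can not be set to restricted. A short sketch of that validation under those assumptions; checkPublishAccess is a hypothetical helper, not npm's code.

    // Hypothetical helper enforcing the documented access constraint.
    function checkPublishAccess (name, access) {
      // A scoped name looks like "@scope/pkg".
      const scoped = name.startsWith('@') && name.includes('/')
      if (access === 'restricted' && !scoped) {
        throw new Error(`unscoped package "${name}" can not be set to restricted`)
      }
      // Leaving access unset lets the registry apply its own default.
      return access
    }

    checkPublishAccess('@myorg/pkg', 'restricted') // ok
    checkPublishAccess('somepkg', 'restricted')    // throws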
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index c4f97ca3287bac..44a6a76a8b3844 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "December 2022" "" "" +.TH "NPM-RUN-SCRIPT" "1" "January 2023" "" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index 6a7f6aa9827bdf..3fe6f69992ed6a 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "December 2022" "" "" +.TH "NPM-SEARCH" "1" "January 2023" "" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index f158ea070f774b..875f0b58bad501 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "December 2022" "" "" +.TH "NPM-SHRINKWRAP" "1" "January 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index 28fa89780d8950..627d11b3667df7 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "December 2022" "" "" +.TH "NPM-STAR" "1" "January 2023" "" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index 60619f979edb96..6660429c892b48 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "December 2022" "" "" +.TH "NPM-STARS" "1" "January 2023" "" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index c9f72e52d15233..abdc41d8c5194d 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "December 2022" "" "" +.TH "NPM-START" "1" "January 2023" "" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 8c2a7e4456d090..9c9437aafe6543 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "December 2022" "" "" +.TH "NPM-STOP" "1" "January 2023" "" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 35e18f7cdda049..917dbce667b9a3 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "December 2022" "" "" +.TH "NPM-TEAM" "1" "January 2023" "" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index dde7dfdbffb4a0..5863931a62e8fa 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "December 2022" "" "" +.TH "NPM-TEST" "1" "January 2023" "" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 868de1e51d7ce5..1c7f976201283e 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" 
"December 2022" "" "" +.TH "NPM-TOKEN" "1" "January 2023" "" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 952dac51cb2b04..231b203955aa63 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "December 2022" "" "" +.TH "NPM-UNINSTALL" "1" "January 2023" "" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index b2c8ca2f2c8257..99024be559c3c3 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "December 2022" "" "" +.TH "NPM-UNPUBLISH" "1" "January 2023" "" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index b05159a0e53804..8263c1aad3f280 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "December 2022" "" "" +.TH "NPM-UNSTAR" "1" "January 2023" "" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index 9f00dfb01a30c3..644a6787de55a5 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "December 2022" "" "" +.TH "NPM-UPDATE" "1" "January 2023" "" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" @@ -198,7 +198,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. .SS "\fBomit\fR" .RS 0 .IP \(bu 4 @@ -228,7 +228,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. 
.SS "\fBpackage-lock\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index d76672560cd893..59248f1a4ef769 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "December 2022" "" "" +.TH "NPM-VERSION" "1" "January 2023" "" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index f4784653a79259..8de0a2d9f7df87 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "December 2022" "" "" +.TH "NPM-VIEW" "1" "January 2023" "" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 89db7bcf33a584..a609e66bf52059 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "December 2022" "" "" +.TH "NPM-WHOAMI" "1" "January 2023" "" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index b759c6a58efc85..e29df213a0402e 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "December 2022" "" "" +.TH "NPM" "1" "January 2023" "" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -8,9 +8,11 @@ npm .fi .RE +.P +Note: This command is unaware of workspaces. .SS "Version" .P -9.2.0 +9.3.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. @@ -80,7 +82,7 @@ User Configs: The file at \fB$HOME/.npmrc\fR is an ini-formatted list of configs .IP \(bu 4 Global Configs: The file found at \fB./etc/npmrc\fR (relative to the global prefix will be parsed if it is found. See npm help prefix for more info on the global prefix. If the \fBglobalconfig\fR option is set in the cli, env, or user config, then that file is parsed instead. .IP \(bu 4 -Defaults: npm's default configuration options are defined in lib/utils/config-defs.js. These must not be changed. +Defaults: npm's default configuration options are defined in \fBlib/utils/config/definitions.js\fR. These must not be changed. 
.RE 0 .P diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index f718ed321d7e15..ab3a83236bfcd1 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "December 2022" "" "" +.TH "NPX" "1" "January 2023" "" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index ea0acabd334c84..09870f292aee49 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "December 2022" "" "" +.TH "FOLDERS" "5" "January 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index a6758e93d728f4..ecc6a9b72379ca 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "December 2022" "" "" +.TH "INSTALL" "5" "January 2023" "" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" @@ -7,11 +7,11 @@ To publish and install packages to and from the public npm registry, you must in .SS "Overview" .RS 0 .IP \(bu 4 -\fBChecking your version of npm and Node.js\fR \fI\(la#checking-your-version-of-npm-and-node-js\(ra\fR +\fBChecking your version of npm and Node.js\fR \fI(Checking your version of npm and Node.js)\fR .IP \(bu 4 -\fBUsing a Node version manager to install Node.js and npm\fR \fI\(la#using-a-node-version-manager-to-install-node-js-and-npm\(ra\fR +\fBUsing a Node version manager to install Node.js and npm\fR \fI(Using a Node version manager to install Node.js and npm)\fR .IP \(bu 4 -\fBUsing a Node installer to install Node.js and npm\fR \fI\(la#using-a-node-installer-to-install-node-js-and-npm\(ra\fR +\fBUsing a Node installer to install Node.js and npm\fR \fI(Using a Node installer to install Node.js and npm)\fR .RE 0 .SS "Checking your version of npm and Node.js" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index ea0acabd334c84..09870f292aee49 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "December 2022" "" "" +.TH "FOLDERS" "5" "January 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index 8c6781b6a7703e..6d7c946a06110d 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "December 2022" "" "" +.TH "PACKAGE.JSON" "5" "January 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index dd6eba1ea3431d..2e498029e420b6 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "December 2022" "" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "January 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 4846826ebbe975..0828d2ea7ba317 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "December 2022" "" "" +.TH "NPMRC" "5" "January 2023" "" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/deps/npm/man/man5/package-json.5 
b/deps/npm/man/man5/package-json.5 index 8c6781b6a7703e..6d7c946a06110d 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "December 2022" "" "" +.TH "PACKAGE.JSON" "5" "January 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 4170a9cd3972f0..d10a93bc31ab82 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "December 2022" "" "" +.TH "PACKAGE-LOCK.JSON" "5" "January 2023" "" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index 9a0d98c4d5e0c2..6f8d20d6efc965 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "December 2022" "" "" +.TH "CONFIG" "7" "January 2023" "" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" @@ -161,7 +161,7 @@ Type: null, "restricted", or "public" .RE 0 .P -If do not want your scoped package to be publicly viewable (and installable) set \fB--access=restricted\fR. +If you do not want your scoped package to be publicly viewable (and installable) set \fB--access=restricted\fR. .P Unscoped packages can not be set to \fBrestricted\fR. .P @@ -216,7 +216,7 @@ Type: "legacy" or "web" .RE 0 .P -What authentication strategy to use with \fBlogin\fR. +What authentication strategy to use with \fBlogin\fR. Note that if an \fBotp\fR config is given, this value will always be set to \fBlegacy\fR. .SS "\fBbefore\fR" .RS 0 .IP \(bu 4 @@ -1445,7 +1445,7 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co .P By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object. .P -When such and override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure. .SS "\fBstrict-ssl\fR" .RS 0 .IP \(bu 4 @@ -1770,7 +1770,7 @@ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=sh .RE 0 .P -Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependendencies. Sets \fB--install-strategy=shallow\fR. +Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR. 
.SS "\fBinit.author.email\fR" .RS 0 .IP \(bu 4 diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index d10fc0e4cffeec..e3b0271a2601f7 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "December 2022" "" "" +.TH "QUERYING" "7" "January 2023" "" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index 2614a67657f15e..8b5184dcb662dc 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "December 2022" "" "" +.TH "DEVELOPERS" "7" "January 2023" "" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index c605c3a69c9250..f894a4b36fdfd8 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "December 2022" "" "" +.TH "LOGGING" "7" "January 2023" "" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index a87c11222e377f..1aefdbb4145658 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "December 2022" "" "" +.TH "ORGS" "7" "January 2023" "" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index f9da11f2c0d14d..9c376719ce9c53 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "December 2022" "" "" +.TH "PACKAGE-SPEC" "7" "January 2023" "" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index a3368a441ac0da..da418a424014e8 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "December 2022" "" "" +.TH "REGISTRY" "7" "January 2023" "" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" @@ -15,7 +15,7 @@ The npm public registry is powered by a CouchDB database, of which there is a pu .P The registry URL used is determined by the scope of the package (see npm help scope. If no scope is specified, the default registry is used, which is supplied by the \fB\fBregistry\fR config\fR \fI\(la/using-npm/config#registry\(ra\fR parameter. See npm help config, npm help npmrc, and npm help config for more on managing npm's configuration. Authentication configuration such as auth tokens and certificates are configured specifically scoped to an individual registry. See \fBAuth Related Configuration\fR \fI\(la/configuring-npm/npmrc#auth-related-configuration\(ra\fR .P -When the default registry is used in a package-lock or shrinkwrap is has the special meaning of "the currently configured registry". If you create a lock file while using the default registry you can switch to another registry and npm will install packages from the new registry, but if you create a lock file while using a custom registry packages will be installed from that registry even after you change to another registry. +When the default registry is used in a package-lock or shrinkwrap it has the special meaning of "the currently configured registry". 
If you create a lock file while using the default registry you can switch to another registry and npm will install packages from the new registry, but if you create a lock file while using a custom registry packages will be installed from that registry even after you change to another registry. .SS "Does npm send any information about me back to the registry?" .P Yes. diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index dbbeddaecad012..aea330f2f6260e 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "December 2022" "" "" +.TH "REMOVAL" "7" "January 2023" "" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" @@ -24,7 +24,7 @@ Usually, the above instructions are sufficient. That will remove npm, but leave .P If that doesn't work, or if you require more drastic measures, continue reading. .P -Note that this is only necessary for globally-installed packages. Local installs are completely contained within a project's \fBnode_modules\fR folder. Delete that folder, and everything is gone less a package's install script is particularly ill-behaved). +Note that this is only necessary for globally-installed packages. Local installs are completely contained within a project's \fBnode_modules\fR folder. Delete that folder, and everything is gone unless a package's install script is particularly ill-behaved. .P This assumes that you installed node and npm in the default place. If you configured node with a different \fB--prefix\fR, or installed npm with a different prefix setting, then adjust the paths accordingly, replacing \fB/usr/local\fR with your install prefix. .P diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7 index 7edbff22b6ab5e..9b892290e9b5cd 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "December 2022" "" "" +.TH "SCOPE" "7" "January 2023" "" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 78756bf6d2dbe8..443d9430e76016 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "December 2022" "" "" +.TH "SCRIPTS" "7" "January 2023" "" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" @@ -62,7 +62,7 @@ Runs BEFORE the package is prepared and packed, ONLY on \fBnpm publish\fR. \fBprepack\fR .RS 0 .IP \(bu 4 -Runs BEFORE a tarball is packed (on "\fBnpm pack\fR", "\fBnpm publish\fR", and when installing a git dependencies). +Runs BEFORE a tarball is packed (on "\fBnpm pack\fR", "\fBnpm publish\fR", and when installing a git dependency). .IP \(bu 4 NOTE: "\fBnpm run pack\fR" is NOT the same as "\fBnpm pack\fR". "\fBnpm run pack\fR" is an arbitrary user defined script name, where as, "\fBnpm pack\fR" is a CLI defined command. 
.RE 0 diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 427e3eeb8469be..e0ec139dd3429c 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "December 2022" "" "" +.TH "WORKSPACES" "7" "January 2023" "" "" .SH "NAME" \fBworkspaces\fR - Working with workspaces .SS "Description" diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index 89584e5814784b..a9c4b4bc0bb6df 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -12,6 +12,7 @@ const { readdirScoped } = require('@npmcli/fs') const { lstat, readlink } = require('fs/promises') const { depth } = require('treeverse') const log = require('proc-log') +const { cleanUrl } = require('npm-registry-fetch') const { OK, @@ -1210,7 +1211,8 @@ This is a one-time fix-up, please be patient... if (this[_manifests].has(spec.raw)) { return this[_manifests].get(spec.raw) } else { - log.silly('fetch manifest', spec.raw) + const cleanRawSpec = cleanUrl(spec.rawSpec) + log.silly('fetch manifest', spec.raw.replace(spec.rawSpec, cleanRawSpec)) const p = pacote.manifest(spec, options) .then(mani => { this[_manifests].set(spec.raw, mani) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index 947659f177eefe..b2a6ec2315a4f1 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -1,5 +1,6 @@ // mixin providing the loadVirtual method const localeCompare = require('@isaacs/string-locale-compare')('en') +const mapWorkspaces = require('@npmcli/map-workspaces') const { resolve } = require('path') @@ -21,7 +22,6 @@ const loadRoot = Symbol('loadRoot') const loadNode = Symbol('loadVirtualNode') const loadLink = Symbol('loadVirtualLink') const loadWorkspaces = Symbol.for('loadWorkspaces') -const loadWorkspacesVirtual = Symbol.for('loadWorkspacesVirtual') const flagsSuspect = Symbol.for('flagsSuspect') const reCalcDepFlags = Symbol('reCalcDepFlags') const checkRootEdges = Symbol('checkRootEdges') @@ -157,7 +157,7 @@ module.exports = cls => class VirtualLoader extends cls { } const lockWS = [] - const workspaces = this[loadWorkspacesVirtual]({ + const workspaces = mapWorkspaces.virtual({ cwd: this.path, lockfile: s.data, }) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js index 0a7965ae30ca1e..effa5a0cda24fa 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js @@ -1,33 +1,19 @@ const mapWorkspaces = require('@npmcli/map-workspaces') -const _appendWorkspaces = Symbol('appendWorkspaces') // shared ref used by other mixins/Arborist const _loadWorkspaces = Symbol.for('loadWorkspaces') -const _loadWorkspacesVirtual = Symbol.for('loadWorkspacesVirtual') module.exports = cls => class MapWorkspaces extends cls { - [_appendWorkspaces] (node, workspaces) { - if (node && workspaces.size) { - node.workspaces = workspaces - } - - return node - } - async [_loadWorkspaces] (node) { - if (node.workspaces) { - return node - } - const workspaces = await mapWorkspaces({ 
cwd: node.path, pkg: node.package, }) - return this[_appendWorkspaces](node, workspaces) - } + if (node && workspaces.size) { + node.workspaces = workspaces + } - [_loadWorkspacesVirtual] (opts) { - return mapWorkspaces.virtual(opts) + return node } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/override-resolves.js b/deps/npm/node_modules/@npmcli/arborist/lib/override-resolves.js index 794b2c335dc628..c061cbce186786 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/override-resolves.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/override-resolves.js @@ -1,4 +1,4 @@ -function overrideResolves (resolved, opts = {}) { +function overrideResolves (resolved, opts) { const { omitLockfileRegistryResolved = false } = opts if (omitLockfileRegistryResolved) { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/override-set.js b/deps/npm/node_modules/@npmcli/arborist/lib/override-set.js index 742e3f08ec5345..bfc5a5d7906ee5 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/override-set.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/override-set.js @@ -50,9 +50,36 @@ class OverrideSet { continue } - if (semver.intersects(edge.spec, rule.keySpec)) { + // if keySpec is * we found our override + if (rule.keySpec === '*') { return rule } + + let spec = npa(`${edge.name}@${edge.spec}`) + if (spec.type === 'alias') { + spec = spec.subSpec + } + + if (spec.type === 'git') { + if (spec.gitRange && semver.intersects(spec.gitRange, rule.keySpec)) { + return rule + } + + continue + } + + if (spec.type === 'range' || spec.type === 'version') { + if (semver.intersects(spec.fetchSpec, rule.keySpec)) { + return rule + } + + continue + } + + // if we got this far, the spec type is one of tag, directory or file + // which means we have no real way to make version comparisons, so we + // just accept the override + return rule } return this diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js index 16a0095fa09631..e757d0c38a6d70 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js @@ -9,6 +9,7 @@ const localeCompare = require('@isaacs/string-locale-compare')('en') const log = require('proc-log') +const { cleanUrl } = require('npm-registry-fetch') const deepestNestingTarget = require('./deepest-nesting-target.js') const CanPlaceDep = require('./can-place-dep.js') const { @@ -187,7 +188,7 @@ class PlaceDep { `${this.dep.name}@${this.dep.version}`, this.canPlace.description, `for: ${this.edge.from.package._id || this.edge.from.location}`, - `want: ${this.edge.spec || '*'}` + `want: ${cleanUrl(this.edge.spec || '*')}` ) const placementType = this.canPlace.canPlace === CONFLICT diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 075fb93b916b08..a7e8132123fba0 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "6.1.5", + "version": "6.1.6", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", @@ -42,9 +42,9 @@ "@npmcli/template-oss": "4.11.0", "benchmark": "^2.1.4", "chalk": "^4.1.0", - "minify-registry-metadata": "^2.1.0", + "minify-registry-metadata": "^3.0.0", "nock": "^13.2.0", - "tap": "^16.0.1", + "tap": "^16.3.2", "tcompare": "^5.0.6" }, "scripts": { @@ -81,7 +81,6 @@ "tap": { 
"color": true, "after": "test/fixtures/cleanup.js", - "coverage-map": "map.js", "test-env": [ "NODE_OPTIONS=--no-warnings", "LC_ALL=sk" diff --git a/deps/npm/node_modules/@npmcli/config/lib/index.js b/deps/npm/node_modules/@npmcli/config/lib/index.js index e1d47ffcd37362..1ddf2678391959 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/index.js +++ b/deps/npm/node_modules/@npmcli/config/lib/index.js @@ -17,6 +17,14 @@ const { mkdir, } = require('fs/promises') +const fileExists = (...p) => stat(resolve(...p)) + .then((st) => st.isFile()) + .catch(() => false) + +const dirExists = (...p) => stat(resolve(...p)) + .then((st) => st.isDirectory()) + .catch(() => false) + const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) @@ -90,6 +98,7 @@ class Config { platform = process.platform, execPath = process.execPath, cwd = process.cwd(), + excludeNpmCwd = false, }) { // turn the definitions into nopt's weirdo syntax this.definitions = definitions @@ -117,10 +126,12 @@ class Config { this.execPath = execPath this.platform = platform this.cwd = cwd + this.excludeNpmCwd = excludeNpmCwd // set when we load configs this.globalPrefix = null this.localPrefix = null + this.localPackage = null // defaults to env.HOME, but will always be *something* this.home = null @@ -311,15 +322,11 @@ class Config { // default the globalconfig file to that location, instead of the default // global prefix. It's weird that `npm get globalconfig --prefix=/foo` // returns `/foo/etc/npmrc`, but better to not change it at this point. - settableGetter(data, 'globalconfig', () => - resolve(this[_get]('prefix'), 'etc/npmrc')) + settableGetter(data, 'globalconfig', () => resolve(this[_get]('prefix'), 'etc/npmrc')) } loadHome () { - if (this.env.HOME) { - return this.home = this.env.HOME - } - this.home = homedir() + this.home = this.env.HOME || homedir() } loadGlobalPrefix () { @@ -330,7 +337,7 @@ class Config { if (this.env.PREFIX) { this.globalPrefix = this.env.PREFIX } else if (this.platform === 'win32') { - // c:\node\node.exe --> prefix=c:\node\ + // c:\node\node.exe --> prefix=c:\node\ this.globalPrefix = dirname(this.execPath) } else { // /usr/local/bin/node --> prefix=/usr/local @@ -599,6 +606,12 @@ class Config { // we return to make sure localPrefix is set await this.loadLocalPrefix() + // if we have not detected a local package json yet, try now that we + // have a local prefix + if (this.localPackage == null) { + this.localPackage = await fileExists(this.localPrefix, 'package.json') + } + if (this[_get]('global') === true || this[_get]('location') === 'global') { this.data.get('project').source = '(global mode enabled, ignored)' this.sources.set(this.data.get('project').source, 'project') @@ -630,16 +643,17 @@ class Config { const isGlobal = this[_get]('global') || this[_get]('location') === 'global' for (const p of walkUp(this.cwd)) { - const hasNodeModules = await stat(resolve(p, 'node_modules')) - .then((st) => st.isDirectory()) - .catch(() => false) + // HACK: this is an option set in tests to stop the local prefix from being set + // on tests that are created inside the npm repo + if (this.excludeNpmCwd && p === this.npmPath) { + break + } - const hasPackageJson = await stat(resolve(p, 'package.json')) - .then((st) => st.isFile()) - .catch(() => false) + const hasPackageJson = await fileExists(p, 'package.json') - if (!this.localPrefix && (hasNodeModules || hasPackageJson)) { + if (!this.localPrefix && (hasPackageJson || await dirExists(p, 'node_modules'))) { this.localPrefix = p + 
this.localPackage = hasPackageJson // if workspaces are disabled, or we're in global mode, return now if (cliWorkspaces === false || isGlobal) { @@ -663,11 +677,7 @@ class Config { for (const w of workspaces.values()) { if (w === this.localPrefix) { // see if there's a .npmrc file in the workspace, if so log a warning - const hasNpmrc = await stat(resolve(this.localPrefix, '.npmrc')) - .then((st) => st.isFile()) - .catch(() => false) - - if (hasNpmrc) { + if (await fileExists(this.localPrefix, '.npmrc')) { log.warn(`ignoring workspace config at ${this.localPrefix}/.npmrc`) } @@ -675,6 +685,7 @@ class Config { const { data } = this.data.get('default') data.workspace = [this.localPrefix] this.localPrefix = p + this.localPackage = hasPackageJson log.info(`found workspace root at ${this.localPrefix}`) // we found a root, so we return now return diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index 28074afe686bf5..50d860c1c941e9 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "6.1.0", + "version": "6.1.1", "files": [ "bin/", "lib/" @@ -34,7 +34,7 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "dependencies": { "@npmcli/map-workspaces": "^3.0.0", diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index 8f89d4b2711b94..1e27f79597c021 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -19,7 +19,7 @@ "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.11.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index 8979bc919919ad..9e7e3bdb41fc5c 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "5.0.6", + "version": "5.0.7", "description": "The registry diff", "repository": { "type": "git", @@ -44,10 +44,10 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "dependencies": { - "@npmcli/arborist": "^6.1.5", + "@npmcli/arborist": "^6.1.6", "@npmcli/disparity-colors": "^3.0.0", "@npmcli/installed-package-contents": "^2.0.0", "binary-extensions": "^2.2.0", diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index cc77321caa160a..c0092d4c8767b4 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "5.0.6", + "version": "5.0.7", "files": [ "bin/", "lib/" @@ -56,12 +56,12 @@ "bin-links": "^4.0.1", "just-extend": "^6.1.1", "just-safe-set": "^4.1.1", - "minify-registry-metadata": "^2.2.0", + "minify-registry-metadata": "^3.0.0", "mkdirp": "^1.0.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "dependencies": { - "@npmcli/arborist": "^6.1.5", + "@npmcli/arborist": "^6.1.6", "@npmcli/run-script": "^6.0.0", "chalk": "^4.1.0", "ci-info": "^3.7.0", diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index f7fca06d4fc68a..c0de224fba7ef1 100644 --- 
a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "4.0.6", + "version": "4.0.7", "main": "lib/index.js", "files": [ "bin/", @@ -43,10 +43,10 @@ "devDependencies": { "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "dependencies": { - "@npmcli/arborist": "^6.1.5" + "@npmcli/arborist": "^6.1.6" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 606dfc7d9e3e98..b157f97e685b47 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -39,7 +39,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index c0b1a09ee24018..529a7ff9d2c97f 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -31,7 +31,7 @@ "@npmcli/template-oss": "4.11.0", "minipass": "^4.0.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index 5ad21875fd61eb..035edaa9808d5a 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "5.0.6", + "version": "5.0.7", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -26,7 +26,7 @@ "@npmcli/template-oss": "4.11.0", "nock": "^13.0.7", "spawk": "^1.7.1", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", @@ -36,7 +36,7 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^6.1.5", + "@npmcli/arborist": "^6.1.6", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", "pacote": "^15.0.7" diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index eece07bee2341b..c293d566d1dc28 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -25,11 +25,11 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", + "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.11.0", - "libnpmpack": "^5.0.6", "lodash.clonedeep": "^4.5.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 6d22ce3568409f..e0d67afbbf66dc 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -28,7 +28,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 9154a1299157df..b3444c77b8dcfb 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ 
b/deps/npm/node_modules/libnpmteam/package.json @@ -18,7 +18,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", "nock": "^13.2.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index d77bf51e1ba7a0..ff3855ae6c1a32 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -34,7 +34,7 @@ "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.11.0", "require-inject": "^1.4.4", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "dependencies": { "@npmcli/git": "^4.0.1", diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/LICENSE b/deps/npm/node_modules/minipass-fetch/node_modules/minipass/LICENSE deleted file mode 100644 index bf1dece2e1f122..00000000000000 --- a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.d.ts b/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.d.ts deleted file mode 100644 index 65faf63686c213..00000000000000 --- a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.d.ts +++ /dev/null @@ -1,155 +0,0 @@ -/// -import { EventEmitter } from 'events' -import { Stream } from 'stream' - -declare namespace Minipass { - type Encoding = BufferEncoding | 'buffer' | null - - interface Writable extends EventEmitter { - end(): any - write(chunk: any, ...args: any[]): any - } - - interface Readable extends EventEmitter { - pause(): any - resume(): any - pipe(): any - } - - interface Pipe { - src: Minipass - dest: Writable - opts: PipeOptions - } - - type DualIterable = Iterable & AsyncIterable - - type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string - - type BufferOrString = Buffer | string - - interface StringOptions { - encoding: BufferEncoding - objectMode?: boolean - async?: boolean - } - - interface BufferOptions { - encoding?: null | 'buffer' - objectMode?: boolean - async?: boolean - } - - interface ObjectModeOptions { - objectMode: true - async?: boolean - } - - interface PipeOptions { - end?: boolean - proxyErrors?: boolean - } - - type Options = T extends string - ? StringOptions - : T extends Buffer - ? BufferOptions - : ObjectModeOptions -} - -declare class Minipass< - RType extends any = Buffer, - WType extends any = RType extends Minipass.BufferOrString - ? 
Minipass.ContiguousData - : RType - > - extends Stream - implements Minipass.DualIterable -{ - static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable - - readonly bufferLength: number - readonly flowing: boolean - readonly writable: boolean - readonly readable: boolean - readonly paused: boolean - readonly emittedEnd: boolean - readonly destroyed: boolean - - /** - * Not technically private or readonly, but not safe to mutate. - */ - private readonly buffer: RType[] - private readonly pipes: Minipass.Pipe[] - - /** - * Technically writable, but mutating it can change the type, - * so is not safe to do in TypeScript. - */ - readonly objectMode: boolean - async: boolean - - /** - * Note: encoding is not actually read-only, and setEncoding(enc) - * exists. However, this type definition will insist that TypeScript - * programs declare the type of a Minipass stream up front, and if - * that type is string, then an encoding MUST be set in the ctor. If - * the type is Buffer, then the encoding must be missing, or set to - * 'buffer' or null. If the type is anything else, then objectMode - * must be set in the constructor options. So there is effectively - * no allowed way that a TS program can set the encoding after - * construction, as doing so will destroy any hope of type safety. - * TypeScript does not provide many options for changing the type of - * an object at run-time, which is what changing the encoding does. - */ - readonly encoding: Minipass.Encoding - // setEncoding(encoding: Encoding): void - - // Options required if not reading buffers - constructor( - ...args: RType extends Buffer - ? [] | [Minipass.Options] - : [Minipass.Options] - ) - - write(chunk: WType, cb?: () => void): boolean - write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean - read(size?: number): RType - end(cb?: () => void): this - end(chunk: any, cb?: () => void): this - end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this - pause(): void - resume(): void - promise(): Promise - collect(): Promise - - concat(): RType extends Minipass.BufferOrString ? Promise : never - destroy(er?: any): void - pipe(dest: W, opts?: Minipass.PipeOptions): W - unpipe(dest: W): void - - /** - * alias for on() - */ - addEventHandler(event: string, listener: (...args: any[]) => any): this - - on(event: string, listener: (...args: any[]) => any): this - on(event: 'data', listener: (chunk: RType) => any): this - on(event: 'error', listener: (error: any) => any): this - on( - event: - | 'readable' - | 'drain' - | 'resume' - | 'end' - | 'prefinish' - | 'finish' - | 'close', - listener: () => any - ): this - - [Symbol.iterator](): Iterator - [Symbol.asyncIterator](): AsyncIterator -} - -export = Minipass diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.js deleted file mode 100644 index e8797aab6cc276..00000000000000 --- a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/index.js +++ /dev/null @@ -1,649 +0,0 @@ -'use strict' -const proc = typeof process === 'object' && process ? 
process : { - stdout: null, - stderr: null, -} -const EE = require('events') -const Stream = require('stream') -const SD = require('string_decoder').StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } - - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding (enc) { - this.encoding = enc - } - - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } - - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } - - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - - if (!encoding) - encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) - n = null - - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - - return chunk - } - - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } - - resume () { - return this[RESUME]() - } - - pause () { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed () { - return this[DESTROYED] - } - - get flowing () { - return this[FLOWING] - } - - get paused () { - return this[PAUSED] - } - - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } - - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() - } - - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } - - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false - } - - pipe (dest, opts) { - if (this[DESTROYED]) - return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() - } else { - this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() - } - - return dest - } - - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() - } - } - - addListener (ev, fn) { - return this.on(ev, fn) - } - - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd () { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } - - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND] () { - if (this[EMITTED_END]) - return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() - } - - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this.pipes) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } - - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } - - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - - return this - } - - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } -} diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minipass/package.json deleted file mode 100644 index 548d03fa6d5d4b..00000000000000 --- a/deps/npm/node_modules/minipass-fetch/node_modules/minipass/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "minipass", - "version": "3.3.6", - "description": "minimal implementation of a PassThrough stream", - "main": "index.js", - "types": "index.d.ts", - "dependencies": { - "yallist": "^4.0.0" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typescript": "^4.7.3" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json index 45bd36ae719fb9..fc6f88473317a9 100644 --- a/deps/npm/node_modules/minipass-fetch/package.json +++ b/deps/npm/node_modules/minipass-fetch/package.json @@ -1,10 +1,11 @@ { "name": "minipass-fetch", - "version": "3.0.0", + "version": "3.0.1", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", "test": "tap", "snap": "tap", "lint": "eslint \"**/*.js\"", @@ -22,8 +23,8 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.10.0", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -35,7 +36,7 @@ "tap": "^16.0.0" }, "dependencies": { - "minipass": "^3.1.6", + "minipass": "^4.0.0", "minipass-sized": "^1.0.3", "minizlib": "^2.1.2" }, @@ -62,6 +63,6 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.10.0" } } diff --git a/deps/npm/package.json b/deps/npm/package.json index ebfcc0eec29ed3..73c41bc2b4fd29 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "9.2.0", + "version": "9.3.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -53,8 +53,8 @@ }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^6.1.5", - "@npmcli/config": "^6.1.0", + "@npmcli/arborist": "^6.1.6", + "@npmcli/config": "^6.1.1", "@npmcli/map-workspaces": "^3.0.0", "@npmcli/package-json": "^3.0.0", "@npmcli/run-script": "^6.0.0", @@ -76,12 +76,12 @@ "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", "libnpmaccess": "^7.0.1", - "libnpmdiff": "^5.0.6", - "libnpmexec": "^5.0.6", - "libnpmfund": "^4.0.6", + "libnpmdiff": "^5.0.7", + "libnpmexec": "^5.0.7", + "libnpmfund": "^4.0.7", "libnpmhook": "^9.0.1", "libnpmorg": "^5.0.1", - "libnpmpack": "^5.0.6", + "libnpmpack": "^5.0.7", "libnpmpublish": "^7.0.6", "libnpmsearch": "^6.0.1", "libnpmteam": "^5.0.1", @@ -110,7 +110,6 @@ "read": "~1.0.7", "read-package-json": "^6.0.0", "read-package-json-fast": "^3.0.1", - "rimraf": "^3.0.2", "semver": "^7.3.8", "ssri": "^10.0.1", "tar": "^6.1.13", @@ -180,7 +179,6 @@ "read", "read-package-json", "read-package-json-fast", - "rimraf", "semver", "ssri", "tar", @@ -199,14 +197,14 @@ "@npmcli/mock-registry": "^1.0.0", "@npmcli/promise-spawn": "^6.0.1", "@npmcli/template-oss": "4.11.0", - "licensee": "^9.0.0", + "licensee": "^10.0.0", "nock": "^13.2.4", "npm-packlist": "^7.0.4", "remark": "^14.0.2", "remark-gfm": "^3.0.1", "remark-github": "^11.2.4", "spawk": "^1.7.1", - "tap": "^16.0.1" + "tap": "^16.3.2" }, "scripts": { "dependencies": "node scripts/bundle-and-gitignore-deps.js && node scripts/dependency-graph.js", diff --git 
a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs index 3e7658c14bb195..9262e0b51aa2dd 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs @@ -123,7 +123,7 @@ audited 1 package in xxx 1 package has an invalid registry signature: -@npmcli/arborist@1.0.14 (https://verdaccio-clone.org) +@npmcli/arborist@1.0.14 (https://verdaccio-clone.org/) Someone might have tampered with this package since it was published on the registry! @@ -134,7 +134,7 @@ audited 1 package in xxx 1 package has a missing registry signature but the registry is providing signing keys: -@npmcli/arborist@1.0.14 (https://verdaccio-clone.org) +@npmcli/arborist@1.0.14 (https://verdaccio-clone.org/) ` exports[`test/lib/commands/audit.js TAP audit signatures third-party registry with keys and signatures > must match snapshot 1`] = ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs index 2c2646bb299dca..667a7c78b385dc 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs @@ -354,7 +354,6 @@ exports[`test/lib/commands/config.js TAP config list with publishConfig > output ; "cli" config from command line options cache = "{NPMDIR}/test/lib/commands/tap-testdir-config-config-list-with-publishConfig-sandbox/cache" -location = "project" prefix = "{LOCALPREFIX}" userconfig = "{HOME}/.npmrc" diff --git a/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs new file mode 100644 index 00000000000000..533b4f196e6616 --- /dev/null +++ b/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs @@ -0,0 +1,88 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! 
+ */ +'use strict' +exports[`test/lib/commands/diff.js TAP no args in a project dir > must match snapshot 1`] = ` +diff --git a/a.js b/a.js +index v0.1.0..v1.0.0 100644 +--- a/a.js ++++ b/a.js +@@ -1,1 +1,1 @@ +-const a = "a@0.1.0" ++const a = "a@1.0.0" +diff --git a/b.js b/b.js +index v0.1.0..v1.0.0 100644 +--- a/b.js ++++ b/b.js +@@ -1,1 +1,1 @@ +-const b = "b@0.1.0" ++const b = "b@1.0.0" +diff --git a/index.js b/index.js +index v0.1.0..v1.0.0 100644 +--- a/index.js ++++ b/index.js +@@ -1,1 +1,1 @@ +-const version = "0.1.0" ++const version = "1.0.0" +diff --git a/package.json b/package.json +index v0.1.0..v1.0.0 100644 +--- a/package.json ++++ b/package.json +@@ -1,4 +1,4 @@ + { + "name": "foo", +- "version": "0.1.0" ++ "version": "1.0.0" + } +` + +exports[`test/lib/commands/diff.js TAP single arg version, filtering by files > must match snapshot 1`] = ` +diff --git a/a.js b/a.js +index v0.1.0..v1.0.0 100644 +--- a/a.js ++++ b/a.js +@@ -1,1 +1,1 @@ +-const a = "a@0.1.0" ++const a = "a@1.0.0" +diff --git a/b.js b/b.js +index v0.1.0..v1.0.0 100644 +--- a/b.js ++++ b/b.js +@@ -1,1 +1,1 @@ +-const b = "b@0.1.0" ++const b = "b@1.0.0" +` + +exports[`test/lib/commands/diff.js TAP various options using --name-only option > must match snapshot 1`] = ` +index.js +package.json +` + +exports[`test/lib/commands/diff.js TAP various options using diff option > must match snapshot 1`] = ` +diff --git a/index.js b/index.js +index v2.0.0..v3.0.0 100644 +--- a/index.js ++++ b/index.js +@@ -18,7 +18,7 @@ + 17 + 18 + 19 +-202.0.0 ++203.0.0 + 21 + 22 + 23 +diff --git a/package.json b/package.json +index v2.0.0..v3.0.0 100644 +--- a/package.json ++++ b/package.json +@@ -1,4 +1,4 @@ + { + "name": "bar", +- "version": "2.0.0" ++ "version": "3.0.0" + } +` diff --git a/deps/npm/tap-snapshots/test/lib/commands/dist-tag.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/dist-tag.js.test.cjs index 2b75899e4e3a6f..ebc823e7e06bbd 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/dist-tag.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/dist-tag.js.test.cjs @@ -11,7 +11,6 @@ exports[`test/lib/commands/dist-tag.js TAP add new tag > should return success m exports[`test/lib/commands/dist-tag.js TAP add using valid semver range as name > should return success msg 1`] = ` dist-tag add 1.0.0 to @scoped/another@7.7.7 - ` exports[`test/lib/commands/dist-tag.js TAP ls in current package > should list available tags for current package 1`] = ` @@ -22,7 +21,6 @@ latest: 1.0.0 exports[`test/lib/commands/dist-tag.js TAP ls on missing package > should log no dist-tag found msg 1`] = ` dist-tag ls Couldn't get dist-tag data for foo@* - ` exports[`test/lib/commands/dist-tag.js TAP ls on named package > should list tags for the specified package 1`] = ` @@ -45,7 +43,6 @@ latest: 2.0.0 exports[`test/lib/commands/dist-tag.js TAP remove existing tag > should log remove info 1`] = ` dist-tag del c from @scoped/another - ` exports[`test/lib/commands/dist-tag.js TAP remove existing tag > should return success msg 1`] = ` @@ -55,13 +52,11 @@ exports[`test/lib/commands/dist-tag.js TAP remove existing tag > should return s exports[`test/lib/commands/dist-tag.js TAP remove non-existing tag > should log error msg 1`] = ` dist-tag del nonexistent from @scoped/another dist-tag del nonexistent is not a dist-tag on @scoped/another - ` exports[`test/lib/commands/dist-tag.js TAP set existing version > should log warn msg 1`] = ` dist-tag add b to @scoped/another@0.6.0 dist-tag add b is already set to version 0.6.0 - ` 
exports[`test/lib/commands/dist-tag.js TAP workspaces no args > printed the expected output 1`] = ` @@ -95,7 +90,7 @@ latest-a: 1.0.0 latest: 1.0.0 ` -exports[`test/lib/commands/dist-tag.js TAP workspaces one arg -- . > printed the expected output 1`] = ` +exports[`test/lib/commands/dist-tag.js TAP workspaces one arg -- .@1, ignores version spec > printed the expected output 1`] = ` workspace-a: latest-a: 1.0.0 latest: 1.0.0 @@ -107,7 +102,7 @@ latest-c: 3.0.0 latest: 3.0.0 ` -exports[`test/lib/commands/dist-tag.js TAP workspaces one arg -- .@1, ignores version spec > printed the expected output 1`] = ` +exports[`test/lib/commands/dist-tag.js TAP workspaces one arg -- cwd > printed the expected output 1`] = ` workspace-a: latest-a: 1.0.0 latest: 1.0.0 @@ -131,7 +126,7 @@ latest-c: 3.0.0 latest: 3.0.0 ` -exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, . > printed the expected output 1`] = ` +exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, .@1, ignores version spec > printed the expected output 1`] = ` workspace-a: latest-a: 1.0.0 latest: 1.0.0 @@ -143,7 +138,13 @@ latest-c: 3.0.0 latest: 3.0.0 ` -exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, .@1, ignores version spec > printed the expected output 1`] = ` +exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, @scoped/pkg, logs a warning and ignores workspaces > printed the expected output 1`] = ` +a: 0.0.1 +b: 0.5.0 +latest: 1.0.0 +` + +exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, cwd > printed the expected output 1`] = ` workspace-a: latest-a: 1.0.0 latest: 1.0.0 @@ -154,9 +155,3 @@ workspace-c: latest-c: 3.0.0 latest: 3.0.0 ` - -exports[`test/lib/commands/dist-tag.js TAP workspaces two args -- list, @scoped/pkg, logs a warning and ignores workspaces > printed the expected output 1`] = ` -a: 0.0.1 -b: 0.5.0 -latest: 1.0.0 -` diff --git a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs index 861aad3e255017..2cf8dca12bc66a 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs @@ -59,7 +59,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-all-clear/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -75,7 +75,7 @@ npm -v  ok  curren node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-all-clear-in-color/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -185,7 +185,7 @@ npm -v  not ok Error: unsupport node -v  not ok Error: unsupported proxy protocol: 'ssh:' npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-bad-proxy/global/bin +global bin folder in PATH  ok  
{CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -201,7 +201,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-cacache-badContent/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -329,7 +329,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-cacache-missingContent/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -345,7 +345,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-cacache-reclaimedCount/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -482,7 +482,7 @@ Object { exports[`test/lib/commands/doctor.js TAP discrete checks invalid environment > output 1`] = ` Check  Value  Recommendation/Notes git executable in PATH  ok  /path/to/git -global bin folder in PATH not ok Error: Add {CWD}/test/lib/commands/tap-testdir-doctor-discrete-checks-invalid-environment/global/bin to your $PATH +global bin folder in PATH not ok Error: Add {CWD}/global/bin to your $PATH ` exports[`test/lib/commands/doctor.js TAP discrete checks permissions - not windows > logs 1`] = ` @@ -637,23 +637,23 @@ Object { "warn": Array [ Array [ "checkFilesPermission", - "error reading directory {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/cache", + "error reading directory {CWD}/cache", ], Array [ "checkFilesPermission", - "error reading directory {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/prefix/node_modules", + "error reading directory {CWD}/prefix/node_modules", ], Array [ "checkFilesPermission", - "error reading directory {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/global/lib/node_modules", + "error reading directory {CWD}/global/node_modules", ], Array [ "checkFilesPermission", - "error reading directory {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/prefix/node_modules/.bin", + "error reading directory {CWD}/prefix/node_modules/.bin", ], Array [ "checkFilesPermission", - "error reading directory {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/global/bin", + "error reading directory {CWD}/global/bin", ], ], } @@ -666,12 +666,12 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/global/bin -Perms check on cached files  not ok 
Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/cache (should be owned by current user) -Perms check on local node_modules  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/prefix/node_modules (should be owned by current user) -Perms check on global node_modules not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/global/lib/node_modules -Perms check on local bin folder  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/prefix/node_modules/.bin -Perms check on global bin folder  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-error-reading-directory/global/bin +global bin folder in PATH  ok  {CWD}/global/bin +Perms check on cached files  not ok Check the permissions of files in {CWD}/cache (should be owned by current user) +Perms check on local node_modules  not ok Check the permissions of files in {CWD}/prefix/node_modules (should be owned by current user) +Perms check on global node_modules not ok Check the permissions of files in {CWD}/global/node_modules +Perms check on local bin folder  not ok Check the permissions of files in {CWD}/prefix/node_modules/.bin +Perms check on global bin folder  not ok Check the permissions of files in {CWD}/global/bin Verify cache contents  ok  verified 0 tarballs ` @@ -682,8 +682,8 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-owner/global/bin -Perms check on cached files  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-owner/cache (should be owned by current user) +global bin folder in PATH  ok  {CWD}/global/bin +Perms check on cached files  not ok Check the permissions of files in {CWD}/cache (should be owned by current user) Perms check on local node_modules  ok   Perms check on global node_modules ok   Perms check on local bin folder  ok   @@ -737,7 +737,7 @@ Object { "warn": Array [ Array [ "checkFilesPermission", - "should be owner of {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-owner/cache/_cacache", + "should be owner of {CWD}/cache/_cacache", ], ], } @@ -750,12 +750,12 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/global/bin -Perms check on cached files  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/cache (should be owned by current user) -Perms check on local node_modules  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/prefix/node_modules (should be owned by current user) -Perms check on global node_modules not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/global/lib/node_modules -Perms check on local bin folder  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/prefix/node_modules/.bin -Perms check on global bin 
folder  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/global/bin +global bin folder in PATH  ok  {CWD}/global/bin +Perms check on cached files  not ok Check the permissions of files in {CWD}/cache (should be owned by current user) +Perms check on local node_modules  not ok Check the permissions of files in {CWD}/prefix/node_modules (should be owned by current user) +Perms check on global node_modules not ok Check the permissions of files in {CWD}/global/node_modules +Perms check on local bin folder  not ok Check the permissions of files in {CWD}/prefix/node_modules/.bin +Perms check on global bin folder  not ok Check the permissions of files in {CWD}/global/bin Verify cache contents  ok  verified 0 tarballs ` @@ -764,23 +764,23 @@ Object { "error": Array [ Array [ "checkFilesPermission", - "Missing permissions on {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/cache (expect: readable)", + "Missing permissions on {CWD}/cache (expect: readable)", ], Array [ "checkFilesPermission", - "Missing permissions on {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/prefix/node_modules (expect: readable, writable)", + "Missing permissions on {CWD}/prefix/node_modules (expect: readable, writable)", ], Array [ "checkFilesPermission", - "Missing permissions on {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/global/lib/node_modules (expect: readable)", + "Missing permissions on {CWD}/global/node_modules (expect: readable)", ], Array [ "checkFilesPermission", - "Missing permissions on {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/prefix/node_modules/.bin (expect: readable, writable, executable)", + "Missing permissions on {CWD}/prefix/node_modules/.bin (expect: readable, writable, executable)", ], Array [ "checkFilesPermission", - "Missing permissions on {CWD}/test/lib/commands/tap-testdir-doctor-incorrect-permissions/global/bin (expect: executable)", + "Missing permissions on {CWD}/global/bin (expect: executable)", ], ], "info": Array [ @@ -885,7 +885,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  not ok Error: Install git and ensure it's in your PATH. 
-global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-missing-git/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -940,11 +940,11 @@ Object { "warn": Array [ Array [ "checkFilesPermission", - "error getting info for {CWD}/test/lib/commands/tap-testdir-doctor-missing-global-directories/global/lib/node_modules", + "error getting info for {CWD}/global/node_modules", ], Array [ "checkFilesPermission", - "error getting info for {CWD}/test/lib/commands/tap-testdir-doctor-missing-global-directories/global/bin", + "error getting info for {CWD}/global/bin", ], ], } @@ -957,12 +957,12 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-missing-global-directories/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   -Perms check on global node_modules not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-missing-global-directories/global/lib/node_modules +Perms check on global node_modules not ok Check the permissions of files in {CWD}/global/node_modules Perms check on local bin folder  ok   -Perms check on global bin folder  not ok Check the permissions of files in {CWD}/test/lib/commands/tap-testdir-doctor-missing-global-directories/global/bin +Perms check on global bin folder  not ok Check the permissions of files in {CWD}/global/bin Verify cache contents  ok  verified 0 tarballs ` @@ -1020,7 +1020,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-missing-local-node_modules/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1083,7 +1083,7 @@ npm -v  ok  current: v1.0.0, node -v  not ok Use node v2.0.1 (current: v2.0.0) npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-node-out-of-date---current/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1146,7 +1146,7 @@ npm -v  ok  current: v1.0.0, node -v  not ok Use node v1.0.0 (current: v0.0.1) npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-node-out-of-date---lts/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1209,7 +1209,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  not ok Try \`npm config set registry=https://registry.npmjs.org/\` git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  
{CWD}/test/lib/commands/tap-testdir-doctor-non-default-registry/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1272,7 +1272,7 @@ npm -v  not ok Use npm v2.0.0 node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-npm-out-of-date/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1335,7 +1335,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-ping-404/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1398,7 +1398,7 @@ npm -v  ok  curren node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-ping-404-in-color/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1461,7 +1461,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-ping-exception-with-code/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1524,7 +1524,7 @@ npm -v  ok  current: v1.0.0, node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH  ok  {CWD}/test/lib/commands/tap-testdir-doctor-ping-exception-without-code/global/bin +global bin folder in PATH  ok  {CWD}/global/bin Perms check on cached files  ok   Perms check on local node_modules  ok   Perms check on global node_modules ok   @@ -1643,5 +1643,5 @@ npm -v  ok  current: v1.0.0, latest: node -v  ok  current: v1.0.0, recommended: v1.0.0 npm config get registry  ok  using default registry (https://registry.npmjs.org/) git executable in PATH  ok  /path/to/git -global bin folder in PATH ok  {CWD}/test/lib/commands/tap-testdir-doctor-windows-skips-permissions-checks/global +global bin folder in PATH ok  {CWD}/global ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/fund.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/fund.js.test.cjs index f0df1e1c58ad46..011315a9211ef0 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/fund.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/fund.js.test.cjs @@ -8,8 +8,7 @@ exports[`test/lib/commands/fund.js TAP fund a package with type and multiple sources > should print prompt select message 1`] = ` 1: Foo funding 
available at the following URL: http://example.com/foo 2: Lorem funding available at the following URL: http://example.com/foo-lorem -Run \`npm fund [<@scope>/] --which=1\`, for example, to open the first funding URL listed in that package - +Run \`npm fund [] --which=1\`, for example, to open the first funding URL listed in that package ` exports[`test/lib/commands/fund.js TAP fund colors > should print output with color info 1`] = ` @@ -23,7 +22,6 @@ exports[`test/lib/commands/fund.js TAP fund colors > should print output with co  \`-- http://example.com/e  \`-- e@1.0.0  - ` exports[`test/lib/commands/fund.js TAP fund containing multi-level nested deps with no funding > should omit dependencies with no funding declared 1`] = ` @@ -33,54 +31,37 @@ nested-no-funding-packages@1.0.0 \`-- http://example.com/donate \`-- bar@1.0.0 - ` exports[`test/lib/commands/fund.js TAP fund in which same maintainer owns all its deps > should print stack packages together 1`] = ` http://example.com/donate \`-- maintainer-owns-all-deps@1.0.0, dep-foo@1.0.0, dep-sub-foo@1.0.0, dep-bar@1.0.0 - ` exports[`test/lib/commands/fund.js TAP fund pkg missing version number > should print name only 1`] = ` http://example.com/foo \`-- foo +` +exports[`test/lib/commands/fund.js TAP fund using bad which value: index too high > should print message about invalid which 1`] = ` +--which=100 is not a valid index +1: Funding available at the following URL: http://example.com +2: Funding available at the following URL: http://sponsors.example.com/me +3: Funding available at the following URL: http://collective.example.com +Run \`npm fund [] --which=1\`, for example, to open the first funding URL listed in that package ` exports[`test/lib/commands/fund.js TAP fund using nested packages with multiple sources > should prompt with all available URLs 1`] = ` 1: Funding available at the following URL: https://one.example.com 2: Funding available at the following URL: https://two.example.com -Run \`npm fund [<@scope>/] --which=1\`, for example, to open the first funding URL listed in that package - -` - -exports[`test/lib/commands/fund.js TAP fund using nested packages with multiple sources, with a source number > should open the numbered URL 1`] = ` -Funding available at the following URL: - https://one.example.com -` - -exports[`test/lib/commands/fund.js TAP fund using package argument > should open funding url 1`] = ` -individual funding available at the following URL: - http://example.com/donate -` - -exports[`test/lib/commands/fund.js TAP fund using pkg name while having conflicting versions > should open greatest version 1`] = ` -Funding available at the following URL: - http://example.com/2 -` - -exports[`test/lib/commands/fund.js TAP fund using string shorthand > should open string-only url 1`] = ` -Funding available at the following URL: - https://example.com/sponsor +Run \`npm fund [] --which=1\`, for example, to open the first funding URL listed in that package ` exports[`test/lib/commands/fund.js TAP fund with no package containing funding > should print empty funding info 1`] = ` no-funding-package@0.0.0 - ` exports[`test/lib/commands/fund.js TAP sub dep with fund info and a parent with no funding info > should nest sub dep as child of root 1`] = ` @@ -90,25 +71,22 @@ test-multiple-funding-sources@1.0.0 \`-- http://example.com/c \`-- c@1.0.0 - ` -exports[`test/lib/commands/fund.js TAP workspaces filter funding info by a specific workspace > should display only filtered workspace name and its deps 1`] = ` 
+exports[`test/lib/commands/fund.js TAP workspaces filter funding info by a specific workspace name > should display only filtered workspace name and its deps 1`] = ` workspaces-support@1.0.0 \`-- https://example.com/a | \`-- a@1.0.0 \`-- http://example.com/c \`-- c@1.0.0 - ` -exports[`test/lib/commands/fund.js TAP workspaces filter funding info by a specific workspace > should display only filtered workspace path and its deps 1`] = ` +exports[`test/lib/commands/fund.js TAP workspaces filter funding info by a specific workspace path > should display only filtered workspace name and its deps 1`] = ` workspaces-support@1.0.0 \`-- https://example.com/a | \`-- a@1.0.0 \`-- http://example.com/c \`-- c@1.0.0 - ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/init.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/init.js.test.cjs index 677f29a1ab52b0..821193a55e1a98 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/init.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/init.js.test.cjs @@ -5,55 +5,20 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/commands/init.js TAP npm init workspces with root > does not print helper info 1`] = ` -Array [] -` - -exports[`test/lib/commands/init.js TAP workspaces no args > should print helper info 1`] = ` -Array [] -` +exports[`test/lib/commands/init.js TAP displays output > displays helper info 1`] = ` +This utility will walk you through creating a package.json file. +It only covers the most common items, and tries to guess sensible defaults. -exports[`test/lib/commands/init.js TAP workspaces no args, existing folder > should print helper info 1`] = ` -Array [] -` +See \`npm help init\` for definitive documentation on these fields +and exactly what they do. -exports[`test/lib/commands/init.js TAP workspaces post workspace-init reify > should print helper info 1`] = ` -Array [ - Array [ - String( +Use \`npm install <pkg>\` afterwards to install a package and +save it as a dependency in the package.json file. - added 1 package in 100ms - ), - ], -] +Press ^C at any time to quit.
` -exports[`test/lib/commands/init.js TAP workspaces post workspace-init reify > should reify tree on init ws complete 1`] = ` -{ - "name": "top-level", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "top-level", - "workspaces": [ - "a" - ] - }, - "a": { - "version": "1.0.0", - "license": "ISC", - "devDependencies": {} - }, - "node_modules/a": { - "resolved": "a", - "link": true - } - } -} - -` +exports[`test/lib/commands/init.js TAP workspaces no args -- yes > should print helper info 1`] = ` -exports[`test/lib/commands/init.js TAP workspaces with arg but missing workspace folder > should print helper info 1`] = ` -Array [] +added 1 package in {TIME} ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs index 0c34bd972dcf93..c26e30da1ef625 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/link.js.test.cjs @@ -6,50 +6,50 @@ */ 'use strict' exports[`test/lib/commands/link.js TAP hash character in working directory path > should create a global link to current pkg, even within path with hash 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-hash-character-in-working-directory-path/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/commands/tap-testdir-link-hash-character-in-working-directory-path/i_like_#_in_my_paths/test-pkg-link +{CWD}/global/node_modules/test-pkg-link -> {CWD}/other/i_like_#_in_my_paths/test-pkg-link ` exports[`test/lib/commands/link.js TAP link global linked pkg to local nm when using args > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/bar -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/@myscope/bar -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/scoped-linked -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/a -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/global-prefix/lib/node_modules/a -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/link-me-too -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/link-me-too -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/my-project/node_modules/test-pkg-link -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-nm-when-using-args/test-pkg-link +{CWD}/prefix/node_modules/@myscope/bar -> {CWD}/global/node_modules/@myscope/bar +{CWD}/prefix/node_modules/@myscope/linked -> {CWD}/other/scoped-linked +{CWD}/prefix/node_modules/a -> {CWD}/global/node_modules/a +{CWD}/prefix/node_modules/link-me-too -> {CWD}/other/link-me-too +{CWD}/prefix/node_modules/test-pkg-link -> {CWD}/other/test-pkg-link ` exports[`test/lib/commands/link.js TAP link global linked pkg to local workspace using args > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/@myscope/bar -> 
{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/global-prefix/lib/node_modules/@myscope/bar -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/scoped-linked -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/a -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/global-prefix/lib/node_modules/a -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/link-me-too -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/link-me-too -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/test-pkg-link -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/test-pkg-link -{CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/node_modules/x -> {CWD}/test/lib/commands/tap-testdir-link-link-global-linked-pkg-to-local-workspace-using-args/my-project/packages/x +{CWD}/prefix/node_modules/@myscope/bar -> {CWD}/global/node_modules/@myscope/bar +{CWD}/prefix/node_modules/@myscope/linked -> {CWD}/other/scoped-linked +{CWD}/prefix/node_modules/a -> {CWD}/global/node_modules/a +{CWD}/prefix/node_modules/link-me-too -> {CWD}/other/link-me-too +{CWD}/prefix/node_modules/test-pkg-link -> {CWD}/other/test-pkg-link +{CWD}/prefix/node_modules/x -> {CWD}/prefix/packages/x ` exports[`test/lib/commands/link.js TAP link pkg already in global space > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-pkg-already-in-global-space/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/commands/tap-testdir-link-link-pkg-already-in-global-space/scoped-linked +{CWD}/prefix/node_modules/@myscope/linked -> {CWD}/other/scoped-linked ` exports[`test/lib/commands/link.js TAP link pkg already in global space when prefix is a symlink > should create a local symlink to global pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/my-project/node_modules/@myscope/linked -> {CWD}/test/lib/commands/tap-testdir-link-link-pkg-already-in-global-space-when-prefix-is-a-symlink/scoped-linked +{CWD}/prefix/node_modules/@myscope/linked -> {CWD}/other/scoped-linked ` exports[`test/lib/commands/link.js TAP link to globalDir when in current working dir of pkg and no args > should create a global link to current pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/global-prefix/lib/node_modules/test-pkg-link -> {CWD}/test/lib/commands/tap-testdir-link-link-to-globalDir-when-in-current-working-dir-of-pkg-and-no-args/test-pkg-link +{CWD}/global/node_modules/test-pkg-link -> {CWD}/prefix ` exports[`test/lib/commands/link.js TAP link ws to globalDir when workspace specified and no args > should create a global link to current pkg 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-link-ws-to-globalDir-when-workspace-specified-and-no-args/global-prefix/lib/node_modules/a -> {CWD}/test/lib/commands/tap-testdir-link-link-ws-to-globalDir-when-workspace-specified-and-no-args/test-pkg-link/packages/a 
+{CWD}/global/node_modules/a -> {CWD}/prefix/packages/a ` exports[`test/lib/commands/link.js TAP test linked installed as symlinks > linked package should not be installed 1`] = ` -{CWD}/test/lib/commands/tap-testdir-link-test-linked-installed-as-symlinks/prefix/node_modules/mylink -> {CWD}/test/lib/commands/tap-testdir-link-test-linked-installed-as-symlinks/other/mylink +{CWD}/prefix/node_modules/mylink -> {CWD}/other/mylink ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs index 84bfed4c91500d..a6e4472cae95a1 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs @@ -7,53 +7,53 @@ 'use strict' exports[`test/lib/commands/ls.js TAP ignore missing optional deps --json > ls --json problems 1`] = ` Array [ - "invalid: optional-wrong@3.2.1 {project}/node_modules/optional-wrong", + "invalid: optional-wrong@3.2.1 {CWD}/prefix/node_modules/optional-wrong", "missing: peer-missing@1, required by test-npm-ls-ignore-missing-optional@1.2.3", - "invalid: peer-optional-wrong@3.2.1 {project}/node_modules/peer-optional-wrong", - "invalid: peer-wrong@3.2.1 {project}/node_modules/peer-wrong", + "invalid: peer-optional-wrong@3.2.1 {CWD}/prefix/node_modules/peer-optional-wrong", + "invalid: peer-wrong@3.2.1 {CWD}/prefix/node_modules/peer-wrong", "missing: prod-missing@1, required by test-npm-ls-ignore-missing-optional@1.2.3", - "invalid: prod-wrong@3.2.1 {project}/node_modules/prod-wrong", + "invalid: prod-wrong@3.2.1 {CWD}/prefix/node_modules/prod-wrong", ] ` exports[`test/lib/commands/ls.js TAP ignore missing optional deps --parseable > ls --parseable result 1`] = ` -{project} -{project}/node_modules/optional-ok -{project}/node_modules/optional-wrong -{project}/node_modules/peer-ok -{project}/node_modules/peer-optional-ok -{project}/node_modules/peer-optional-wrong -{project}/node_modules/peer-wrong -{project}/node_modules/prod-ok -{project}/node_modules/prod-wrong +{CWD}/prefix +{CWD}/prefix/node_modules/optional-ok +{CWD}/prefix/node_modules/optional-wrong +{CWD}/prefix/node_modules/peer-ok +{CWD}/prefix/node_modules/peer-optional-ok +{CWD}/prefix/node_modules/peer-optional-wrong +{CWD}/prefix/node_modules/peer-wrong +{CWD}/prefix/node_modules/prod-ok +{CWD}/prefix/node_modules/prod-wrong ` exports[`test/lib/commands/ls.js TAP ignore missing optional deps human output > ls result 1`] = ` -test-npm-ls-ignore-missing-optional@1.2.3 {project} -+-- unmet optional dependency optional-missing@1 +test-npm-ls-ignore-missing-optional@1.2.3 {CWD}/prefix ++-- UNMET OPTIONAL DEPENDENCY optional-missing@1 +-- optional-ok@1.2.3 +-- optional-wrong@3.2.1 invalid: "1" from the root project -+-- unmet dependency peer-missing@1 ++-- UNMET DEPENDENCY peer-missing@1 +-- peer-ok@1.2.3 -+-- unmet optional dependency peer-optional-missing@1 ++-- UNMET OPTIONAL DEPENDENCY peer-optional-missing@1 +-- peer-optional-ok@1.2.3 +-- peer-optional-wrong@3.2.1 invalid: "1" from the root project +-- peer-wrong@3.2.1 invalid: "1" from the root project -+-- unmet dependency prod-missing@1 ++-- UNMET DEPENDENCY prod-missing@1 +-- prod-ok@1.2.3 \`-- prod-wrong@3.2.1 invalid: "1" from the root project ` exports[`test/lib/commands/ls.js TAP ls --depth=0 > should output tree containing only top-level dependencies 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---depth-0 +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 \`-- foo@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --depth=1 > should 
output tree containing top-level deps and their deps only 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---depth-1 +test-npm-ls@1.0.0 {CWD}/prefix +-- a@1.0.0 | \`-- b@1.0.0 \`-- e@1.0.0 @@ -61,7 +61,7 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---depth-1 ` exports[`test/lib/commands/ls.js TAP ls --dev > should output tree containing dev deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---dev +test-npm-ls@1.0.0 {CWD}/prefix \`-- dev-dep@1.0.0 \`-- foo@1.0.0 \`-- dog@1.0.0 @@ -69,14 +69,14 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---dev ` exports[`test/lib/commands/ls.js TAP ls --link > should output tree containing linked deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---link +test-npm-ls@1.0.0 {CWD}/prefix \`-- linked-dep@1.0.0 -> ./linked-dep ` exports[`test/lib/commands/ls.js TAP ls --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` test-npm-ls@1.0.0 -| {CWD}/tap-testdir-ls-ls---long---depth-0 +| {CWD}/prefix | +-- chai@1.0.0 | @@ -93,7 +93,7 @@ test-npm-ls@1.0.0 exports[`test/lib/commands/ls.js TAP ls --long > should output tree info with descriptions 1`] = ` test-npm-ls@1.0.0 -| {CWD}/tap-testdir-ls-ls---long +| {CWD}/prefix | +-- chai@1.0.0 | @@ -115,192 +115,192 @@ test-npm-ls@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --depth=0 > should output tree containing only top-level dependencies 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---depth-0 -{CWD}/tap-testdir-ls-ls---parseable---depth-0/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable---depth-0/node_modules/foo +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo ` exports[`test/lib/commands/ls.js TAP ls --parseable --depth=1 > should output parseable containing top-level deps and their deps only 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---depth-1 -{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable---depth-1/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable --dev > should output tree containing dev deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---dev -{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/dev-dep -{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable---dev/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/dev-dep +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable --link > should output tree containing linked deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---link -{CWD}/tap-testdir-ls-ls---parseable---link/node_modules/linked-dep +{CWD}/prefix +{CWD}/prefix/node_modules/linked-dep ` exports[`test/lib/commands/ls.js TAP ls --parseable --long --depth=0 > should output tree containing top-level deps with descriptions 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0:test-npm-ls@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/chai:chai@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/optional-dep:optional-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long---depth-0/node_modules/prod-dep:prod-dep@1.0.0 
+{CWD}/prefix:test-npm-ls@1.0.0 +{CWD}/prefix/node_modules/chai:chai@1.0.0 +{CWD}/prefix/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/prefix/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/prefix/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/prefix/node_modules/prod-dep:prod-dep@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --long > should output tree info with descriptions 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---long:test-npm-ls@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/chai:chai@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/optional-dep:optional-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/prod-dep:prod-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/foo:foo@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/prod-dep/node_modules/dog:dog@2.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long/node_modules/dog:dog@1.0.0 +{CWD}/prefix:test-npm-ls@1.0.0 +{CWD}/prefix/node_modules/chai:chai@1.0.0 +{CWD}/prefix/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/prefix/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/prefix/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/prefix/node_modules/prod-dep:prod-dep@1.0.0 +{CWD}/prefix/node_modules/foo:foo@1.0.0 +{CWD}/prefix/node_modules/prod-dep/node_modules/dog:dog@2.0.0 +{CWD}/prefix/node_modules/dog:dog@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --long missing/invalid/extraneous > should output parseable result containing EXTRANEOUS/INVALID labels 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous:test-npm-ls@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/chai:chai@1.0.0:EXTRANEOUS -{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/foo:foo@1.0.0:INVALID -{CWD}/tap-testdir-ls-ls---parseable---long-missing-invalid-extraneous/node_modules/dog:dog@1.0.0 +{CWD}/prefix:test-npm-ls@1.0.0 +{CWD}/prefix/node_modules/chai:chai@1.0.0:EXTRANEOUS +{CWD}/prefix/node_modules/foo:foo@1.0.0:INVALID +{CWD}/prefix/node_modules/dog:dog@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --long print symlink target location > should output parseable results with symlink targets 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location:test-npm-ls@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/chai:chai@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/dev-dep:dev-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/linked-dep:linked-dep@1.0.0:{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/linked-dep -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/optional-dep:optional-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/peer-dep:peer-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/prod-dep:prod-dep@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/foo:foo@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/prod-dep/node_modules/dog:dog@2.0.0 
-{CWD}/tap-testdir-ls-ls---parseable---long-print-symlink-target-location/node_modules/dog:dog@1.0.0 +{CWD}/prefix:test-npm-ls@1.0.0 +{CWD}/prefix/node_modules/chai:chai@1.0.0 +{CWD}/prefix/node_modules/dev-dep:dev-dep@1.0.0 +{CWD}/prefix/node_modules/linked-dep:linked-dep@1.0.0:{CWD}/prefix/linked-dep +{CWD}/prefix/node_modules/optional-dep:optional-dep@1.0.0 +{CWD}/prefix/node_modules/peer-dep:peer-dep@1.0.0 +{CWD}/prefix/node_modules/prod-dep:prod-dep@1.0.0 +{CWD}/prefix/node_modules/foo:foo@1.0.0 +{CWD}/prefix/node_modules/prod-dep/node_modules/dog:dog@2.0.0 +{CWD}/prefix/node_modules/dog:dog@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --long with extraneous deps > should output long parseable output with extraneous info 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps:test-npm-ls@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/chai:chai@1.0.0:EXTRANEOUS -{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/foo:foo@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/dog:dog@1.0.0 +{CWD}/prefix:test-npm-ls@1.0.0 +{CWD}/prefix/node_modules/chai:chai@1.0.0:EXTRANEOUS +{CWD}/prefix/node_modules/foo:foo@1.0.0 +{CWD}/prefix/node_modules/dog:dog@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls --parseable --production > should output tree containing production deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---production -{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/optional-dep -{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/prod-dep -{CWD}/tap-testdir-ls-ls---parseable---production/node_modules/prod-dep/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/optional-dep +{CWD}/prefix/node_modules/prod-dep +{CWD}/prefix/node_modules/prod-dep/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable cycle deps > should print tree output omitting deduped ref 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-cycle-deps -{CWD}/tap-testdir-ls-ls---parseable-cycle-deps/node_modules/a -{CWD}/tap-testdir-ls-ls---parseable-cycle-deps/node_modules/b +{CWD}/prefix +{CWD}/prefix/node_modules/a +{CWD}/prefix/node_modules/b ` exports[`test/lib/commands/ls.js TAP ls --parseable default --depth value should be 0 > should output parseable output containing only top-level dependencies 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0 -{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-default---depth-value-should-be-0/node_modules/foo +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo ` exports[`test/lib/commands/ls.js TAP ls --parseable empty location > should print empty result 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-empty-location +{CWD}/prefix ` exports[`test/lib/commands/ls.js TAP ls --parseable extraneous deps > should output containing problems info 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps -{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable-extraneous-deps/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable from and resolved 
properties > should not be printed in tree output 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-from-and-resolved-properties -{CWD}/tap-testdir-ls-ls---parseable-from-and-resolved-properties/node_modules/simple-output +{CWD}/prefix +{CWD}/prefix/node_modules/simple-output ` exports[`test/lib/commands/ls.js TAP ls --parseable global > should print parseable output for global deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-global -{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/a -{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/b -{CWD}/tap-testdir-ls-ls---parseable-global/node_modules/b/node_modules/c +{CWD}/global +{CWD}/global/node_modules/a +{CWD}/global/node_modules/b +{CWD}/global/node_modules/b/node_modules/c ` exports[`test/lib/commands/ls.js TAP ls --parseable json read problems > should print empty result 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-json-read-problems +{CWD}/prefix ` exports[`test/lib/commands/ls.js TAP ls --parseable missing package.json > should output parseable missing name/version of top-level package 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-missing-package.json -{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/dog -{CWD}/tap-testdir-ls-ls---parseable-missing-package.json/node_modules/foo +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/dog +{CWD}/prefix/node_modules/foo ` exports[`test/lib/commands/ls.js TAP ls --parseable missing/invalid/extraneous > should output parseable containing top-level deps and their deps only 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous -{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable-missing-invalid-extraneous/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable no args > should output parseable representation of dependencies structure 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-no-args -{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable-no-args/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable overridden dep > should contain overridden outout 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-overridden-dep:test-overridden@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable-overridden-dep/node_modules/foo:foo@1.0.0 -{CWD}/tap-testdir-ls-ls---parseable-overridden-dep/node_modules/bar:bar@1.0.0:OVERRIDDEN +{CWD}/prefix:test-overridden@1.0.0 +{CWD}/prefix/node_modules/foo:foo@1.0.0 +{CWD}/prefix/node_modules/bar:bar@1.0.0:OVERRIDDEN ` exports[`test/lib/commands/ls.js TAP ls --parseable resolved points to git ref > should output tree containing git refs 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-resolved-points-to-git-ref -{CWD}/tap-testdir-ls-ls---parseable-resolved-points-to-git-ref/node_modules/abbrev +{CWD}/prefix +{CWD}/prefix/node_modules/abbrev ` exports[`test/lib/commands/ls.js TAP ls --parseable unmet optional dep > should output parseable with empty entry for missing optional deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep 
-{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/dev-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/optional-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/peer-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/prod-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/prod-dep/node_modules/dog -{CWD}/tap-testdir-ls-ls---parseable-unmet-optional-dep/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/dev-dep +{CWD}/prefix/node_modules/optional-dep +{CWD}/prefix/node_modules/peer-dep +{CWD}/prefix/node_modules/prod-dep +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/prod-dep/node_modules/dog +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable unmet peer dep > should output parseable signaling missing peer dep in problems 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/dev-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/optional-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/peer-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/prod-dep -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/prod-dep/node_modules/dog -{CWD}/tap-testdir-ls-ls---parseable-unmet-peer-dep/node_modules/dog +{CWD}/prefix +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/dev-dep +{CWD}/prefix/node_modules/optional-dep +{CWD}/prefix/node_modules/peer-dep +{CWD}/prefix/node_modules/prod-dep +{CWD}/prefix/node_modules/foo +{CWD}/prefix/node_modules/prod-dep/node_modules/dog +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable using aliases > should output tree containing aliases 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-using-aliases -{CWD}/tap-testdir-ls-ls---parseable-using-aliases/node_modules/a +{CWD}/prefix +{CWD}/prefix/node_modules/a ` exports[`test/lib/commands/ls.js TAP ls --parseable with filter arg > should output parseable contaning only occurrences of filtered by package 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-with-filter-arg/node_modules/chai +{CWD}/prefix/node_modules/chai ` exports[`test/lib/commands/ls.js TAP ls --parseable with filter arg nested dep > should output parseable contaning only occurrences of filtered package 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-with-filter-arg-nested-dep/node_modules/dog +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP ls --parseable with missing filter arg > should output parseable output containing no dependencies info 1`] = ` @@ -308,12 +308,12 @@ exports[`test/lib/commands/ls.js TAP ls --parseable with missing filter arg > sh ` exports[`test/lib/commands/ls.js TAP ls --parseable with multiple filter args > should output parseable contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable-with-multiple-filter-args/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable-with-multiple-filter-args/node_modules/dog +{CWD}/prefix/node_modules/chai +{CWD}/prefix/node_modules/dog ` exports[`test/lib/commands/ls.js TAP 
ls --production > should output tree containing production deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---production +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 +-- optional-dep@1.0.0 \`-- prod-dep@1.0.0 @@ -322,13 +322,13 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---production ` exports[`test/lib/commands/ls.js TAP ls broken resolved field > should NOT print git refs in output tree 1`] = ` -npm-broken-resolved-field-test@1.0.0 {CWD}/tap-testdir-ls-ls-broken-resolved-field +npm-broken-resolved-field-test@1.0.0 {CWD}/prefix \`-- a@1.0.1 ` exports[`test/lib/commands/ls.js TAP ls colored output > should output tree containing color info 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-colored-output +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 extraneous +-- foo@1.0.0 invalid: "^2.0.0" from the root project | \`-- dog@1.0.0 @@ -337,7 +337,7 @@ exports[`test/lib/commands/ls.js TAP ls colored output > should output tree cont ` exports[`test/lib/commands/ls.js TAP ls cycle deps > should print tree output containing deduped ref 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-cycle-deps +test-npm-ls@1.0.0 {CWD}/prefix \`-- a@1.0.0 \`-- b@1.0.0 \`-- a@1.0.0 deduped @@ -345,7 +345,7 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-cycle-deps ` exports[`test/lib/commands/ls.js TAP ls cycle deps with filter args > should print tree output containing deduped ref 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-cycle-deps-with-filter-args +test-npm-ls@1.0.0 {CWD}/prefix \`-- a@1.0.0  \`-- b@1.0.0  \`-- a@1.0.0 deduped @@ -353,7 +353,7 @@ exports[`test/lib/commands/ls.js TAP ls cycle deps with filter args > should pri ` exports[`test/lib/commands/ls.js TAP ls deduped missing dep > should output parseable signaling missing peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-deduped-missing-dep +test-npm-ls@1.0.0 {CWD}/prefix +-- a@1.0.0 | \`-- UNMET DEPENDENCY b@^1.0.0 \`-- UNMET DEPENDENCY b@^1.0.0 @@ -361,40 +361,40 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-deduped-missing-dep ` exports[`test/lib/commands/ls.js TAP ls default --depth value should be 0 > should output tree containing only top-level dependencies 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-default---depth-value-should-be-0 +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 \`-- foo@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls empty location > should print empty result 1`] = ` -{CWD}/tap-testdir-ls-ls-empty-location +{CWD}/prefix \`-- (empty) ` exports[`test/lib/commands/ls.js TAP ls extraneous deps > should output containing problems info 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-extraneous-deps +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 extraneous \`-- foo@1.0.0 \`-- dog@1.0.0 ` -exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option > should list a in top-level only 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option should list a in top-level only > output 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/prefix \`-- a@1.0.0 ` -exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option > should print empty results msg 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option should print empty results msg > output 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/prefix \`-- 
(empty) ` -exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option > should print expected result 1`] = ` -test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg-using-depth-option +exports[`test/lib/commands/ls.js TAP ls filter pkg arg using depth option should print expected result > output 1`] = ` +test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/prefix \`-- b@1.0.0 \`-- c@1.0.0 \`-- d@1.0.0 @@ -402,7 +402,7 @@ test-pkg-arg-filter-with-depth-opt@1.0.0 {CWD}/tap-testdir-ls-ls-filter-pkg-arg- ` exports[`test/lib/commands/ls.js TAP ls filtering by child of missing dep > should print tree and not duplicate child of missing items 1`] = ` -filter-by-child-of-missing-dep@1.0.0 {CWD}/tap-testdir-ls-ls-filtering-by-child-of-missing-dep +filter-by-child-of-missing-dep@1.0.0 {CWD}/prefix +-- b@1.0.0 extraneous | \`-- c@1.0.0 deduped +-- c@1.0.0 extraneous @@ -412,13 +412,13 @@ filter-by-child-of-missing-dep@1.0.0 {CWD}/tap-testdir-ls-ls-filtering-by-child- ` exports[`test/lib/commands/ls.js TAP ls from and resolved properties > should not be printed in tree output 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-from-and-resolved-properties +test-npm-ls@1.0.0 {CWD}/prefix \`-- simple-output@2.1.1 ` exports[`test/lib/commands/ls.js TAP ls global > should print tree and not mark top-level items extraneous 1`] = ` -{CWD}/tap-testdir-ls-ls-global +{CWD}/global +-- a@1.0.0 \`-- b@1.0.0 \`-- c@1.0.0 @@ -426,7 +426,7 @@ exports[`test/lib/commands/ls.js TAP ls global > should print tree and not mark ` exports[`test/lib/commands/ls.js TAP ls invalid deduped dep > should output tree signaling mismatching peer dep in problems 1`] = ` -invalid-deduped-dep@1.0.0 {CWD}/tap-testdir-ls-ls-invalid-deduped-dep +invalid-deduped-dep@1.0.0 {CWD}/prefix +-- a@1.0.0 | \`-- b@1.0.0 deduped invalid: "^2.0.0" from the root project, "^2.0.0" from node_modules/a \`-- b@1.0.0 invalid: "^2.0.0" from the root project, "^2.0.0" from node_modules/a @@ -434,7 +434,7 @@ exports[`test/lib/commands/ls.js TAP ls invalid deduped dep > should output tree ` exports[`test/lib/commands/ls.js TAP ls invalid peer dep > should output tree signaling mismatching peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-invalid-peer-dep +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 +-- dev-dep@1.0.0 | \`-- foo@1.0.0 @@ -447,28 +447,28 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-invalid-peer-dep ` exports[`test/lib/commands/ls.js TAP ls json read problems > should print empty result 1`] = ` -{CWD}/tap-testdir-ls-ls-json-read-problems +{CWD}/prefix \`-- (empty) ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should filter by parent folder workspace config 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should filter by parent folder workspace config > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- e@1.0.0 -> ./group/e \`-- f@1.0.0 -> ./group/f ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should filter single workspace 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should filter single workspace > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- a@1.0.0 -> ./a | \`-- d@1.0.0 deduped -> ./d \`-- d@1.0.0 -> ./d ` -exports[`test/lib/commands/ls.js TAP ls loading a tree 
containing workspaces > should filter using workspace config 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should filter using workspace config > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix \`-- a@1.0.0 -> ./a +-- baz@1.0.0 +-- c@1.0.0 @@ -478,8 +478,8 @@ workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspac ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should inlude root and specified workspace 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should inlude root and specified workspace > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- d@1.0.0 -> ./d | \`-- foo@1.1.1 | \`-- bar@1.0.0 @@ -487,8 +487,8 @@ workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspac ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should list --all workspaces properly 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should list --all workspaces properly > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- a@1.0.0 -> ./a | +-- baz@1.0.0 | +-- c@1.0.0 @@ -503,8 +503,8 @@ workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspac ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should list only prod deps of workspaces 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should list only prod deps of workspaces > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- a@1.0.0 -> ./a | +-- c@1.0.0 | \`-- d@1.0.0 deduped -> ./d @@ -518,8 +518,8 @@ workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspac ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should list workspaces properly with default configs 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should list workspaces properly with default configs > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix +-- a@1.0.0 -> ./a | +-- baz@1.0.0 | +-- c@1.0.0 @@ -533,14 +533,14 @@ exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > s  ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should not list workspaces with --no-workspaces 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should not list workspaces with --no-workspaces > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix \`-- pacote@1.0.0  ` -exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces > should print all tree and filter by dep within only the ws subtree 1`] = ` -workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspaces +exports[`test/lib/commands/ls.js TAP ls loading a tree containing workspaces should print all tree and filter by dep within only the ws subtree > output 1`] = ` +workspaces-tree@1.0.0 {CWD}/prefix \`-- d@1.0.0 -> ./d \`-- foo@1.1.1 
\`-- bar@1.0.0 @@ -548,7 +548,7 @@ workspaces-tree@1.0.0 {CWD}/tap-testdir-ls-ls-loading-a-tree-containing-workspac ` exports[`test/lib/commands/ls.js TAP ls missing package.json > should output tree missing name/version of top-level package 1`] = ` -{CWD}/tap-testdir-ls-ls-missing-package.json +{CWD}/prefix +-- chai@1.0.0 extraneous +-- dog@1.0.0 extraneous \`-- foo@1.0.0 extraneous @@ -557,7 +557,7 @@ exports[`test/lib/commands/ls.js TAP ls missing package.json > should output tre ` exports[`test/lib/commands/ls.js TAP ls missing/invalid/extraneous > should output tree containing missing, invalid, extraneous labels 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 extraneous +-- foo@1.0.0 invalid: "^2.0.0" from the root project | \`-- dog@1.0.0 @@ -566,7 +566,7 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous ` exports[`test/lib/commands/ls.js TAP ls no args > should output tree representation of dependencies structure 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-no-args +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 \`-- foo@1.0.0 \`-- dog@1.0.0 @@ -574,21 +574,21 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-no-args ` exports[`test/lib/commands/ls.js TAP ls overridden dep > should contain overridden outout 1`] = ` -test-overridden@1.0.0 {CWD}/tap-testdir-ls-ls-overridden-dep +test-overridden@1.0.0 {CWD}/prefix \`-- foo@1.0.0 \`-- bar@1.0.0 overridden ` exports[`test/lib/commands/ls.js TAP ls overridden dep w/ color > should contain overridden outout 1`] = ` -test-overridden@1.0.0 {CWD}/tap-testdir-ls-ls-overridden-dep-w-color +test-overridden@1.0.0 {CWD}/prefix \`-- foo@1.0.0  \`-- bar@1.0.0 overridden  ` exports[`test/lib/commands/ls.js TAP ls print deduped symlinks > should output tree containing linked deps 1`] = ` -print-deduped-symlinks@1.0.0 {CWD}/tap-testdir-ls-ls-print-deduped-symlinks +print-deduped-symlinks@1.0.0 {CWD}/prefix +-- a@1.0.0 | \`-- b@1.0.0 deduped -> ./b \`-- b@1.0.0 -> ./b @@ -596,13 +596,13 @@ print-deduped-symlinks@1.0.0 {CWD}/tap-testdir-ls-ls-print-deduped-symlinks ` exports[`test/lib/commands/ls.js TAP ls resolved points to git ref > should output tree containing git refs 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-resolved-points-to-git-ref +test-npm-ls@1.0.0 {CWD}/prefix \`-- abbrev@1.1.1 (git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c) ` exports[`test/lib/commands/ls.js TAP ls unmet optional dep > should output tree with empty entry for missing optional deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-unmet-optional-dep +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 +-- dev-dep@1.0.0 | \`-- foo@1.0.0 @@ -616,19 +616,19 @@ exports[`test/lib/commands/ls.js TAP ls unmet optional dep > should output tree ` exports[`test/lib/commands/ls.js TAP ls unmet peer dep > should output tree signaling missing peer dep in problems 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-unmet-peer-dep +test-npm-ls@1.0.0 {CWD}/prefix \`-- UNMET DEPENDENCY peer-dep@* ` exports[`test/lib/commands/ls.js TAP ls using aliases > should output tree containing aliases 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-using-aliases +test-npm-ls@1.0.0 {CWD}/prefix \`-- a@npm:b@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls with args and dedupe entries > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-args-and-dedupe-entries +dedupe-entries@1.0.0 {CWD}/prefix +-- 
@npmcli/a@1.0.0 | \`-- @npmcli/b@1.1.2 deduped +-- @npmcli/b@1.1.2 @@ -638,7 +638,7 @@ exports[`test/lib/commands/ls.js TAP ls with args and dedupe entries > should pr ` exports[`test/lib/commands/ls.js TAP ls with args and different order of items > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-args-and-different-order-of-items +dedupe-entries@1.0.0 {CWD}/prefix +-- @npmcli/a@1.0.0 | \`-- @npmcli/c@1.0.0 deduped +-- @npmcli/b@1.1.2 @@ -648,32 +648,32 @@ dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-args-and-different-order-of-it ` exports[`test/lib/commands/ls.js TAP ls with dot filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-dot-filter-arg +test-npm-ls@1.0.0 {CWD}/prefix \`-- (empty) ` exports[`test/lib/commands/ls.js TAP ls with filter arg > should output tree contaning only occurrences of filtered by package and colored output 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-filter-arg +test-npm-ls@1.0.0 {CWD}/prefix \`-- chai@1.0.0  ` exports[`test/lib/commands/ls.js TAP ls with filter arg nested dep > should output tree contaning only occurrences of filtered package and its ancestors 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-filter-arg-nested-dep +test-npm-ls@1.0.0 {CWD}/prefix \`-- foo@1.0.0 \`-- dog@1.0.0 ` exports[`test/lib/commands/ls.js TAP ls with missing filter arg > should output tree containing no dependencies info 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-missing-filter-arg +test-npm-ls@1.0.0 {CWD}/prefix \`-- (empty) ` exports[`test/lib/commands/ls.js TAP ls with multiple filter args > should output tree contaning only occurrences of multiple filtered packages and their ancestors 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-multiple-filter-args +test-npm-ls@1.0.0 {CWD}/prefix +-- chai@1.0.0 \`-- foo@1.0.0 \`-- dog@1.0.0 @@ -681,7 +681,7 @@ test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls-with-multiple-filter-args ` exports[`test/lib/commands/ls.js TAP ls with no args dedupe entries > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries +dedupe-entries@1.0.0 {CWD}/prefix +-- @npmcli/a@1.0.0 | \`-- @npmcli/b@1.1.2 deduped +-- @npmcli/b@1.1.2 @@ -691,7 +691,7 @@ dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries ` exports[`test/lib/commands/ls.js TAP ls with no args dedupe entries and not displaying all > should print tree output containing deduped ref 1`] = ` -dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries-and-not-displaying-all +dedupe-entries@1.0.0 {CWD}/prefix +-- @npmcli/a@1.0.0 +-- @npmcli/b@1.1.2 \`-- @npmcli/c@1.0.0 @@ -699,14 +699,14 @@ dedupe-entries@1.0.0 {CWD}/tap-testdir-ls-ls-with-no-args-dedupe-entries-and-not ` exports[`test/lib/commands/ls.js TAP ls workspace and missing optional dep > should omit missing optional dep 1`] = ` -root@ {CWD}/tap-testdir-ls-ls-workspace-and-missing-optional-dep +root@ {CWD}/prefix +-- baz@1.0.0 -> ./baz \`-- foo@1.0.0 ` exports[`test/lib/commands/ls.js TAP show multiple invalid reasons > ls result 1`] = ` -test-npm-ls@1.0.0 {cwd}/tap-testdir-ls-show-multiple-invalid-reasons +test-npm-ls@1.0.0 {CWD}/prefix +-- cat@1.0.0 invalid: "^2.0.0" from the root project | \`-- dog@1.0.0 deduped invalid: "^1.2.3" from the root project, "^2.0.0" from node_modules/cat +-- chai@1.0.0 extraneous diff --git 
a/deps/npm/tap-snapshots/test/lib/commands/outdated.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/outdated.js.test.cjs index ef6baa96661955..a72338b0bacc56 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/outdated.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/outdated.js.test.cjs @@ -6,237 +6,216 @@ */ 'use strict' exports[`test/lib/commands/outdated.js TAP aliases > should display aliased outdated dep output 1`] = ` - Package Current Wanted Latest Location Depended by -cat:dog@latest 1.0.0 2.0.0 2.0.0 node_modules/cat tap-testdir-outdated-aliases +cat:dog@latest 1.0.0 2.0.0 2.0.0 node_modules/cat prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --all > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps -chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix +chai 1.0.0 1.0.1 1.0.1 node_modules/chai prefix +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix +theta MISSING 1.0.1 1.0.1 - prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --json --long > must match snapshot 1`] = ` - { "cat": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat", + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/cat", "type": "dependencies" }, "chai": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai", + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/chai", "type": "peerDependencies" }, "dog": { "current": "1.0.1", "wanted": "1.0.1", "latest": "2.0.0", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog", + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/dog", "type": "dependencies" }, "theta": { "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", + "dependent": "prefix", "type": "dependencies" } } ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --json > must match snapshot 1`] = ` - { "cat": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat" + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/cat" }, "chai": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai" + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/chai" }, "dog": { "current": "1.0.1", "wanted": "1.0.1", "latest": "2.0.0", - "dependent": 
"tap-testdir-outdated-should-display-outdated-deps", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog" + "dependent": "prefix", + "location": "{CWD}/prefix/node_modules/dog" }, "theta": { "wanted": "1.0.1", "latest": "1.0.1", - "dependent": "tap-testdir-outdated-should-display-outdated-deps" + "dependent": "prefix" } } ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --long > must match snapshot 1`] = ` - -Package Current Wanted Latest Location Depended by Package Type Homepage -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps dependencies -chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps peerDependencies -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps dependencies -theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps dependencies +Package Current Wanted Latest Location Depended by Package Type Homepage +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix dependencies +chai 1.0.0 1.0.1 1.0.1 node_modules/chai prefix peerDependencies +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix dependencies +theta MISSING 1.0.1 1.0.1 - prefix dependencies ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --omit=dev --omit=peer > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix +theta MISSING 1.0.1 1.0.1 - prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --omit=dev > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps -chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix +chai 1.0.0 1.0.1 1.0.1 node_modules/chai prefix +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix +theta MISSING 1.0.1 1.0.1 - prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --omit=prod > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps -chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix +chai 1.0.0 1.0.1 1.0.1 node_modules/chai prefix +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --parseable --long > must match snapshot 1`] = ` - -{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:tap-testdir-outdated-should-display-outdated-deps:dependencies: 
-{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:tap-testdir-outdated-should-display-outdated-deps:peerDependencies: -{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-should-display-outdated-deps:dependencies: -:theta@1.0.1:MISSING:theta@1.0.1:tap-testdir-outdated-should-display-outdated-deps:dependencies: +{CWD}/prefix/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:prefix:dependencies: +{CWD}/prefix/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:prefix:peerDependencies: +{CWD}/prefix/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:prefix:dependencies: +:theta@1.0.1:MISSING:theta@1.0.1:prefix:dependencies: ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated --parseable > must match snapshot 1`] = ` - -{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:tap-testdir-outdated-should-display-outdated-deps -{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:tap-testdir-outdated-should-display-outdated-deps -{CWD}/test/lib/commands/tap-testdir-outdated-should-display-outdated-deps/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-should-display-outdated-deps -:theta@1.0.1:MISSING:theta@1.0.1:tap-testdir-outdated-should-display-outdated-deps +{CWD}/prefix/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:prefix +{CWD}/prefix/node_modules/chai:chai@1.0.1:chai@1.0.0:chai@1.0.1:prefix +{CWD}/prefix/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:prefix +:theta@1.0.1:MISSING:theta@1.0.1:prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps -chai 1.0.0 1.0.1 1.0.1 node_modules/chai tap-testdir-outdated-should-display-outdated-deps -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-should-display-outdated-deps -theta MISSING 1.0.1 1.0.1 - tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix +chai 1.0.0 1.0.1 1.0.1 node_modules/chai prefix +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix +theta MISSING 1.0.1 1.0.1 - prefix ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated global > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat global ` exports[`test/lib/commands/outdated.js TAP should display outdated deps outdated specific dep > must match snapshot 1`] = ` - Package Current Wanted Latest Location Depended by -cat 1.0.0 1.0.1 1.0.1 node_modules/cat tap-testdir-outdated-should-display-outdated-deps +cat 1.0.0 1.0.1 1.0.1 node_modules/cat prefix ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display all dependencies 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display all dependencies > output 1`] = ` Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 chai 1.0.0 1.0.1 1.0.1 node_modules/chai foo -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix theta MISSING 1.0.1 1.0.1 - c@1.0.0 ` -exports[`test/lib/commands/outdated.js TAP 
workspaces > should display json results filtered by ws 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display json results filtered by ws > output 1`] = ` { "cat": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", "dependent": "a", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/cat" + "location": "{CWD}/prefix/node_modules/cat" } } ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display missing deps when filtering by ws 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display missing deps when filtering by ws > output 1`] = ` Package Current Wanted Latest Location Depended by theta MISSING 1.0.1 1.0.1 - c@1.0.0 ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display nested deps when filtering by ws and using --all 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display nested deps when filtering by ws and using --all > output 1`] = ` Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 chai 1.0.0 1.0.1 1.0.1 node_modules/chai foo ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display no results if ws has no deps to display 1`] = ` +exports[`test/lib/commands/outdated.js TAP workspaces should display no results if ws has no deps to display > output 1`] = ` ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display only root outdated when ws disabled 1`] = ` +exports[`test/lib/commands/outdated.js TAP workspaces should display only root outdated when ws disabled > output 1`] = ` ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display parseable results filtered by ws 1`] = ` - -{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a +exports[`test/lib/commands/outdated.js TAP workspaces should display parseable results filtered by ws > output 1`] = ` +{CWD}/prefix/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display results filtered by ws 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display results filtered by ws > output 1`] = ` Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display ws outdated deps human output 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display ws outdated deps human output > output 1`] = ` Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix theta MISSING 1.0.1 1.0.1 - c@1.0.0 ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display ws outdated deps json output 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should display ws outdated deps json output > output 1`] = ` { "cat": { "current": "1.0.0", "wanted": "1.0.1", "latest": "1.0.1", "dependent": "a", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/cat" + "location": "{CWD}/prefix/node_modules/cat" }, "dog": { "current": "1.0.1", "wanted": "1.0.1", "latest": "2.0.0", - "dependent": "tap-testdir-outdated-workspaces", - "location": "{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/dog" + "dependent": "prefix", + "location": 
"{CWD}/prefix/node_modules/dog" }, "theta": { "wanted": "1.0.1", @@ -246,17 +225,15 @@ exports[`test/lib/commands/outdated.js TAP workspaces > should display ws outdat } ` -exports[`test/lib/commands/outdated.js TAP workspaces > should display ws outdated deps parseable output 1`] = ` - -{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a -{CWD}/test/lib/commands/tap-testdir-outdated-workspaces/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:tap-testdir-outdated-workspaces +exports[`test/lib/commands/outdated.js TAP workspaces should display ws outdated deps parseable output > output 1`] = ` +{CWD}/prefix/node_modules/cat:cat@1.0.1:cat@1.0.0:cat@1.0.1:a +{CWD}/prefix/node_modules/dog:dog@1.0.1:dog@1.0.1:dog@2.0.0:prefix :theta@1.0.1:MISSING:theta@1.0.1:c ` -exports[`test/lib/commands/outdated.js TAP workspaces > should highlight ws in dependend by section 1`] = ` - +exports[`test/lib/commands/outdated.js TAP workspaces should highlight ws in dependend by section > output 1`] = ` Package Current Wanted Latest Location Depended by cat 1.0.0 1.0.1 1.0.1 node_modules/cat a@1.0.0 -dog 1.0.1 1.0.1 2.0.0 node_modules/dog tap-testdir-outdated-workspaces +dog 1.0.1 1.0.1 2.0.0 node_modules/dog prefix theta MISSING 1.0.1 1.0.1 - c@1.0.0 ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/profile.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/profile.js.test.cjs index 2103ccdd32e334..4959f7cdd2cc32 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/profile.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/profile.js.test.cjs @@ -8,8 +8,7 @@ exports[`test/lib/commands/profile.js TAP enable-2fa from token and set otp, retries on pending and verifies with qrcode > should output 2fa enablement success msgs 1`] = ` Scan into your authenticator app: qrcode - Or enter code: -1234 + Or enter code: 1234 2FA successfully enabled. Below are your recovery codes, please print these out. You will need these to recover access to your account if you lose your authentication device. 
123456 diff --git a/deps/npm/tap-snapshots/test/lib/commands/query.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/query.js.test.cjs index 9ad6e2e38084eb..a6dbfcf7c693c8 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/query.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/query.js.test.cjs @@ -13,8 +13,8 @@ exports[`test/lib/commands/query.js TAP global > should return global package 1` "_id": "lorem@2.0.0", "pkgid": "lorem@2.0.0", "location": "node_modules/lorem", - "path": "{CWD}/test/lib/commands/tap-testdir-query-global/global/node_modules/lorem", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-global/global/node_modules/lorem", + "path": "{CWD}/global/node_modules/lorem", + "realpath": "{CWD}/global/node_modules/lorem", "resolved": null, "from": [ "" @@ -42,8 +42,8 @@ exports[`test/lib/commands/query.js TAP include-workspace-root > should return w }, "pkgid": "project@", "location": "", - "path": "{CWD}/test/lib/commands/tap-testdir-query-include-workspace-root/prefix", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-include-workspace-root/prefix", + "path": "{CWD}/prefix", + "realpath": "{CWD}/prefix", "resolved": null, "from": [], "to": [ @@ -63,8 +63,8 @@ exports[`test/lib/commands/query.js TAP include-workspace-root > should return w "_id": "c@1.0.0", "pkgid": "c@1.0.0", "location": "c", - "path": "{CWD}/test/lib/commands/tap-testdir-query-include-workspace-root/prefix/c", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-include-workspace-root/prefix/c", + "path": "{CWD}/prefix/c", + "realpath": "{CWD}/prefix/c", "resolved": null, "from": [], "to": [], @@ -85,8 +85,8 @@ exports[`test/lib/commands/query.js TAP linked node > should return linked node "_id": "a@1.0.0", "pkgid": "a@1.0.0", "location": "a", - "path": "{CWD}/test/lib/commands/tap-testdir-query-linked-node/prefix/a", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-linked-node/prefix/a", + "path": "{CWD}/prefix/a", + "realpath": "{CWD}/prefix/a", "resolved": null, "from": [], "to": [], @@ -109,8 +109,8 @@ exports[`test/lib/commands/query.js TAP recursive tree > should return everythin }, "pkgid": "project@", "location": "", - "path": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix", + "path": "{CWD}/prefix", + "realpath": "{CWD}/prefix", "resolved": null, "from": [], "to": [ @@ -126,8 +126,8 @@ exports[`test/lib/commands/query.js TAP recursive tree > should return everythin { "pkgid": "a@", "location": "node_modules/a", - "path": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix/node_modules/a", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix/node_modules/a", + "path": "{CWD}/prefix/node_modules/a", + "realpath": "{CWD}/prefix/node_modules/a", "resolved": null, "from": [ "" @@ -142,8 +142,8 @@ exports[`test/lib/commands/query.js TAP recursive tree > should return everythin { "pkgid": "b@", "location": "node_modules/b", - "path": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix/node_modules/b", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-recursive-tree/prefix/node_modules/b", + "path": "{CWD}/prefix/node_modules/b", + "realpath": "{CWD}/prefix/node_modules/b", "resolved": null, "from": [ "" @@ -171,8 +171,8 @@ exports[`test/lib/commands/query.js TAP simple query > should return root object }, "pkgid": "project@", "location": "", - "path": 
"{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix", + "path": "{CWD}/prefix", + "realpath": "{CWD}/prefix", "resolved": null, "from": [], "to": [ @@ -188,8 +188,8 @@ exports[`test/lib/commands/query.js TAP simple query > should return root object { "pkgid": "a@", "location": "node_modules/a", - "path": "{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix/node_modules/a", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix/node_modules/a", + "path": "{CWD}/prefix/node_modules/a", + "realpath": "{CWD}/prefix/node_modules/a", "resolved": null, "from": [ "" @@ -204,8 +204,8 @@ exports[`test/lib/commands/query.js TAP simple query > should return root object { "pkgid": "b@", "location": "node_modules/b", - "path": "{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix/node_modules/b", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-simple-query/prefix/node_modules/b", + "path": "{CWD}/prefix/node_modules/b", + "realpath": "{CWD}/prefix/node_modules/b", "resolved": null, "from": [ "" @@ -228,8 +228,8 @@ exports[`test/lib/commands/query.js TAP workspace query > should return workspac "_id": "c@1.0.0", "pkgid": "c@1.0.0", "location": "c", - "path": "{CWD}/test/lib/commands/tap-testdir-query-workspace-query/prefix/c", - "realpath": "{CWD}/test/lib/commands/tap-testdir-query-workspace-query/prefix/c", + "path": "{CWD}/prefix/c", + "realpath": "{CWD}/prefix/c", "resolved": null, "from": [], "to": [], diff --git a/deps/npm/tap-snapshots/test/lib/commands/stars.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/stars.js.test.cjs index fbf074f718d1d9..d55d7b414d7fd2 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/stars.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/stars.js.test.cjs @@ -6,7 +6,6 @@ */ 'use strict' exports[`test/lib/commands/stars.js TAP no args > should output a list of starred packages 1`] = ` - @npmcli/arborist @npmcli/map-workspaces libnpmfund diff --git a/deps/npm/tap-snapshots/test/lib/commands/team.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/team.js.test.cjs index 6a93234f54fc84..f72fcb2f1fa943 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/team.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/team.js.test.cjs @@ -37,18 +37,18 @@ ruyadorno ` exports[`test/lib/commands/team.js TAP team ls default output > should list users for a given scope:team 1`] = ` - @npmcli:developers has 4 users: -darcyclarke isaacs nlf ruyadorno +darcyclarke +isaacs +nlf +ruyadorno ` exports[`test/lib/commands/team.js TAP team ls no users > should list no users for a given scope 1`] = ` - @npmcli:developers has 0 users ` exports[`test/lib/commands/team.js TAP team ls single user > should list single user for a given scope 1`] = ` - @npmcli:developers has 1 user: foo ` @@ -60,18 +60,17 @@ npmcli:product ` exports[`test/lib/commands/team.js TAP team ls default output > should list teams for a given scope 1`] = ` - @npmcli has 3 teams: -@npmcli:designers @npmcli:developers @npmcli:product +@npmcli:designers +@npmcli:developers +@npmcli:product ` exports[`test/lib/commands/team.js TAP team ls no teams > should list no teams for a given scope 1`] = ` - @npmcli has 0 teams ` exports[`test/lib/commands/team.js TAP team ls single team > should list single team for a given scope 1`] = ` - @npmcli has 1 team: @npmcli:developers ` diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs 
b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs index c18dd305b052b4..578bbd79812a22 100644 --- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs @@ -604,8 +604,8 @@ safer to use a registry-provided authentication bearer token stored in the current level * Type: null, "restricted", or "public" -If do not want your scoped package to be publicly viewable (and installable) -set \`--access=restricted\`. +If you do not want your scoped package to be publicly viewable (and +installable) set \`--access=restricted\`. Unscoped packages can not be set to \`restricted\`. @@ -654,7 +654,8 @@ exit code. * Default: "web" * Type: "legacy" or "web" -What authentication strategy to use with \`login\`. +What authentication strategy to use with \`login\`. Note that if an \`otp\` +config is given, this value will always be set to \`legacy\`. #### \`before\` @@ -1702,7 +1703,7 @@ be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \`peerDependencies\` object. -When such and override is performed, a warning is printed, explaining the +When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \`--strict-peer-deps\` is set, then this warning is treated as a failure. @@ -1983,7 +1984,7 @@ Alias for \`--include=dev\`. \`--install-strategy=shallow\` Only install direct dependencies in the top level \`node_modules\`, but hoist -on deeper dependendencies. Sets \`--install-strategy=shallow\`. +on deeper dependencies. Sets \`--install-strategy=shallow\`. #### \`init.author.email\` @@ -2380,6 +2381,7 @@ Array [ "tag", "tag-version-prefix", "umask", + "unicode", "user-agent", "workspace", "workspaces", @@ -2409,7 +2411,6 @@ Array [ "prefix", "timing", "tmp", - "unicode", "update-notifier", "usage", "userconfig", @@ -2491,6 +2492,8 @@ npm access grant [] npm access revoke [] \`\`\` +Note: This command is unaware of workspaces. + #### \`json\` #### \`otp\` #### \`registry\` @@ -2515,6 +2518,8 @@ npm adduser alias: add-user \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`scope\` #### \`auth-type\` @@ -2603,6 +2608,8 @@ npm cache ls [@] npm cache verify \`\`\` +Note: This command is unaware of workspaces. + #### \`cache\` ` @@ -2665,6 +2672,8 @@ Run "npm help completion" for more info npm completion \`\`\` +Note: This command is unaware of workspaces. + NO PARAMS ` @@ -2698,6 +2707,8 @@ npm config fix alias: c \`\`\` +Note: This command is unaware of workspaces. + #### \`json\` #### \`global\` #### \`editor\` @@ -2761,6 +2772,8 @@ Run "npm help deprecate" for more info npm deprecate \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`otp\` ` @@ -2872,6 +2885,8 @@ Run "npm help doctor" for more info npm doctor [ping] [registry] [versions] [environment] [permissions] [cache] \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` ` @@ -2890,6 +2905,8 @@ Run "npm help edit" for more info npm edit [/...] \`\`\` +Note: This command is unaware of workspaces. + #### \`editor\` ` @@ -2965,6 +2982,8 @@ Run "npm help explore" for more info npm explore [ -- ] \`\`\` +Note: This command is unaware of workspaces. + #### \`shell\` ` @@ -3040,6 +3059,8 @@ Run "npm help get" for more info npm get [ ...] (See \`npm config\`) \`\`\` +Note: This command is unaware of workspaces. 
+ NO PARAMS ` @@ -3062,6 +3083,8 @@ npm help [] alias: hlep \`\`\` +Note: This command is unaware of workspaces. + #### \`viewer\` ` @@ -3080,6 +3103,8 @@ Run "npm help help-search" for more info npm help-search \`\`\` +Note: This command is unaware of workspaces. + #### \`long\` ` @@ -3104,6 +3129,8 @@ npm hook rm npm hook update \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`otp\` ` @@ -3112,7 +3139,7 @@ exports[`test/lib/docs.js TAP usage init > must match snapshot 1`] = ` Create a package.json file Usage: -npm init (same as \`npx ) +npm init (same as \`npx \`) npm init <@scope> (same as \`npx <@scope>/create\`) Options: @@ -3125,7 +3152,7 @@ aliases: create, innit Run "npm help init" for more info \`\`\`bash -npm init (same as \`npx ) +npm init (same as \`npx \`) npm init <@scope> (same as \`npx <@scope>/create\`) aliases: create, innit @@ -3380,6 +3407,8 @@ Run "npm help login" for more info npm login \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`scope\` #### \`auth-type\` @@ -3400,6 +3429,8 @@ Run "npm help logout" for more info npm logout \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`scope\` ` @@ -3448,6 +3479,8 @@ exports[`test/lib/docs.js TAP usage npm > must match snapshot 1`] = ` npm \`\`\` +Note: This command is unaware of workspaces. + NO PARAMS ` @@ -3485,6 +3518,8 @@ npm org ls orgname [] alias: ogr \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`otp\` #### \`json\` @@ -3586,6 +3621,8 @@ Run "npm help ping" for more info npm ping \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` ` @@ -3635,6 +3672,8 @@ Run "npm help prefix" for more info npm prefix [-g] \`\`\` +Note: This command is unaware of workspaces. + #### \`global\` ` @@ -3659,6 +3698,8 @@ npm profile get [] npm profile set \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`json\` #### \`parseable\` @@ -3832,6 +3873,8 @@ Run "npm help root" for more info npm root \`\`\` +Note: This command is unaware of workspaces. + #### \`global\` ` @@ -3886,6 +3929,8 @@ npm search [search terms ...] aliases: find, s, se \`\`\` +Note: This command is unaware of workspaces. + #### \`long\` #### \`json\` #### \`color\` @@ -3911,6 +3956,8 @@ Run "npm help set" for more info npm set = [= ...] (See \`npm config\`) \`\`\` +Note: This command is unaware of workspaces. + NO PARAMS ` @@ -3926,6 +3973,8 @@ Run "npm help shrinkwrap" for more info npm shrinkwrap \`\`\` +Note: This command is unaware of workspaces. + NO PARAMS ` @@ -3944,6 +3993,8 @@ Run "npm help star" for more info npm star [...] \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`unicode\` #### \`otp\` @@ -3964,6 +4015,8 @@ Run "npm help stars" for more info npm stars [] \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` ` @@ -4028,6 +4081,8 @@ npm team rm [--otp ] npm team ls | \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` #### \`otp\` #### \`parseable\` @@ -4077,6 +4132,8 @@ npm token revoke npm token create [--read-only] [--cidr=list] \`\`\` +Note: This command is unaware of workspaces. + #### \`read-only\` #### \`cidr\` #### \`registry\` @@ -4149,6 +4206,8 @@ Run "npm help unstar" for more info npm unstar [...] \`\`\` +Note: This command is unaware of workspaces. 
+ #### \`registry\` #### \`unicode\` #### \`otp\` @@ -4274,5 +4333,7 @@ Run "npm help whoami" for more info npm whoami \`\`\` +Note: This command is unaware of workspaces. + #### \`registry\` ` diff --git a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs index 7b689ff9728203..5fe20969fce315 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs @@ -390,7 +390,7 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-false-cachePath-false-cacheDest-true-/cache/dest", + "dest": "{CWD}/cache/dest", "path": "/not/cache/dir/path", }, ], @@ -424,7 +424,7 @@ Object { Error: whoopsie { "code": "EACCES", "dest": "/not/cache/dir/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-false-cachePath-true-cacheDest-false-/cache/path", + "path": "{CWD}/cache/path", }, ], ], @@ -456,8 +456,8 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-false-cachePath-true-cacheDest-true-/cache/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-false-cachePath-true-cacheDest-true-/cache/path", + "dest": "{CWD}/cache/dest", + "path": "{CWD}/cache/path", }, ], ], @@ -505,15 +505,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-false-cacheDest-false-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-false-cacheDest-false-/cache/_logs/{DATE}-debug-0.log", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -530,7 +530,7 @@ Object { previous versions of npm which has since been addressed. To permanently fix this problem, please run: - sudo chown -R 867:5309 "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-false-cacheDest-true-/cache" + sudo chown -R 867:5309 "{CWD}/cache" ), ], ], @@ -545,18 +545,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-false-cacheDest-true-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-false-cacheDest-true-/cache/_logs/{DATE}-debug-0.log", - ], - Array [ - "dummy stack trace", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -573,7 +570,7 @@ Object { previous versions of npm which has since been addressed. 
To permanently fix this problem, please run: - sudo chown -R 867:5309 "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-false-/cache" + sudo chown -R 867:5309 "{CWD}/cache" ), ], ], @@ -588,18 +585,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-false-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-false-/cache/_logs/{DATE}-debug-0.log", - ], - Array [ - "dummy stack trace", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -616,7 +610,7 @@ Object { previous versions of npm which has since been addressed. To permanently fix this problem, please run: - sudo chown -R 867:5309 "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-true-/cache" + sudo chown -R 867:5309 "{CWD}/cache" ), ], ], @@ -631,18 +625,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-true-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-false-loaded-true-cachePath-true-cacheDest-true-/cache/_logs/{DATE}-debug-0.log", - ], - Array [ - "dummy stack trace", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -703,7 +694,7 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-false-cachePath-false-cacheDest-true-/cache/dest", + "dest": "{CWD}/cache/dest", "path": "/not/cache/dir/path", }, ], @@ -738,7 +729,7 @@ Object { Error: whoopsie { "code": "EACCES", "dest": "/not/cache/dir/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-false-cachePath-true-cacheDest-false-/cache/path", + "path": "{CWD}/cache/path", }, ], ], @@ -771,8 +762,8 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-false-cachePath-true-cacheDest-true-/cache/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-false-cachePath-true-cacheDest-true-/cache/path", + "dest": "{CWD}/cache/dest", + "path": "{CWD}/cache/path", }, ], ], @@ -821,15 +812,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-false-cacheDest-false-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-false-cacheDest-false-/cache/_logs/{DATE}-debug-0.log", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -856,7 +847,7 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-false-cacheDest-true-/cache/dest", + 
"dest": "{CWD}/cache/dest", "path": "/not/cache/dir/path", }, ], @@ -872,15 +863,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-false-cacheDest-true-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-false-cacheDest-true-/cache/_logs/{DATE}-debug-0.log", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -908,7 +899,7 @@ Object { Error: whoopsie { "code": "EACCES", "dest": "/not/cache/dir/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-false-/cache/path", + "path": "{CWD}/cache/path", }, ], ], @@ -923,15 +914,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-false-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-false-/cache/_logs/{DATE}-debug-0.log", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -958,8 +949,8 @@ Object { "", Error: whoopsie { "code": "EACCES", - "dest": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-true-/cache/dest", - "path": "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-true-/cache/path", + "dest": "{CWD}/cache/dest", + "path": "{CWD}/cache/path", }, ], ], @@ -974,15 +965,15 @@ Array [ ], Array [ "argv", - "", + "/"--fetch-retries/" /"0/" /"--cache/" /"{CWD}/cache/"", ], Array [ "logfile", - "logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-true-/cache/_logs/{DATE}-", + "logs-max:10 dir:{CWD}/cache/_logs/{DATE}-", ], Array [ "logfile", - "{CWD}/test/lib/utils/tap-testdir-error-message-eacces-eperm--windows-true-loaded-true-cachePath-true-cacheDest-true-/cache/_logs/{DATE}-debug-0.log", + "{CWD}/cache/_logs/{DATE}-debug-0.log", ], ] ` @@ -1280,7 +1271,7 @@ Object { String( Not compatible with your version of node/npm: some@package Required: undefined - Actual: {"npm":"123.456.789-npm","node":"99.99.99"} + Actual: {"npm":"123.456.789-npm","node":"123.456.789-node"} ), ], ], diff --git a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs index eed705be384a25..4c163e7df5593d 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs @@ -12,18 +12,18 @@ exports[`test/lib/utils/exit-handler.js TAP handles unknown error with logs and 15 timing npm:load:mkdirpcache Completed in {TIME}ms 16 timing npm:load:mkdirplogs Completed in {TIME}ms 17 verbose title npm -18 verbose argv +18 verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "notice" 19 timing npm:load:setTitle Completed in {TIME}ms 21 timing npm:load:display Completed in {TIME}ms -22 verbose logfile logs-max:10 
dir:{CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}- -23 verbose logfile {CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}-debug-0.log +22 verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +23 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log 24 timing npm:load:logFile Completed in {TIME}ms 25 timing npm:load:timers Completed in {TIME}ms 26 timing npm:load:configScope Completed in {TIME}ms 27 timing npm:load Completed in {TIME}ms 28 silly logfile done cleaning log files 29 verbose stack Error: Unknown error -30 verbose cwd {CWD} +30 verbose cwd {CWD}/prefix 31 verbose Foo 1.0.0 32 verbose node v1.0.0 33 verbose npm v1.0.0 @@ -31,10 +31,10 @@ exports[`test/lib/utils/exit-handler.js TAP handles unknown error with logs and 35 error ERR SUMMARY Unknown error 36 error ERR DETAIL Unknown error 37 verbose exit 1 -39 timing npm Completed in {TIME}ms -40 verbose code 1 -41 error A complete log of this run can be found in: -41 error {CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}-debug-0.log +38 timing npm Completed in {TIME}ms +39 verbose code 1 +40 error A complete log of this run can be found in: +40 error {CWD}/cache/_logs/{DATE}-debug-0.log ` exports[`test/lib/utils/exit-handler.js TAP handles unknown error with logs and debug file > logs 1`] = ` @@ -44,18 +44,18 @@ timing npm:load:configload Completed in {TIME}ms timing npm:load:mkdirpcache Completed in {TIME}ms timing npm:load:mkdirplogs Completed in {TIME}ms verbose title npm -verbose argv +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "notice" timing npm:load:setTitle Completed in {TIME}ms timing npm:load:display Completed in {TIME}ms -verbose logfile logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}- -verbose logfile {CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}-debug-0.log +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log timing npm:load:logFile Completed in {TIME}ms timing npm:load:timers Completed in {TIME}ms timing npm:load:configScope Completed in {TIME}ms timing npm:load Completed in {TIME}ms silly logfile done cleaning log files verbose stack Error: Unknown error -verbose cwd {CWD} +verbose cwd {CWD}/prefix verbose Foo 1.0.0 verbose node v1.0.0 verbose npm v1.0.0 @@ -66,5 +66,5 @@ verbose exit 1 timing npm Completed in {TIME}ms verbose code 1 error A complete log of this run can be found in: - {CWD}/test/lib/utils/tap-testdir-exit-handler-handles-unknown-error-with-logs-and-debug-file/cache/_logs/{DATE}-debug-0.log + {CWD}/cache/_logs/{DATE}-debug-0.log ` diff --git a/deps/npm/tap-snapshots/test/lib/utils/log-file.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/log-file.js.test.cjs index 912a4365f68104..0a4af7cadf0607 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/log-file.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/log-file.js.test.cjs @@ -6,7 +6,7 @@ */ 'use strict' exports[`test/lib/utils/log-file.js TAP snapshot > must match snapshot 1`] = ` -0 verbose logfile logs-max:10 dir:{CWD}/test/lib/utils/tap-testdir-log-file-snapshot/{DATE}- +0 verbose logfile logs-max:10 dir:{CWD}/{DATE}- 1 silly logfile done cleaning log files 2 error no prefix 3 error prefix with prefix diff --git 
a/deps/npm/tap-snapshots/test/lib/utils/reify-output.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/reify-output.js.test.cjs index 755b2364253047..3fb3fa2611c231 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/reify-output.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/reify-output.js.test.cjs @@ -15,12 +15,12 @@ exports[`test/lib/utils/reify-output.js TAP added packages should be looked up w up to date in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` up to date in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -30,14 +30,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` up to date, audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -52,21 +52,21 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` up to date, audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":false} 2`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":false} 2`] = ` up to date, audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -81,7 +81,7 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":true} 2`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":0,"audited":2,"json":true} 2`] = ` { "added": 0, "removed": 0, @@ -99,12 +99,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` 
-exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -114,14 +114,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -136,14 +136,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -158,12 +158,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -173,14 +173,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":0,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -195,14 +195,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 0, @@ -217,12 +217,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` removed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -232,14 +232,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` removed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -254,14 +254,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` removed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -276,12 +276,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":0,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` removed 1 package, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -291,14 +291,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` removed 1 package, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -313,14 +313,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` removed 1 package, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -335,12 +335,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` removed 1 package, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -350,14 +350,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` removed 1 package, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":0,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -372,14 +372,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` removed 1 package, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 1, @@ -394,12 +394,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` removed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -409,14 +409,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` removed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -431,14 +431,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` removed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -453,12 +453,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages 
changed message > {"added":0,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` removed 2 packages, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -468,14 +468,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` removed 2 packages, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -490,14 +490,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` removed 2 packages, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -512,12 +512,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` removed 2 packages, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -527,14 +527,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":1,"json":false} 1`] = ` removed 2 packages, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages 
changed message > {"added":0,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -549,14 +549,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` removed 2 packages, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":0,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 0, "removed": 2, @@ -571,12 +571,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` added 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -586,14 +586,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` added 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -608,14 +608,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` added 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -630,12 +630,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP 
packages changed message > {"added":1,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` added 1 package, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -645,14 +645,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` added 1 package, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -667,14 +667,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` added 1 package, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -689,12 +689,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` added 1 package, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -704,14 +704,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` added 1 package, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed 
message > {"added":1,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -726,14 +726,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` added 1 package, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 0, @@ -748,12 +748,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` added 1 package, and removed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -763,14 +763,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` added 1 package, removed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -785,14 +785,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` added 1 package, removed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -807,12 +807,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` 
-exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` added 1 package, removed 1 package, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -822,14 +822,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` added 1 package, removed 1 package, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -844,14 +844,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` added 1 package, removed 1 package, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -866,12 +866,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` added 1 package, removed 1 package, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -881,14 +881,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` added 1 package, removed 1 package, changed 2 packages, and 
audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -903,14 +903,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` added 1 package, removed 1 package, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 1, @@ -925,12 +925,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` added 1 package, and removed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -940,14 +940,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` added 1 package, removed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -962,14 +962,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` added 1 package, removed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":1,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -984,12 +984,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` added 1 package, removed 2 packages, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -999,14 +999,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` added 1 package, removed 2 packages, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -1021,14 +1021,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` added 1 package, removed 2 packages, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -1043,12 +1043,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` added 1 package, removed 2 packages, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -1058,14 +1058,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":1,"json":false} 
1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":1,"json":false} 1`] = ` added 1 package, removed 2 packages, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -1080,14 +1080,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` added 1 package, removed 2 packages, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":1,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 1, "removed": 2, @@ -1102,12 +1102,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":0,"json":false} 1`] = ` added 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1117,14 +1117,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":1,"json":false} 1`] = ` added 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1139,14 +1139,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":2,"json":false} 1`] = ` added 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":2,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1161,12 +1161,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":0,"json":false} 1`] = ` added 2 packages, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1176,14 +1176,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":1,"json":false} 1`] = ` added 2 packages, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1198,14 +1198,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":2,"json":false} 1`] = ` added 2 packages, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1220,12 +1220,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":0,"json":false} 1`] = ` added 2 packages, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1235,14 +1235,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed 
message > {"added":2,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":1,"json":false} 1`] = ` added 2 packages, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1257,14 +1257,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":2,"json":false} 1`] = ` added 2 packages, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":0,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 0, @@ -1279,12 +1279,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":0,"json":false} 1`] = ` added 2 packages, and removed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1294,14 +1294,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":1,"json":false} 1`] = ` added 2 packages, removed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1316,14 +1316,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":2,"json":false} 1`] = ` added 2 packages, removed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities 
` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1338,12 +1338,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":0,"json":false} 1`] = ` added 2 packages, removed 1 package, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1353,14 +1353,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":1,"json":false} 1`] = ` added 2 packages, removed 1 package, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1375,14 +1375,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":2,"json":false} 1`] = ` added 2 packages, removed 1 package, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1397,12 +1397,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":0,"json":false} 1`] = ` added 2 packages, removed 1 package, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1412,14 +1412,14 @@ 
exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":1,"json":false} 1`] = ` added 2 packages, removed 1 package, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1434,14 +1434,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":2,"json":false} 1`] = ` added 2 packages, removed 1 package, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":1,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 1, @@ -1456,12 +1456,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":0,"json":false} 1`] = ` added 2 packages, and removed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1471,14 +1471,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":1,"json":false} 1`] = ` added 2 packages, removed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1493,14 +1493,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > 
{"added":2,"removed":2,"changed":0,"audited":2,"json":false} 1`] = ` added 2 packages, removed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":0,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1515,12 +1515,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":0,"json":false} 1`] = ` added 2 packages, removed 2 packages, and changed 1 package in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1530,14 +1530,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":1,"json":false} 1`] = ` added 2 packages, removed 2 packages, changed 1 package, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1552,14 +1552,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":2,"json":false} 1`] = ` added 2 packages, removed 2 packages, changed 1 package, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":1,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1574,12 +1574,12 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":0,"json":false} 1`] = ` added 2 packages, removed 2 packages, and changed 2 packages in {TIME} ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":0,"json":true} 1`] = 
` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":0,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1589,14 +1589,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":1,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":1,"json":false} 1`] = ` added 2 packages, removed 2 packages, changed 2 packages, and audited 1 package in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":1,"json":true} 1`] = ` { "added": 2, "removed": 2, @@ -1611,14 +1611,14 @@ exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added": } ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":2,"json":false} 1`] = ` added 2 packages, removed 2 packages, changed 2 packages, and audited 2 packages in {TIME} -found 0 vulnerabilities +found 0 vulnerabilities ` -exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` +exports[`test/lib/utils/reify-output.js TAP packages changed message > {"added":2,"removed":2,"changed":2,"audited":2,"json":true} 1`] = ` { "added": 2, "removed": 2, diff --git a/deps/npm/test/bin/npm-cli.js b/deps/npm/test/bin/npm-cli.js index 7b4b619e2b7712..134208c8160c1e 100644 --- a/deps/npm/test/bin/npm-cli.js +++ b/deps/npm/test/bin/npm-cli.js @@ -1,7 +1,9 @@ const t = require('tap') +const tmock = require('../fixtures/tmock') + t.test('loading the bin calls the implementation', t => { - t.mock('../../bin/npm-cli.js', { - '../../lib/cli.js': proc => { + tmock(t, '{BIN}/npm-cli.js', { + '{LIB}/cli.js': proc => { t.equal(proc, process, 'called implementation with process object') t.end() }, diff --git a/deps/npm/test/bin/npx-cli.js b/deps/npm/test/bin/npx-cli.js index b526f2dfbe32eb..5670f24f07b77a 100644 --- a/deps/npm/test/bin/npx-cli.js +++ b/deps/npm/test/bin/npx-cli.js @@ -1,45 +1,46 @@ const t = require('tap') -const npx = require.resolve('../../bin/npx-cli.js') -const cli = require.resolve('../../lib/cli.js') -const npm = require.resolve('../../bin/npm-cli.js') +const mockGlobals = require('../fixtures/mock-globals') +const tmock = require('../fixtures/tmock') -const logs = [] -console.error = (...msg) => logs.push(msg) +const npm = require.resolve('../../bin/npm-cli.js') +const npx = require.resolve('../../bin/npx-cli.js') -t.afterEach(() => (logs.length = 0)) +const mockNpx = (t, argv) => { + const logs = [] + mockGlobals(t, { + 'process.argv': argv, + 'console.error': (...msg) => logs.push(msg), + }) + tmock(t, '{BIN}/npx-cli.js', { '{LIB}/cli.js': () => {} }) + return { + logs, + argv: process.argv, + } +} -t.test('npx foo -> npm exec -- foo', t => { - process.argv = ['node', npx, 'foo'] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, ['node', npm, 'exec', '--', 'foo']) - t.end() +t.test('npx foo -> npm 
exec -- foo', async t => { + const { argv } = mockNpx(t, ['node', npx, 'foo']) + t.strictSame(argv, ['node', npm, 'exec', '--', 'foo']) }) -t.test('npx -- foo -> npm exec -- foo', t => { - process.argv = ['node', npx, '--', 'foo'] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, ['node', npm, 'exec', '--', 'foo']) - t.end() +t.test('npx -- foo -> npm exec -- foo', async t => { + const { argv } = mockNpx(t, ['node', npx, '--', 'foo']) + t.strictSame(argv, ['node', npm, 'exec', '--', 'foo']) }) -t.test('npx -x y foo -z -> npm exec -x y -- foo -z', t => { - process.argv = ['node', npx, '-x', 'y', 'foo', '-z'] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, ['node', npm, 'exec', '-x', 'y', '--', 'foo', '-z']) - t.end() +t.test('npx -x y foo -z -> npm exec -x y -- foo -z', async t => { + const { argv } = mockNpx(t, ['node', npx, '-x', 'y', 'foo', '-z']) + t.strictSame(argv, ['node', npm, 'exec', '-x', 'y', '--', 'foo', '-z']) }) -t.test('npx --x=y --no-install foo -z -> npm exec --x=y -- foo -z', t => { - process.argv = ['node', npx, '--x=y', '--no-install', 'foo', '-z'] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, ['node', npm, 'exec', '--x=y', '--yes=false', '--', 'foo', '-z']) - t.end() +t.test('npx --x=y --no-install foo -z -> npm exec --x=y -- foo -z', async t => { + const { argv } = mockNpx(t, ['node', npx, '--x=y', '--no-install', 'foo', '-z']) + t.strictSame(argv, ['node', npm, 'exec', '--x=y', '--yes=false', '--', 'foo', '-z']) }) -t.test('transform renamed options into proper values', t => { - process.argv = ['node', npx, '-y', '--shell=bash', '-p', 'foo', '-c', 'asdf'] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, [ +t.test('transform renamed options into proper values', async t => { + const { argv } = mockNpx(t, ['node', npx, '-y', '--shell=bash', '-p', 'foo', '-c', 'asdf']) + t.strictSame(argv, [ 'node', npm, 'exec', @@ -50,12 +51,11 @@ t.test('transform renamed options into proper values', t => { '--call', 'asdf', ]) - t.end() }) // warn if deprecated switches/options are used -t.test('use a bunch of deprecated switches and options', t => { - process.argv = [ +t.test('use a bunch of deprecated switches and options', async t => { + const { argv, logs } = mockNpx(t, [ 'node', npx, '--npm', @@ -71,7 +71,7 @@ t.test('use a bunch of deprecated switches and options', t => { '--ignore-existing', '-q', 'foobar', - ] + ]) const expect = [ 'node', @@ -86,8 +86,7 @@ t.test('use a bunch of deprecated switches and options', t => { '--', 'foobar', ] - t.mock(npx, { [cli]: () => {} }) - t.strictSame(process.argv, expect) + t.strictSame(argv, expect) t.strictSame(logs, [ ['npx: the --npm argument has been removed.'], ['npx: the --node-arg argument has been removed.'], @@ -97,5 +96,4 @@ t.test('use a bunch of deprecated switches and options', t => { ['npx: the --ignore-existing argument has been removed.'], ['See `npm help exec` for more information'], ]) - t.end() }) diff --git a/deps/npm/test/fixtures/clean-snapshot.js b/deps/npm/test/fixtures/clean-snapshot.js index b0ea28cee4d814..83ddc00f4b7877 100644 --- a/deps/npm/test/fixtures/clean-snapshot.js +++ b/deps/npm/test/fixtures/clean-snapshot.js @@ -1,19 +1,43 @@ +const { relative, dirname } = require('path') + +// normalize line endings (for ini) +const cleanNewlines = (s) => s.replace(/\r\n/g, '\n') + // XXX: this also cleans quoted " in json snapshots // ideally this could be avoided but its easier to just // run this command inside cleanSnapshot -const normalizePath = (str) 
=> str - .replace(/\r\n/g, '\n') // normalize line endings (for ini) +const normalizePath = (str) => cleanNewlines(str) .replace(/[A-z]:\\/g, '\\') // turn windows roots to posix ones .replace(/\\+/g, '/') // replace \ with / +const pathRegex = (p) => new RegExp(normalizePath(p), 'gi') + +// create a cwd replacer in the module scope, since some tests +// overwrite process.cwd() +const CWD = pathRegex(process.cwd()) +const TESTDIR = pathRegex(relative(process.cwd(), dirname(require.main.filename))) + const cleanCwd = (path) => normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') + // repalce CWD, TESTDIR, and TAPDIR separately + .replace(CWD, '{CWD}') + .replace(TESTDIR, '{TESTDIR}') + .replace(/tap-testdir-[\w-.]+/gi, '{TAPDIR}') + // if everything ended up in line, reduce it all to CWD + .replace(/\{CWD\}\/\{TESTDIR\}\/\{TAPDIR\}/g, '{CWD}') + // replace for platform differences in global nodemodules + .replace(/lib\/node_modules/g, 'node_modules') + .replace(/global\/lib/g, 'global') const cleanDate = (str) => str.replace(/\d{4}-\d{2}-\d{2}T\d{2}[_:]\d{2}[_:]\d{2}[_:.]\d{3}Z/g, '{DATE}') +const cleanTime = str => str.replace(/in [0-9]+m?s\s*$/gm, 'in {TIME}') + module.exports = { normalizePath, + pathRegex, cleanCwd, cleanDate, + cleanTime, + cleanNewlines, } diff --git a/deps/npm/test/fixtures/merge-conflict.json b/deps/npm/test/fixtures/merge-conflict.json new file mode 100644 index 00000000000000..2591c62efb37a0 --- /dev/null +++ b/deps/npm/test/fixtures/merge-conflict.json @@ -0,0 +1,36 @@ +{ + "array": [ +<<<<<<< HEAD + 100, + { + "foo": "baz" + }, +||||||| merged common ancestors + 1, +======= + 111, + 1, + 2, + 3, + { + "foo": "bar" + }, +>>>>>>> a + 1 + ], + "a": { + "b": { +<<<<<<< HEAD + "c": { + "x": "bbbb" + } +||||||| merged common ancestors + "c": { + "x": "aaaa" + } +======= + "c": "xxxx" +>>>>>>> a + } + } +} diff --git a/deps/npm/test/fixtures/mock-globals.js b/deps/npm/test/fixtures/mock-globals.js index 29da2a48b092d2..aec8a83963687a 100644 --- a/deps/npm/test/fixtures/mock-globals.js +++ b/deps/npm/test/fixtures/mock-globals.js @@ -4,23 +4,49 @@ // Hopefully it can be removed for a feature in tap in the future const sep = '.' 
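The mock-globals hunk that follows replaces the regex-based splitLast with an escape-aware scanner, so that a key segment quoted with `"` may itself contain the `.` separator (as the nerfdart `npm_config_//...` env keys set later in mock-npm.js do). A rough standalone illustration of the intended split behavior, not the patched code itself:

// Sketch: split on the last unquoted '.', stripping surrounding '"'
// from the tail segment. Hypothetical helper, for illustration only.
const splitLast = (key) => {
  let inQuote = false
  for (let i = key.length - 1; i >= 0; i--) {
    if (key[i] === '"') {
      inQuote = !inQuote // dots between quotes are not separators
    } else if (key[i] === '.' && !inQuote) {
      return [key.slice(0, i), key.slice(i + 1).replace(/^"(.*)"$/, '$1')]
    }
  }
  return [key]
}

// splitLast('process.env.TERM')
//   => ['process.env', 'TERM']
// splitLast('process.env."npm_config_//registry.example.com/:_authToken"')
//   => ['process.env', 'npm_config_//registry.example.com/:_authToken']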
+const escapeSep = '"' const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k) const opd = (o, k) => Object.getOwnPropertyDescriptor(o, k) const po = (o) => Object.getPrototypeOf(o) const pojo = (o) => Object.prototype.toString.call(o) === '[object Object]' const last = (arr) => arr[arr.length - 1] -const splitLast = (str) => str.split(new RegExp(`\\${sep}(?=[^${sep}]+$)`)) const dupes = (arr) => arr.filter((k, i) => arr.indexOf(k) !== i) const dupesStartsWith = (arr) => arr.filter((k1) => arr.some((k2) => k2.startsWith(k1 + sep))) +const splitLastSep = (str) => { + let escaped = false + for (let i = str.length - 1; i >= 0; i--) { + const c = str[i] + const cp = str[i + 1] + const cn = str[i - 1] + if (!escaped && c === escapeSep && (cp == null || cp === sep)) { + escaped = true + continue + } + if (escaped && c === escapeSep && cn === sep) { + escaped = false + continue + } + if (!escaped && c === sep) { + return [ + str.slice(0, i), + str.slice(i + 1).replace(new RegExp(`^${escapeSep}(.*)${escapeSep}$`), '$1'), + ] + } + } + return [str] +} + // A weird getter that can look up keys on nested objects but also // match keys with dots in their names, eg { 'process.env': { TERM: 'a' } } // can be looked up with the key 'process.env.TERM' const get = (obj, key, childKey = '') => { if (has(obj, key)) { return childKey ? get(obj[key], childKey) : obj[key] - } else if (key.includes(sep)) { - const [parentKey, prefix] = splitLast(key) + } + const split = splitLastSep(key) + if (split.length === 2) { + const [parentKey, prefix] = split return get( obj, parentKey, @@ -81,7 +107,7 @@ class DescriptorStack { #isDelete = (o) => o && o.DELETE === true constructor (key) { - const keys = splitLast(key) + const keys = splitLastSep(key) this.#global = keys.length === 1 ? global : get(global, keys[0]) this.#valueKey = specialCaseKeys(key) || last(keys) // If the global object doesnt return a descriptor for the key diff --git a/deps/npm/test/fixtures/mock-npm.js b/deps/npm/test/fixtures/mock-npm.js index 8a744cd559eaf0..2cada1354878c0 100644 --- a/deps/npm/test/fixtures/mock-npm.js +++ b/deps/npm/test/fixtures/mock-npm.js @@ -1,26 +1,81 @@ const os = require('os') const fs = require('fs').promises const path = require('path') +const tap = require('tap') +const errorMessage = require('../../lib/utils/error-message') const mockLogs = require('./mock-logs') const mockGlobals = require('./mock-globals') -const log = require('../../lib/utils/log-shim') -const envConfigKeys = Object.keys(require('../../lib/utils/config/definitions.js')) +const tmock = require('./tmock') +const defExitCode = process.exitCode + +const changeDir = (dir) => { + if (dir) { + const cwd = process.cwd() + process.chdir(dir) + return () => process.chdir(cwd) + } + return () => {} +} + +const setGlobalNodeModules = (globalDir) => { + const updateSymlinks = (obj, visit) => { + for (const [key, value] of Object.entries(obj)) { + if (/Fixture/.test(value.toString())) { + obj[key] = tap.fixture('symlink', path.join('..', value.content)) + } else if (typeof value === 'object') { + obj[key] = updateSymlinks(value, visit) + } + } + return obj + } + + if (globalDir.lib) { + throw new Error('`globalPrefixDir` should not have a top-level `lib/` directory, only a ' + + 'top-level `node_modules/` dir that gets set in the correct location based on platform. 
' + + `Received the following top level entries: ${Object.keys(globalDir).join(', ')}.` + ) + } + + if (process.platform !== 'win32' && globalDir.node_modules) { + const { node_modules: nm, ...rest } = globalDir + return { + ...rest, + lib: { node_modules: updateSymlinks(nm) }, + } + } -const RealMockNpm = (t, otherMocks = {}) => { + return globalDir +} + +const getMockNpm = async (t, { mocks, init, load, npm: npmOpts }) => { const mock = { - ...mockLogs(otherMocks), + ...mockLogs(mocks), outputs: [], outputErrors: [], joinedOutput: () => mock.outputs.map(o => o.join(' ')).join('\n'), } - const Npm = t.mock('../../lib/npm.js', { - '../../lib/utils/update-notifier.js': async () => {}, - ...otherMocks, + const Npm = tmock(t, '{LIB}/npm.js', { + '{LIB}/utils/update-notifier.js': async () => {}, + ...mocks, ...mock.logMocks, }) - mock.Npm = class MockNpm extends Npm { + class MockNpm extends Npm { + async exec (...args) { + const [res, err] = await super.exec(...args).then((r) => [r]).catch(e => [null, e]) + // This mimics how the exit handler flushes output for commands that have + // buffered output. It also uses the same json error processing from the + // error message fn. This is necessary for commands with buffered output + // to read the output after exec is called. This is not *exactly* how it + // works in practice, but it is close enough for now. + this.flushOutput(err ? errorMessage(err, this).json : null) + if (err) { + throw err + } + return res + } + // lib/npm.js tests needs this to actually test the function! originalOutput (...args) { super.output(...args) @@ -39,77 +94,88 @@ const RealMockNpm = (t, otherMocks = {}) => { } } - return mock -} - -const setLoglevel = (t, loglevel, reset = true) => { - if (t && reset) { - const _level = log.level - t.teardown(() => log.level = _level) + mock.Npm = MockNpm + if (init) { + mock.npm = new MockNpm(npmOpts) + if (load) { + await mock.npm.load() + } } - if (loglevel) { - // Set log level on the npmlog singleton and shared across everything - log.level = loglevel - } + return mock } -// Resolve some options to a function call with supplied args -const result = (fn, ...args) => typeof fn === 'function' ? 
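The MockNpm.exec override above captures the command's result or error before flushing buffered output, then re-throws, so tests can still read output from commands that fail. The same capture-then-rethrow shape in isolation (generic names; this assumes nothing about npm's internals):

// Generic wrapper: run an async fn, flush side-channel output whether it
// succeeded or failed, then surface the original outcome unchanged.
const withFlush = (fn, flush) => async (...args) => {
  const [res, err] = await fn(...args).then(r => [r]).catch(e => [null, e])
  flush(err)          // always runs exactly once, with the error (or null)
  if (err) {
    throw err         // re-raise so callers still observe the failure
  }
  return res
}

This keeps the rejection observable to callers while guaranteeing the flush step runs on both the success and failure paths.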
fn(...args) : fn +const mockNpms = new Map() -const LoadMockNpm = async (t, { +const setupMockNpm = async (t, { init = true, load = init, + // preload a command + command = null, // string name of the command + exec = null, // optionally exec the command before returning + // test dirs prefixDir = {}, homeDir = {}, cacheDir = {}, - globalPrefixDir = { lib: {} }, - config = {}, - mocks = {}, + globalPrefixDir = { node_modules: {} }, otherDirs = {}, - globals = null, + chdir = ({ prefix }) => prefix, + // setup config, env vars, mocks, npm opts + config: _config = {}, + mocks = {}, + globals = {}, + npm: npmOpts = {}, + argv: rawArgv = [], } = {}) => { - // Mock some globals with their original values so they get torn down - // back to the original at the end of the test since they are manipulated - // by npm itself - const npmConfigEnv = {} - for (const key in process.env) { - if (key.startsWith('npm_config_')) { - npmConfigEnv[key] = undefined + // easy to accidentally forget to pass in tap + if (!(t instanceof tap.Test)) { + throw new Error('first argument must be a tap instance') + } + + // mockNpm is designed to only be run once per test chain so we assign it to + // the test in the cache and error if it is attempted to run again + let tapInstance = t + while (tapInstance) { + if (mockNpms.has(tapInstance)) { + throw new Error('mockNpm can only be called once in each t.test chain') } + tapInstance = tapInstance.parent } + mockNpms.set(t, true) + + if (!init && load) { + throw new Error('cant `load` without `init`') + } + + // These are globals manipulated by npm itself that we need to reset to their + // original values between tests + const npmEnvs = Object.keys(process.env).filter(k => k.startsWith('npm_')) mockGlobals(t, { process: { title: process.title, execPath: process.execPath, env: { - npm_command: process.env.npm_command, + NODE_ENV: process.env.NODE_ENV, COLOR: process.env.COLOR, - ...npmConfigEnv, + // further, these are npm controlled envs that we need to zero out before + // before the test. setting them to undefined ensures they are not set and + // also returned to their original value after the test + ...npmEnvs.reduce((acc, k) => { + acc[k] = undefined + return acc + }, {}), }, }, }) - const { Npm, ...rest } = RealMockNpm(t, mocks) - - // We want to fail fast when writing tests. Default this to 0 unless it was - // explicitly set in a test. - config = { 'fetch-retries': 0, ...config } - - if (!init && load) { - throw new Error('cant `load` without `init`') - } - - // Set log level as early as possible since - setLoglevel(t, config.loglevel) - const dir = t.testdir({ home: homeDir, prefix: prefixDir, cache: cacheDir, - global: globalPrefixDir, + global: setGlobalNodeModules(globalPrefixDir), other: otherDirs, }) + const dirs = { testdir: dir, prefix: path.join(dir, 'prefix'), @@ -119,52 +185,93 @@ const LoadMockNpm = async (t, { other: path.join(dir, 'other'), } - // Set cache to testdir via env var so it is available when load is run - // XXX: remove this for a solution where cache argv is passed in + // Option objects can also be functions that are called with all the dir paths + // so they can be used to set configs that need to be based on paths + const withDirs = (v) => typeof v === 'function' ? v(dirs) : v + + const teardownDir = changeDir(withDirs(chdir)) + + const defaultConfigs = { + // We want to fail fast when writing tests. Default this to 0 unless it was + // explicitly set in a test. 
+ 'fetch-retries': 0, + cache: dirs.cache, + } + + const { argv, env, config } = Object.entries({ ...defaultConfigs, ...withDirs(_config) }) + .reduce((acc, [key, value]) => { + // nerfdart configs passed in need to be set via env var instead of argv + // and quoted with `"` so mock globals will ignore that it contains dots + if (key.startsWith('//')) { + acc.env[`process.env."npm_config_${key}"`] = value + } else { + const values = [].concat(value) + acc.argv.push(...values.flatMap(v => [`--${key}`, v.toString()])) + } + acc.config[key] = value + return acc + }, { argv: [...rawArgv], env: {}, config: {} }) + mockGlobals(t, { 'process.env.HOME': dirs.home, - 'process.env.npm_config_cache': dirs.cache, - ...(globals ? result(globals, { ...dirs }) : {}), - // Some configs don't work because they can't be set via npm.config.set until - // config is loaded. But some config items are needed before that. So this is - // an explicit set of configs that must be loaded as env vars. - // XXX(npm9): make this possible by passing in argv directly to npm/config - ...Object.entries(config) - .filter(([k]) => envConfigKeys.includes(k)) - .reduce((acc, [k, v]) => { - acc[`process.env.npm_config_${k.replace(/-/g, '_')}`] = - result(v, { ...dirs }).toString() - return acc - }, {}), + // global prefix cannot be (easily) set via argv so this is the easiest way + // to set it that also closely mimics the behavior a user would see since it + // will already be set while `npm.load()` is being run + // Note that this only sets the global prefix and the prefix is set via chdir + 'process.env.PREFIX': dirs.globalPrefix, + ...withDirs(globals), + ...env, }) - const npm = init ? new Npm() : null + const { npm, ...mockNpm } = await getMockNpm(t, { + init, + load, + mocks: withDirs(mocks), + npm: { argv, excludeNpmCwd: true, ...withDirs(npmOpts) }, + }) + + if (config.omit?.includes('prod')) { + // XXX(HACK): --omit=prod is not a valid config according to the definitions but + // it was being hacked in via flatOptions for older tests so this is to + // preserve that behavior and reduce churn in the snapshots. 
this should be + // removed or fixed in the future + npm.flatOptions.omit.push('prod') + } + t.teardown(() => { - npm && npm.unload() + if (npm) { + npm.unload() + } + // only set exitCode back if we're passing tests + if (t.passing()) { + process.exitCode = defExitCode + } + teardownDir() }) - if (load) { - await npm.load() - for (const [k, v] of Object.entries(result(config, { npm, ...dirs }))) { - if (typeof v === 'object' && v.value && v.where) { - npm.config.set(k, v.value, v.where) - } else { - npm.config.set(k, v) - } + const mockCommand = {} + if (command) { + const cmd = await npm.cmd(command) + const usage = await cmd.usage + mockCommand.cmd = cmd + mockCommand[command] = { + usage, + exec: (args) => npm.exec(command, args), + completion: (args) => cmd.completion(args), + } + if (exec) { + await mockCommand[command].exec(exec) + // assign string output to the command now that we have it + // for easier testing + mockCommand[command].output = mockNpm.joinedOutput() } - // Set global loglevel *again* since it possibly got reset during load - // XXX: remove with npmlog - setLoglevel(t, config.loglevel, false) - npm.prefix = dirs.prefix - npm.cache = dirs.cache - npm.globalPrefix = dirs.globalPrefix } return { - ...rest, - ...dirs, - Npm, npm, + ...mockNpm, + ...dirs, + ...mockCommand, debugFile: async () => { const readFiles = npm.logFiles.map(f => fs.readFile(f)) const logFiles = await Promise.all(readFiles) @@ -180,80 +287,6 @@ const LoadMockNpm = async (t, { } } -const realConfig = require('../../lib/utils/config') - -// Basic npm fixture that you can give a config object that acts like -// npm.config You still need a separate flatOptions. Tests should migrate to -// using the real npm mock above -class MockNpm { - constructor (base = {}, t) { - this._mockOutputs = [] - this.isMockNpm = true - this.base = base - - const config = base.config || {} - - for (const attr in base) { - if (attr !== 'config') { - this[attr] = base[attr] - } - } - - this.flatOptions = base.flatOptions || {} - this.config = { - // for now just set `find` to what config.find should return - // this works cause `find` is not an existing config entry - find: (k) => ({ ...realConfig.defaults, ...config })[k], - // for now isDefault is going to just return false if a value was defined - isDefault: (k) => !Object.prototype.hasOwnProperty.call(config, k), - get: (k) => ({ ...realConfig.defaults, ...config })[k], - set: (k, v) => { - config[k] = v - // mock how real npm derives silent - if (k === 'loglevel') { - this.flatOptions.silent = v === 'silent' - this.silent = v === 'silent' - } - }, - list: [{ ...realConfig.defaults, ...config }], - validate: () => {}, - } - - if (t && config.loglevel) { - setLoglevel(t, config.loglevel) - } - - if (config.loglevel) { - this.config.set('loglevel', config.loglevel) - } - } - - get global () { - return this.config.get('global') || this.config.get('location') === 'global' - } - - output (...msg) { - if (this.base.output) { - return this.base.output(msg) - } - this._mockOutputs.push(msg) - } - - // with the older fake mock npm there is no - // difference between output and outputBuffer - // since it just collects the output and never - // calls the exit handler, so we just mock the - // method the same as output. 
- outputBuffer (...msg) { - this.output(...msg) - } -} - -const FakeMockNpm = (base = {}, t) => { - return new MockNpm(base, t) -} - -module.exports = { - fake: FakeMockNpm, - load: LoadMockNpm, -} +module.exports = setupMockNpm +module.exports.load = setupMockNpm +module.exports.setGlobalNodeModules = setGlobalNodeModules diff --git a/deps/npm/test/fixtures/sandbox.js b/deps/npm/test/fixtures/sandbox.js index c7bb8218dc60a0..460609628c8abb 100644 --- a/deps/npm/test/fixtures/sandbox.js +++ b/deps/npm/test/fixtures/sandbox.js @@ -2,9 +2,7 @@ const { createHook, executionAsyncId } = require('async_hooks') const { EventEmitter } = require('events') const { homedir, tmpdir } = require('os') const { dirname, join } = require('path') -const { promisify } = require('util') -const { mkdir } = require('fs/promises') -const rimraf = promisify(require('rimraf')) +const { mkdir, rm } = require('fs/promises') const mockLogs = require('./mock-logs') const pkg = require('../../package.json') @@ -201,7 +199,7 @@ class Sandbox extends EventEmitter { if (this[_npm]) { this[_npm].unload() } - return rimraf(this[_dirs].temp).catch(() => null) + return rm(this[_dirs].temp, { recursive: true, force: true }).catch(() => null) } // proxy get handler diff --git a/deps/npm/test/fixtures/tmock.js b/deps/npm/test/fixtures/tmock.js new file mode 100644 index 00000000000000..321e8bc07c581c --- /dev/null +++ b/deps/npm/test/fixtures/tmock.js @@ -0,0 +1,27 @@ +const path = require('path') + +const ROOT = path.resolve(__dirname, '../..') +const BIN = path.join(ROOT, 'bin') +const LIB = path.join(ROOT, 'lib') + +// since mock npm changes directories it can be hard to figure out the +// correct path to mock something with tap since the directory will change +// before/after npm is loaded. 
This helper replaces {BIN} and {LIB} with +// the absolute path to those directories +const replace = (s) => { + if (/^[./{]/.test(s)) { + return s + .replace(/^\{BIN\}/, BIN) + .replace(/^\{LIB\}/, LIB) + .replace(/^\{ROOT\}/, ROOT) + } else { + return require.resolve(s) + } +} + +const tmock = (t, p, mocks = {}) => { + const entries = Object.entries(mocks).map(([k, v]) => [replace(k), v]) + return t.mock(replace(p), Object.fromEntries(entries)) +} + +module.exports = tmock diff --git a/deps/npm/test/index.js b/deps/npm/test/index.js index 747d75b5fd4c0b..44fb0989df4258 100644 --- a/deps/npm/test/index.js +++ b/deps/npm/test/index.js @@ -1,34 +1,20 @@ const t = require('tap') +const spawn = require('@npmcli/promise-spawn') const index = require.resolve('../index.js') const packageIndex = require.resolve('../') +const { load: loadMockNpm } = require('./fixtures/mock-npm') t.equal(index, packageIndex, 'index is main package require() export') t.throws(() => require(index), { message: 'The programmatic API was removed in npm v8.0.0', }) -t.test('loading as main module will load the cli', t => { - const cwd = t.testdir() - const { spawn } = require('child_process') +t.test('loading as main module will load the cli', async t => { + const { npm, cache } = await loadMockNpm(t) const LS = require('../lib/commands/ls.js') - const ls = new LS({ - config: { - validate: () => {}, - get: (key) => { - if (key === 'location') { - return 'project' - } - }, - isDefault: () => {}, - }, - }) - const p = spawn(process.execPath, [index, 'ls', '-h', '--cache', cwd]) - const out = [] - p.stdout.on('data', c => out.push(c)) - p.on('close', (code, signal) => { - t.equal(code, 0) - t.equal(signal, null) - t.match(Buffer.concat(out).toString(), ls.usage) - t.end() - }) + const ls = new LS(npm) + const p = await spawn(process.execPath, [index, 'ls', '-h', '--cache', cache]) + t.equal(p.code, 0) + t.equal(p.signal, null) + t.match(p.stdout, ls.usage) }) diff --git a/deps/npm/test/lib/arborist-cmd.js b/deps/npm/test/lib/arborist-cmd.js index f3c1d2573d33fd..36c697cd9e8fd8 100644 --- a/deps/npm/test/lib/arborist-cmd.js +++ b/deps/npm/test/lib/arborist-cmd.js @@ -1,115 +1,140 @@ const { resolve } = require('path') const t = require('tap') -const ArboristCmd = require('../../lib/arborist-cmd.js') +const { load: loadMockNpm } = require('../fixtures/mock-npm') +const tmock = require('../fixtures/tmock') -const configMock = { - validate: () => {}, - get: (key) => { - if (key === 'location') { - return 'project' - } - }, - isDefault: () => {}, -} +const mockArboristCmd = async (t, exec, workspace, { mocks = {}, ...opts } = {}) => { + const ArboristCmd = tmock(t, '{LIB}/arborist-cmd.js', mocks) -t.test('arborist-cmd', async t => { - const path = t.testdir({ - 'package.json': JSON.stringify({ - name: 'simple-workspaces-list', - version: '1.1.1', - workspaces: [ - 'a', - 'b', - 'group/*', - ], - }), - node_modules: { - abbrev: { - 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1' }), + const config = (typeof workspace === 'function') + ? 
(dirs) => ({ workspace: workspace(dirs) }) + : { workspace } + + const mock = await loadMockNpm(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'simple-workspaces-list', + version: '1.1.1', + workspaces: [ + 'a', + 'b', + 'group/*', + ], + }), + node_modules: { + abbrev: { + 'package.json': JSON.stringify({ name: 'abbrev', version: '1.1.1' }), + }, + a: t.fixture('symlink', '../a'), + b: t.fixture('symlink', '../b'), }, - a: t.fixture('symlink', '../a'), - b: t.fixture('symlink', '../b'), - }, - a: { - 'package.json': JSON.stringify({ name: 'a', version: '1.0.0' }), - }, - b: { - 'package.json': JSON.stringify({ name: 'b', version: '1.0.0' }), - }, - group: { - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - dependencies: { - abbrev: '^1.1.1', - }, - }), + a: { + 'package.json': JSON.stringify({ name: 'a', version: '1.0.0' }), + }, + b: { + 'package.json': JSON.stringify({ name: 'b', version: '1.0.0' }), }, - d: { - 'package.json': JSON.stringify({ name: 'd', version: '1.0.0' }), + group: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + dependencies: { + abbrev: '^1.1.1', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ name: 'd', version: '1.0.0' }), + }, }, }, + ...opts, }) - class TestCmd extends ArboristCmd {} - - const cmd = new TestCmd({ localPrefix: path, config: configMock }) - - // check filtering for a single workspace name - cmd.exec = async function (args) { - t.same(this.workspaceNames, ['a'], 'should set array with single ws name') - t.same(args, ['foo'], 'should get received args') + let execArg + class TestCmd extends ArboristCmd { + async exec (arg) { + execArg = arg + } } - await cmd.execWorkspaces(['foo'], ['a']) - // check filtering single workspace by path - cmd.exec = async function (args) { - t.same(this.workspaceNames, ['a'], - 'should set array with single ws name from path') + const cmd = new TestCmd(mock.npm) + if (exec) { + await cmd.execWorkspaces(exec) } - await cmd.execWorkspaces([], ['./a']) - // check filtering single workspace by full path - cmd.exec = function (args) { - t.same(this.workspaceNames, ['a'], - 'should set array with single ws name from full path') - } - await cmd.execWorkspaces([], [resolve(path, './a')]) + return { ...mock, cmd, getArg: () => execArg } +} - // filtering multiple workspaces by name - cmd.exec = async function (args) { - t.same(this.workspaceNames, ['a', 'c'], - 'should set array with multiple listed ws names') - } - await cmd.execWorkspaces([], ['a', 'c']) +t.test('arborist-cmd', async t => { + await t.test('single name', async t => { + const { cmd, getArg } = await mockArboristCmd(t, ['foo'], 'a') - // filtering multiple workspaces by path names - cmd.exec = async function (args) { - t.same(this.workspaceNames, ['a', 'c'], - 'should set array with multiple ws names from paths') - } - await cmd.execWorkspaces([], ['./a', 'group/c']) + t.same(cmd.workspaceNames, ['a'], 'should set array with single ws name') + t.same(getArg(), ['foo'], 'should get received args') + }) - // filtering multiple workspaces by parent path name - cmd.exec = async function (args) { - t.same(this.workspaceNames, ['c', 'd'], - 'should set array with multiple ws names from a parent folder name') - } - await cmd.execWorkspaces([], ['./group']) + await t.test('single path', async t => { + const { cmd } = await mockArboristCmd(t, [], './a') + + t.same(cmd.workspaceNames, ['a'], 'should set array with single ws name') + }) + + await t.test('single full path', async t 
=> {
+    const { cmd } = await mockArboristCmd(t, [], ({ prefix }) => resolve(prefix, 'a'))
+
+    t.same(cmd.workspaceNames, ['a'], 'should set array with single ws name')
+  })
+
+  await t.test('multiple names', async t => {
+    const { cmd } = await mockArboristCmd(t, [], ['a', 'c'])
+
+    t.same(cmd.workspaceNames, ['a', 'c'], 'should set array with multiple ws names')
+  })
+
+  await t.test('multiple paths', async t => {
+    const { cmd } = await mockArboristCmd(t, [], ['./a', 'group/c'])
+
+    t.same(cmd.workspaceNames, ['a', 'c'], 'should set array with multiple ws names from paths')
+  })
+
+  await t.test('parent path', async t => {
+    const { cmd } = await mockArboristCmd(t, [], './group')
+
+    t.same(cmd.workspaceNames, ['c', 'd'], 'should set array with ws names from a parent folder name')
+  })
+
+  await t.test('prefix inside cwd', async t => {
+    const { npm, cmd, prefix } = await mockArboristCmd(t, null, ['a', 'c'], {
+      chdir: (dirs) => dirs.testdir,
+    })
+
+    npm.localPrefix = prefix
+    await cmd.execWorkspaces([])
+
+    t.same(cmd.workspaceNames, ['a', 'c'], 'should set array with multiple ws names')
+  })
 })
 
 t.test('handle getWorkspaces raising an error', async t => {
-  const ArboristCmd = t.mock('../../lib/arborist-cmd.js', {
-    '../../lib/workspaces/get-workspaces.js': async () => {
-      throw new Error('oopsie')
+  const { cmd } = await mockArboristCmd(t, null, 'a', {
+    mocks: {
+      '{LIB}/workspaces/get-workspaces.js': async () => {
+        throw new Error('oopsie')
+      },
     },
   })
-  class TestCmd extends ArboristCmd {}
-  const cmd = new TestCmd({ localPrefix: t.testdir(), config: configMock })
 
   await t.rejects(
-    cmd.execWorkspaces(['foo'], ['a']),
+    cmd.execWorkspaces(['foo']),
     { message: 'oopsie' }
   )
 })
diff --git a/deps/npm/test/lib/cli.js b/deps/npm/test/lib/cli.js
index 42a22a20b39643..28640a226065e5 100644
--- a/deps/npm/test/lib/cli.js
+++ b/deps/npm/test/lib/cli.js
@@ -1,6 +1,6 @@
 const t = require('tap')
-
 const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
+const tmock = require('../fixtures/tmock')
 
 const cliMock = async (t, opts) => {
   let exitHandlerArgs = null
@@ -12,9 +12,9 @@ const cliMock = async (t, opts) => {
   exitHandlerMock.setNpm = _npm => npm = _npm
 
   const { Npm, outputs, logMocks, logs } = await loadMockNpm(t, { ...opts, init: false })
-  const cli = t.mock('../../lib/cli.js', {
-    '../../lib/npm.js': Npm,
-    '../../lib/utils/exit-handler.js': exitHandlerMock,
+  const cli = tmock(t, '{LIB}/cli.js', {
+    '{LIB}/npm.js': Npm,
+    '{LIB}/utils/exit-handler.js': exitHandlerMock,
     ...logMocks,
   })
 
@@ -29,10 +29,6 @@ const cliMock = async (t, opts) => {
   }
 }
 
-t.afterEach(() => {
-  process.exitCode = undefined
-})
-
 t.test('print the version, and treat npm_g as npm -g', async t => {
   const { logsBy, logs, cli, Npm, outputs, exitHandlerCalled } = await cliMock(t, {
     globals: { 'process.argv': ['node', 'npm_g', '-v'] },
@@ -42,24 +38,18 @@ t.test('print the version, and treat npm_g as npm -g', async t => {
   t.strictSame(process.argv, ['node', 'npm', '-g', '-v'], 'system process.argv was rewritten')
   t.strictSame(logsBy('cli'), [['node npm']])
   t.strictSame(logsBy('title'), [['npm']])
-  t.strictSame(logsBy('argv'), [['"--global" "--version"']])
+  t.match(logsBy('argv'), [['"--global" "--version"']])
   t.strictSame(logs.info, [
     ['using', 'npm@%s', Npm.version],
     ['using', 'node@%s', process.version],
   ])
+  t.equal(outputs.length, 1)
   t.strictSame(outputs, 
[[Npm.version]]) t.strictSame(exitHandlerCalled(), []) }) t.test('calling with --versions calls npm version with no args', async t => { const { logsBy, cli, outputs, exitHandlerCalled } = await cliMock(t, { - mocks: { - '../../lib/commands/version.js': class Version { - async exec (args) { - t.strictSame(args, []) - } - }, - }, globals: { 'process.argv': ['node', 'npm', 'install', 'or', 'whatever', '--versions'], }, @@ -69,18 +59,14 @@ t.test('calling with --versions calls npm version with no args', async t => { t.equal(process.title, 'npm install or whatever') t.strictSame(logsBy('cli'), [['node npm']]) t.strictSame(logsBy('title'), [['npm install or whatever']]) - t.strictSame(logsBy('argv'), [['"install" "or" "whatever" "--versions"']]) - t.strictSame(outputs, []) + t.match(logsBy('argv'), [['"install" "or" "whatever" "--versions"']]) + t.equal(outputs.length, 1) + t.match(outputs[0][0], { npm: String, node: String, v8: String }) t.strictSame(exitHandlerCalled(), []) }) t.test('logged argv is sanitized', async t => { const { logsBy, cli } = await cliMock(t, { - mocks: { - '../../lib/commands/version.js': class Version { - async exec () {} - }, - }, globals: { 'process.argv': [ 'node', @@ -96,16 +82,11 @@ t.test('logged argv is sanitized', async t => { t.equal(process.title, 'npm version') t.strictSame(logsBy('cli'), [['node npm']]) t.strictSame(logsBy('title'), [['npm version']]) - t.strictSame(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/password"']]) + t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/password"']]) }) t.test('logged argv is sanitized with equals', async t => { const { logsBy, cli } = await cliMock(t, { - mocks: { - '../../lib/commands/version.js': class Version { - async exec () {} - }, - }, globals: { 'process.argv': [ 'node', @@ -117,7 +98,7 @@ t.test('logged argv is sanitized with equals', async t => { }) await cli(process) - t.strictSame(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org"']]) + t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org"']]) }) t.test('print usage if no params provided', async t => { @@ -153,7 +134,7 @@ t.test('load error calls error handler', async t => { const err = new Error('test load error') const { cli, exitHandlerCalled } = await cliMock(t, { mocks: { - '../../lib/utils/config/index.js': { + '{LIB}/utils/config/index.js': { definitions: null, flatten: null, shorthands: null, diff --git a/deps/npm/test/lib/commands/audit.js b/deps/npm/test/lib/commands/audit.js index 02b00f7f9ad88a..bba74407cb3fef 100644 --- a/deps/npm/test/lib/commands/audit.js +++ b/deps/npm/test/lib/commands/audit.js @@ -86,7 +86,6 @@ t.test('normal audit', async t => { await npm.exec('audit', []) t.ok(process.exitCode, 'would have exited uncleanly') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -135,7 +134,6 @@ t.test('fallback audit ', async t => { }) await npm.exec('audit', []) t.ok(process.exitCode, 'would have exited uncleanly') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -165,7 +163,6 @@ t.test('json audit', async t => { await npm.exec('audit', []) t.ok(process.exitCode, 'would have exited uncleanly') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -762,7 +759,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) }) @@ -796,7 +792,6 @@ 
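// The `-    process.exitCode = 0` removals throughout this file rely on the
// reworked mock-npm fixture, which presumably snapshots and restores
// process.exitCode around each test (note the matching removal of the
// t.afterEach reset in cli.js above). A minimal sketch of that pattern with
// tap's teardown hook:
//
//   const previous = process.exitCode
//   t.teardown(() => {
//     process.exitCode = previous
//   })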
t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) }) @@ -903,7 +898,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /audited 3 packages/) t.match(joinedOutput(), /2 packages have verified registry signatures/) t.match(joinedOutput(), /1 package has an invalid registry signature/) @@ -921,7 +915,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) }) @@ -937,7 +930,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /invalid registry signature/) t.match(joinedOutput(), /kms-demo@1.0.0/) t.matchSnapshot(joinedOutput()) @@ -955,7 +947,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /audited 2 packages/) t.match(joinedOutput(), /verified registry signature/) t.match(joinedOutput(), /missing registry signature/) @@ -974,7 +965,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /audited 2 packages/) t.match(joinedOutput(), /invalid/) t.match(joinedOutput(), /missing/) @@ -993,7 +983,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -1009,7 +998,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -1069,7 +1057,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match( joinedOutput(), /registry is providing signing keys/ @@ -1088,7 +1075,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match( joinedOutput(), /kms-demo/ @@ -1110,7 +1096,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), JSON.stringify({ invalid: [], missing: [] }, null, 2)) t.matchSnapshot(joinedOutput()) }) @@ -1129,7 +1114,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -1148,7 +1132,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.matchSnapshot(joinedOutput()) }) @@ -1166,7 +1149,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit 
successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) }) @@ -1176,7 +1158,8 @@ t.test('audit signatures', async t => { const { npm } = await loadMockNpm(t, { prefixDir: installWithThirdPartyRegistry, config: { - '@npmcli:registry': registryUrl, + scope: '@npmcli', + registry: registryUrl, }, }) const registry = new MockRegistry({ tap: t, registry: registryUrl }) @@ -1205,7 +1188,8 @@ t.test('audit signatures', async t => { const { npm } = await loadMockNpm(t, { prefixDir: installWithThirdPartyRegistry, config: { - '@npmcli:registry': registryUrl, + scope: '@npmcli', + registry: registryUrl, }, }) const registry = new MockRegistry({ tap: t, registry: registryUrl }) @@ -1234,7 +1218,8 @@ t.test('audit signatures', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: installWithThirdPartyRegistry, config: { - '@npmcli:registry': registryUrl, + scope: '@npmcli', + registry: registryUrl, }, }) const registry = new MockRegistry({ tap: t, registry: registryUrl }) @@ -1273,7 +1258,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) }) @@ -1283,7 +1267,8 @@ t.test('audit signatures', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: installWithThirdPartyRegistry, config: { - '@npmcli:registry': registryUrl, + scope: '@npmcli', + registry: registryUrl, }, }) const registry = new MockRegistry({ tap: t, registry: registryUrl }) @@ -1321,7 +1306,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /https:\/\/verdaccio-clone.org/) t.matchSnapshot(joinedOutput()) }) @@ -1331,7 +1315,8 @@ t.test('audit signatures', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: installWithThirdPartyRegistry, config: { - '@npmcli:registry': registryUrl, + scope: '@npmcli', + registry: registryUrl, }, }) const registry = new MockRegistry({ tap: t, registry: registryUrl }) @@ -1363,7 +1348,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match(joinedOutput(), /1 package has a missing registry signature/) t.matchSnapshot(joinedOutput()) }) @@ -1371,9 +1355,9 @@ t.test('audit signatures', async t => { t.test('multiple registries with keys and signatures', async t => { const registryUrl = 'https://verdaccio-clone.org' const { npm, joinedOutput } = await loadMockNpm(t, { - prefixDir: installWithMultipleRegistries, - config: { - '@npmcli:registry': registryUrl, + prefixDir: { + ...installWithMultipleRegistries, + '.npmrc': `@npmcli:registry=${registryUrl}\n`, }, }) const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') }) @@ -1418,7 +1402,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 2 packages/) t.matchSnapshot(joinedOutput()) }) @@ -1465,7 +1448,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 1 package/) t.matchSnapshot(joinedOutput()) 
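  // The config changes in the hunks below swap the scoped-registry shorthand
  // ('@npmcli:registry': url) for the real `scope` + `registry` config keys;
  // the multi-registry test further down writes the scoped key into a
  // project .npmrc instead. A rough sketch of the two setups, assuming
  // loadMockNpm feeds `config` straight through to the CLI config:
  //
  //   await loadMockNpm(t, { config: { scope: '@npmcli', registry: url } })
  //   await loadMockNpm(t, { prefixDir: { '.npmrc': `@npmcli:registry=${url}\n` } })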
}) @@ -1586,7 +1568,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 1, 'should exit with error') - process.exitCode = 0 t.match( joinedOutput(), // eslint-disable-next-line no-control-regex @@ -1645,7 +1626,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 3 packages/) t.matchSnapshot(joinedOutput()) }) @@ -1653,7 +1633,7 @@ t.test('audit signatures', async t => { t.test('verifies registry deps when filtering by workspace name', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceInstall, - config: { workspace: ['./packages/a'] }, + config: { workspace: './packages/a' }, }) const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') }) const asyncManifest = registry.manifest({ @@ -1699,7 +1679,6 @@ t.test('audit signatures', async t => { await npm.exec('audit', ['signatures']) t.equal(process.exitCode, 0, 'should exit successfully') - process.exitCode = 0 t.match(joinedOutput(), /audited 2 packages/) t.matchSnapshot(joinedOutput()) }) diff --git a/deps/npm/test/lib/commands/bugs.js b/deps/npm/test/lib/commands/bugs.js index 91d144b6bdc970..bf45b9eee81ab3 100644 --- a/deps/npm/test/lib/commands/bugs.js +++ b/deps/npm/test/lib/commands/bugs.js @@ -1,79 +1,71 @@ const t = require('tap') +const { load: loadMockNpm } = require('../../fixtures/mock-npm') const pacote = { - manifest: async (spec, options) => { + manifest: async (spec) => { return spec === 'nobugs' ? { name: 'nobugs', version: '1.2.3', - } - : spec === 'bugsurl' ? { - name: 'bugsurl', - version: '1.2.3', - bugs: 'https://bugzilla.localhost/bugsurl', - } - : spec === 'bugsobj' ? { - name: 'bugsobj', - version: '1.2.3', - bugs: { url: 'https://bugzilla.localhost/bugsobj' }, - } - : spec === 'bugsobj-nourl' ? { - name: 'bugsobj-nourl', - version: '1.2.3', - bugs: { no: 'url here' }, - } - : spec === 'repourl' ? { - name: 'repourl', - version: '1.2.3', - repository: 'https://github.com/foo/repourl', - } - : spec === 'repoobj' ? { - name: 'repoobj', - version: '1.2.3', - repository: { url: 'https://github.com/foo/repoobj' }, - } - : spec === 'mailtest' ? { - name: 'mailtest', - version: '3.7.4', - bugs: { email: 'hello@example.com' }, - } - : spec === 'secondmailtest' ? { - name: 'secondmailtest', - version: '0.1.1', - bugs: { email: 'ABC432abc@a.b.example.net' }, - } - : spec === '.' ? { - name: 'thispkg', - version: '1.2.3', - bugs: 'https://example.com', - } - : null + } : spec === 'bugsurl' ? { + name: 'bugsurl', + version: '1.2.3', + bugs: 'https://bugzilla.localhost/bugsurl', + } : spec === 'bugsobj' ? { + name: 'bugsobj', + version: '1.2.3', + bugs: { url: 'https://bugzilla.localhost/bugsobj' }, + } : spec === 'bugsobj-nourl' ? { + name: 'bugsobj-nourl', + version: '1.2.3', + bugs: { no: 'url here' }, + } : spec === 'repourl' ? { + name: 'repourl', + version: '1.2.3', + repository: 'https://github.com/foo/repourl', + } : spec === 'repoobj' ? { + name: 'repoobj', + version: '1.2.3', + repository: { url: 'https://github.com/foo/repoobj' }, + } : spec === 'mailtest' ? { + name: 'mailtest', + version: '3.7.4', + bugs: { email: 'hello@example.com' }, + } : spec === 'secondmailtest' ? { + name: 'secondmailtest', + version: '0.1.1', + bugs: { email: 'ABC432abc@a.b.example.net' }, + } : spec === '.' ? 
{ + name: 'thispkg', + version: '1.2.3', + bugs: 'https://example.com', + } : null }, } -// keep a tally of which urls got opened -let opened = {} -const openUrl = async (npm, url, errMsg) => { - opened[url] = opened[url] || 0 - opened[url]++ -} - -const Bugs = t.mock('../../../lib/commands/bugs.js', { - pacote, - '../../../lib/utils/open-url.js': openUrl, +t.test('usage', async (t) => { + const { npm } = await loadMockNpm(t) + const bugs = await npm.cmd('bugs') + t.match(bugs.usage, 'bugs', 'usage has command name in it') }) -const bugs = new Bugs({ flatOptions: {}, config: { validate: () => {} } }) +t.test('open bugs urls & emails', async t => { + // keep a tally of which urls got opened + let opened = {} -t.test('usage', (t) => { - t.match(bugs.usage, 'bugs', 'usage has command name in it') - t.end() -}) + const openUrl = async (_, url) => { + opened[url] = opened[url] || 0 + opened[url]++ + } -t.afterEach(() => { - opened = {} -}) -t.test('open bugs urls & emails', t => { - const expect = { + const { npm } = await loadMockNpm(t, { + mocks: { + pacote, + '{LIB}/utils/open-url.js': openUrl, + }, + }) + + const expected = { + '.': 'https://example.com', nobugs: 'https://www.npmjs.com/package/nobugs', 'bugsobj-nourl': 'https://www.npmjs.com/package/bugsobj-nourl', bugsurl: 'https://bugzilla.localhost/bugsurl', @@ -82,19 +74,19 @@ t.test('open bugs urls & emails', t => { repoobj: 'https://github.com/foo/repoobj/issues', mailtest: 'mailto:hello@example.com', secondmailtest: 'mailto:ABC432abc@a.b.example.net', - '.': 'https://example.com', } - const keys = Object.keys(expect) - t.plan(keys.length) - keys.forEach(pkg => { - t.test(pkg, async t => { - await bugs.exec([pkg]) - t.equal(opened[expect[pkg]], 1, 'opened expected url', { opened }) + + for (const [pkg, expect] of Object.entries(expected)) { + await t.test(pkg, async t => { + await npm.exec('bugs', [pkg]) + t.equal(opened[expect], 1, 'opened expected url', { opened }) }) - }) -}) + } -t.test('open default package if none specified', async t => { - await bugs.exec([]) - t.equal(opened['https://example.com'], 1, 'opened expected url', { opened }) + opened = {} + + await t.test('open default package if none specified', async t => { + await npm.exec('bugs', []) + t.equal(opened['https://example.com'], 1, 'opened expected url', { opened }) + }) }) diff --git a/deps/npm/test/lib/commands/config.js b/deps/npm/test/lib/commands/config.js index 35872e722e17e3..f2bdcc7231ddf9 100644 --- a/deps/npm/test/lib/commands/config.js +++ b/deps/npm/test/lib/commands/config.js @@ -26,16 +26,10 @@ t.test('config ignores workspaces', async t => { await t.rejects( sandbox.run('config', ['--workspaces']), { - code: 'EUSAGE', + code: 'ENOWORKSPACES', }, 'rejects with usage' ) - - t.match( - sandbox.logs.warn, - [['config', 'This command does not support workspaces.']], - 'logged the warning' - ) }) t.test('config list', async t => { diff --git a/deps/npm/test/lib/commands/diff.js b/deps/npm/test/lib/commands/diff.js index 0ca9c3b8d078b3..d9ff9e5dad0e6a 100644 --- a/deps/npm/test/lib/commands/diff.js +++ b/deps/npm/test/lib/commands/diff.js @@ -1,1164 +1,1041 @@ const t = require('tap') -const { resolve, join } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const { join, extname } = require('path') +const MockRegistry = require('@npmcli/mock-registry') +const { load: loadMockNpm } = require('../../fixtures/mock-npm') + +const jsonifyTestdir = (obj) => { + for (const [key, value] of Object.entries(obj || {})) { + if 
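// A sketch of what jsonifyTestdir yields for a typical fixture, given the
// branches below (objects under *.json keys are JSON.stringify'd and
// newline-terminated, nested objects recurse, other strings are trimmed and
// newline-terminated):
//
//   jsonifyTestdir({ 'package.json': { name: 'foo' }, 'index.js': 'const x = 1' })
//   // => { 'package.json': '{\n  "name": "foo"\n}\n', 'index.js': 'const x = 1\n' }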
(extname(key) === '.json') { + obj[key] = JSON.stringify(value, null, 2) + '\n' + } else if (typeof value === 'object') { + obj[key] = jsonifyTestdir(value) + } else { + obj[key] = value.trim() + '\n' + } + } + return obj +} -const noop = () => null -let libnpmdiff = noop +// generic helper to call diff with a specified dir contents and registry calls +const mockDiff = async (t, { + exec, + diff = [], + tarballs = {}, + times = {}, + ...opts +} = {}) => { + const tarballFixtures = Object.entries(tarballs).reduce((acc, [spec, fixture]) => { + const [name, version] = spec.split('@') + acc[name] = acc[name] || {} + acc[name][version] = fixture + if (!acc[name][version]['package.json']) { + acc[name][version]['package.json'] = { name, version } + } else { + acc[name][version]['package.json'].name = name + acc[name][version]['package.json'].version = version + } + return acc + }, {}) + + const { prefixDir, globalPrefixDir, otherDirs, config, ...rest } = opts + const { npm, ...res } = await loadMockNpm(t, { + prefixDir: jsonifyTestdir(prefixDir), + otherDirs: jsonifyTestdir({ tarballs: tarballFixtures, ...otherDirs }), + globalPrefixDir: jsonifyTestdir(globalPrefixDir), + config: { + ...config, + diff: [].concat(diff), + }, + ...rest, + }) -const config = { - global: false, - tag: 'latest', - diff: [], -} -const flatOptions = { - global: false, - diffUnified: null, - diffIgnoreAllSpace: false, - diffNoPrefix: false, - diffSrcPrefix: '', - diffDstPrefix: '', - diffText: false, - savePrefix: '^', -} -const fooPath = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0' }), -}) -const npm = mockNpm({ - prefix: fooPath, - config, - flatOptions, - output: noop, -}) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + strict: true, + debug: true, + }) + + const manifests = Object.entries(tarballFixtures).reduce((acc, [name, versions]) => { + acc[name] = registry.manifest({ + name, + packuments: Object.keys(versions).map((version) => ({ version })), + }) + return acc + }, {}) + + for (const [name, manifest] of Object.entries(manifests)) { + await registry.package({ manifest, times: times[name] ?? 
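// `times` lets a test raise how many packument fetches the mock registry
// serves for a given name (the default is the 1 completing this line);
// diffing two registry specs of the same name fetches the packument twice.
// A usage sketch mirroring the tests further down:
//
//   await mockDiff(t, {
//     diff: ['bar@1.0.0', 'bar@2.0.0'],
//     tarballs: { 'bar@1.0.0': {}, 'bar@2.0.0': {} },
//     times: { bar: 2 },
//     exec: [],
//   })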
1 }) + for (const [version, tarballManifest] of Object.entries(manifest.versions)) { + await registry.tarball({ + manifest: tarballManifest, + tarball: join(res.other, 'tarballs', name, version), + }) + } + } -const mocks = { - 'proc-log': { info: noop, verbose: noop }, - libnpmdiff: (...args) => libnpmdiff(...args), - 'npm-registry-fetch': async () => ({}), + if (exec) { + await npm.exec('diff', exec) + res.output = res.joinedOutput() + } + + return { npm, registry, ...res } } -t.afterEach(() => { - config.global = false - config.tag = 'latest' - config.diff = [] - flatOptions.global = false - flatOptions.diffUnified = null - flatOptions.diffIgnoreAllSpace = false - flatOptions.diffNoPrefix = false - flatOptions.diffSrcPrefix = '' - flatOptions.diffDstPrefix = '' - flatOptions.diffText = false - flatOptions.savePrefix = '^' - npm.globalDir = fooPath - npm.prefix = fooPath - libnpmdiff = noop - diff.prefix = undefined - diff.top = undefined -}) +// a more specific helper to call diff against a local package and a registry package +// and assert the diff output contains the matching strings +const assertFoo = async (t, arg) => { + let diff = [] + let exec = [] + + if (typeof arg === 'string' || Array.isArray(arg)) { + diff = arg + } else if (arg && typeof arg === 'object') { + diff = arg.diff + exec = arg.exec + } + + const { output } = await mockDiff(t, { + diff, + prefixDir: { + 'package.json': { name: 'foo', version: '1.0.0' }, + 'index.js': 'const version = "1.0.0"', + 'a.js': 'const a = "a@1.0.0"', + 'b.js': 'const b = "b@1.0.0"', + }, + tarballs: { + 'foo@0.1.0': { + 'index.js': 'const version = "0.1.0"', + 'a.js': 'const a = "a@0.1.0"', + 'b.js': 'const b = "b@0.1.0"', + }, + }, + exec, + }) -const Diff = t.mock('../../../lib/commands/diff.js', mocks) -const diff = new Diff(npm) + const hasFile = (f) => !exec.length || exec.some(e => e.endsWith(f)) -t.test('no args', t => { - t.test('in a project dir', async t => { - t.plan(3) + if (hasFile('package.json')) { + t.match(output, /-\s*"version": "0\.1\.0"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) + } - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@latest', 'should have default spec comparison') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, npm.flatOptions, 'should forward flat options') - } + if (hasFile('index.js')) { + t.match(output, /-\s*const version = "0\.1\.0"/) + t.match(output, /\+\s*const version = "1\.0\.0"/) + } + + if (hasFile('a.js')) { + t.match(output, /-\s*const a = "a@0\.1\.0"/) + t.match(output, /\+\s*const a = "a@1\.0\.0"/) + } + + if (hasFile('b.js')) { + t.match(output, /-\s*const b = "b@0\.1\.0"/) + t.match(output, /\+\s*const b = "b@1\.0\.0"/) + } - npm.prefix = fooPath - await diff.exec([]) + return output +} + +const rejectDiff = async (t, msg, opts) => { + const { npm } = await mockDiff(t, opts) + await t.rejects(npm.exec('diff', []), msg) +} + +t.test('no args', async t => { + t.test('in a project dir', async t => { + const output = await assertFoo(t) + t.matchSnapshot(output) }) t.test('no args, missing package.json name in cwd', async t => { - const path = t.testdir({}) - npm.prefix = path - await t.rejects( - diff.exec([]), - /Needs multiple arguments to compare or run from a project dir./, - 'should throw EDIFF error msg' - ) + await rejectDiff(t, /Needs multiple arguments to compare or run from a project dir./) }) t.test('no args, bad package.json in cwd', async t => { - const path = t.testdir({ - 'package.json': '{invalid"json', + await rejectDiff(t, /Needs 
multiple arguments to compare or run from a project dir./, { + prefixDir: { 'package.json': '{invalid"json' }, }) - npm.prefix = path - - await t.rejects( - diff.exec([]), - /Needs multiple arguments to compare or run from a project dir./, - 'should throw EDIFF error msg' - ) }) - - t.end() }) -t.test('single arg', t => { +t.test('single arg', async t => { t.test('spec using cwd package name', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@1.0.0', 'should forward single spec') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, npm.flatOptions, 'should forward flat options') - } - - config.diff = ['foo@1.0.0'] - npm.prefix = fooPath - await diff.exec([]) + await assertFoo(t, 'foo@0.1.0') }) t.test('unknown spec, no package.json', async t => { - const path = t.testdir({}) - - config.diff = ['foo@1.0.0'] - npm.prefix = path - await t.rejects( - diff.exec([]), - /Needs multiple arguments to compare or run from a project dir./, - 'should throw usage error' - ) + await rejectDiff(t, /Needs multiple arguments to compare or run from a project dir./, { + diff: ['foo@1.0.0'], + }) }) t.test('spec using semver range', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@~1.0.0', 'should forward single spec') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, npm.flatOptions, 'should forward flat options') - } - - config.diff = ['foo@~1.0.0'] - await diff.exec([]) + await assertFoo(t, 'foo@~0.1.0') }) t.test('version', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@2.1.4', 'should convert to expected first spec') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match(opts, npm.flatOptions, 'should forward flat options') - } - - config.diff = ['2.1.4'] - await diff.exec([]) + await assertFoo(t, '0.1.0') }) t.test('version, no package.json', async t => { - const path = t.testdir({}) - npm.prefix = path - config.diff = ['2.1.4'] - await t.rejects( - diff.exec([]), - /Needs multiple arguments to compare or run from a project dir./, - 'should throw an error message explaining usage' - ) + await rejectDiff(t, /Needs multiple arguments to compare or run from a project dir./, { + diff: ['0.1.0'], + }) }) t.test('version, filtering by files', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@2.1.4', 'should use expected spec') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match( - opts, - { - ...npm.flatOptions, - diffFiles: ['./foo.js', './bar.js'], - }, - 'should forward flatOptions and diffFiles' - ) - } - - config.diff = ['2.1.4'] - await diff.exec(['./foo.js', './bar.js']) + const output = await assertFoo(t, { diff: '0.1.0', exec: ['./a.js', './b.js'] }) + t.matchSnapshot(output) }) t.test('spec is not a dep', async t => { - t.plan(2) - - const path = t.testdir({ - node_modules: {}, - 'package.json': JSON.stringify({ - name: 'my-project', - }), + const { output } = await mockDiff(t, { + diff: 'bar@1.0.0', + prefixDir: { + node_modules: {}, + 'package.json': { name: 'my-project', version: '1.0.0' }, + }, + tarballs: { + 'bar@1.0.0': {}, + }, + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should have current spec') - t.equal(b, `file:${path}`, 'should compare to cwd') - } - - config.diff = ['bar@1.0.0'] - npm.prefix = path - - await diff.exec([]) + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "my-project"/) }) t.test('unknown package 
name', async t => { - t.plan(3) - - const path = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + const { npm, registry } = await mockDiff(t, { + diff: 'bar', + prefixDir: { + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, }) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'simple-output@*', 'should forward single spec') - t.equal(b, `file:${path}`, 'should compare to cwd') - t.match(opts, npm.flatOptions, 'should forward flat options') - } - - config.diff = ['simple-output'] - npm.prefix = path - await diff.exec([]) + registry.getPackage('bar', { times: 2, code: 404 }) + t.rejects(npm.exec('diff', []), /404 Not Found.*bar/) }) t.test('unknown package name, no package.json', async t => { - const path = t.testdir({}) - - config.diff = ['bar'] - npm.prefix = path - await t.rejects( - diff.exec([]), - /Needs multiple arguments to compare or run from a project dir./, - 'should throw usage error' - ) + const { npm } = await mockDiff(t, { + diff: 'bar', + }) + t.rejects(npm.exec('diff', []), + /Needs multiple arguments to compare or run from a project dir./) }) t.test('transform single direct dep name into spec comparison', async t => { - t.plan(4) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), - }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + const { output } = await mockDiff(t, { + diff: 'bar', + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, + }, }, - }), - }) - - config.diff = ['bar'] - npm.prefix = path - - const Diff = t.mock('../../../lib/commands/diff.js', { - ...mocks, - pacote: { - packument: spec => { - t.equal(spec.name, 'bar', 'should have expected spec name') + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, }, - 'npm-pick-manifest': (packument, target) => { - t.equal(target, '^1.0.0', 'should use expected target') - return { version: '1.8.10' } - }, - libnpmdiff: async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'bar@1.8.10', 'should have possible semver range spec') + tarballs: { + 'bar@1.8.0': {}, }, + times: { bar: 2 }, + exec: [], }) - const diff = new Diff(npm) - await diff.exec([]) + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "1\.8\.0"/) }) t.test('global space, transform single direct dep name', async t => { - t.plan(4) - - const path = t.testdir({ - globalDir: { - lib: { - node_modules: { - lorem: { - 'package.json': JSON.stringify({ - name: 'lorem', - version: '2.0.0', - }), + const { output } = await mockDiff(t, { + diff: 'lorem', + config: { + global: true, + }, + globalPrefixDir: { + node_modules: { + lorem: { + 'package.json': { + name: 'lorem', + version: '2.0.0', }, }, }, }, - project: { + prefixDir: { node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + lorem: { + 'package.json': { + name: 'lorem', + version: '3.0.0', + }, }, }, - 'package.json': JSON.stringify({ + 'package.json': { name: 'my-project', dependencies: { - bar: '^1.0.0', + lorem: '^3.0.0', }, - }), - }, - }) - - config.global = true - flatOptions.global = true - config.diff = ['lorem'] - npm.prefix = resolve(path, 'project') - npm.globalDir = resolve(path, 'globalDir/lib/node_modules') 
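// The manual wiring being removed here (config.global, npm.prefix,
// npm.globalDir) is subsumed by loadMockNpm options. A minimal sketch of the
// replacement, using only options already shown in the rewritten test above
// (config.global plus a globalPrefixDir fixture):
//
//   await mockDiff(t, {
//     config: { global: true },
//     globalPrefixDir: {
//       node_modules: {
//         lorem: { 'package.json': { name: 'lorem', version: '2.0.0' } },
//       },
//     },
//   })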
- - const Diff = t.mock('../../../lib/commands/diff.js', { - ...mocks, - pacote: { - packument: spec => { - t.equal(spec.name, 'lorem', 'should have expected spec name') }, }, - 'npm-pick-manifest': (packument, target) => { - t.equal(target, '*', 'should always want latest in global space') - return { version: '2.1.0' } + tarballs: { + 'lorem@1.0.0': {}, }, - libnpmdiff: async ([a, b], opts) => { - t.equal( - a, - `lorem@file:${resolve(path, 'globalDir/lib/node_modules/lorem')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'lorem@2.1.0', 'should have possible semver range spec') + times: { + lorem: 2, }, + exec: [], }) - const diff = new Diff(npm) - await diff.exec([]) + t.match(output, 'lorem') + t.match(output, /-\s*"version": "2\.0\.0"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) }) t.test('transform single spec into spec comparison', async t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { output } = await mockDiff(t, { + diff: 'bar@2.0.0', + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, + }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar@2.0.0': {}, + }, + times: { + lorem: 2, + }, + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'bar@2.0.0', 'should have expected comparison spec') - } - - config.diff = ['bar@2.0.0'] - npm.prefix = path - - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('transform single spec from transitive deps', async t => { - t.plan(4) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - dependencies: { - lorem: '^2.0.0', + const { output } = await mockDiff(t, { + diff: 'lorem', + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + dependencies: { + lorem: '^2.0.0', + }, }, - }), - }, - lorem: { - 'package.json': JSON.stringify({ - name: 'lorem', - version: '2.0.0', - }), - }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + }, + lorem: { + 'package.json': { + name: 'lorem', + version: '2.0.0', + }, + }, }, - }), - }) - - const Diff = t.mock('../../../lib/commands/diff.js', { - ...mocks, - pacote: { - packument: spec => { - t.equal(spec.name, 'lorem', 'should have expected spec name') + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, }, - 'npm-pick-manifest': (packument, target) => { - t.equal(target, '^2.0.0', 'should target first semver-range spec found') - return { version: '2.2.2' } + tarballs: { + 'lorem@2.2.2': {}, }, - libnpmdiff: async ([a, b], opts) => { - t.equal( - a, - `lorem@file:${resolve(path, 'node_modules/lorem')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'lorem@2.2.2', 'should have expected target spec') + times: { + lorem: 2, }, + exec: [], }) - const diff = new Diff(npm) - - config.diff = ['lorem'] - npm.prefix = path - await diff.exec([]) + t.match(output, 'lorem') + t.match(output, /-\s*"version": "2\.0\.0"/) + t.match(output, /\+\s*"version": 
"2\.2\.2"/) }) t.test('missing actual tree', async t => { - t.plan(2) - - const path = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-project', - }), - }) - - const Diff = t.mock('../../../lib/commands/diff.js', { - ...mocks, - '@npmcli/arborist': class { - constructor () { - throw new Error('ERR') - } + const { output } = await mockDiff(t, { + diff: 'lorem', + prefixDir: { + 'package.json': { + name: 'lorem', + version: '2.0.0', + }, + }, + mocks: { + '@npmcli/arborist': class { + constructor () { + throw new Error('ERR') + } + }, }, - libnpmdiff: async ([a, b], opts) => { - t.equal(a, 'lorem@*', 'should target any version of pkg name') - t.equal(b, `file:${path}`, 'should target current cwd') + tarballs: { + 'lorem@2.2.2': {}, }, + exec: [], }) - const diff = new Diff(npm) - config.diff = ['lorem'] - npm.prefix = path - - await diff.exec([]) + t.match(output, 'lorem') + t.match(output, /-\s*"version": "2\.2\.2"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('unknown package name', async t => { - t.plan(2) + const { output } = await mockDiff(t, { + diff: 'bar', + prefixDir: { + 'package.json': { version: '1.0.0' }, + }, - const path = t.testdir({ - 'package.json': JSON.stringify({ version: '1.0.0' }), + tarballs: { + 'bar@2.2.2': {}, + }, + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@*', 'should target any version of pkg name') - t.equal(b, `file:${path}`, 'should compare to cwd') - } - - config.diff = ['bar'] - npm.prefix = path - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "2\.2\.2"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) }) t.test('use project name in project dir', async t => { - t.plan(2) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@*', 'should target any version of pkg name') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - } + const { output } = await mockDiff(t, { + diff: 'foo', + prefixDir: { + 'package.json': { name: 'foo', version: '1.0.0' }, + }, + tarballs: { + 'foo@2.2.2': {}, + }, + exec: [], + }) - config.diff = ['foo'] - await diff.exec([]) + t.match(output, 'foo') + t.match(output, /-\s*"version": "2\.2\.2"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) }) t.test('dir spec type', async t => { - t.plan(2) - - const otherPath = resolve('/path/to/other-dir') - libnpmdiff = async ([a, b], opts) => { - t.equal(a, `file:${otherPath}`, 'should target dir') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - } + const { output } = await mockDiff(t, { + diff: '../other/other-pkg', + prefixDir: { + 'package.json': { name: 'foo', version: '1.0.0' }, + }, + otherDirs: { + 'other-pkg': { + 'package.json': { name: 'foo', version: '2.0.0' }, + }, + }, + exec: [], + }) - config.diff = [otherPath] - await diff.exec([]) + t.match(output, 'foo') + t.match(output, /-\s*"version": "2\.0\.0"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) }) t.test('unsupported spec type', async t => { - config.diff = ['git+https://github.com/user/foo'] + const p = mockDiff(t, { + diff: 'git+https://github.com/user/foo', + exec: [], + }) + await t.rejects( - diff.exec([]), + p, /Spec type git not supported./, 'should throw spec type not supported error.' 
) }) - - t.end() }) -t.test('first arg is a qualified spec', t => { +t.test('first arg is a qualified spec', async t => { t.test('second arg is ALSO a qualified spec', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should set expected first spec') - t.equal(b, 'bar@^2.0.0', 'should set expected second spec') - t.match(opts, npm.flatOptions, 'should forward flat options') - } + const { output } = await mockDiff(t, { + diff: ['bar@1.0.0', 'bar@^2.0.0'], + tarballs: { + 'bar@1.0.0': {}, + 'bar@2.2.2': {}, + }, + times: { + bar: 2, + }, + exec: [], + }) - config.diff = ['bar@1.0.0', 'bar@^2.0.0'] - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.2\.2"/) }) t.test('second arg is a known dependency name', async t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, + }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar@2.0.0': {}, + }, + diff: ['bar@2.0.0', 'bar'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@2.0.0', 'should set expected first spec') - t.equal( - b, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - } - - npm.prefix = path - config.diff = ['bar@2.0.0', 'bar'] - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "2\.0\.0"/) + t.match(output, /\+\s*"version": "1\.0\.0"/) }) t.test('second arg is a valid semver version', async t => { - t.plan(2) - - config.diff = ['bar@1.0.0', '2.0.0'] - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should set expected first spec') - t.equal(b, 'bar@2.0.0', 'should use name from first arg') - } + const { output } = await mockDiff(t, { + tarballs: { + 'bar@1.0.0': {}, + 'bar@2.0.0': {}, + }, + times: { + bar: 2, + }, + diff: ['bar@1.0.0', '2.0.0'], + exec: [], + }) - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('second arg is an unknown dependency name', async t => { - t.plan(2) + const { output } = await mockDiff(t, { + tarballs: { + 'bar@1.0.0': {}, + 'bar-fork@2.0.0': {}, + }, + diff: ['bar@1.0.0', 'bar-fork'], + exec: [], + }) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should set expected first spec') - t.equal(b, 'bar-fork@*', 'should target any version if not a dep') - } + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "bar-fork"/) - config.diff = ['bar@1.0.0', 'bar-fork'] - await diff.exec([]) + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) - - t.end() }) t.test('first arg is a known dependency name', async t => { - t.test('second arg is a qualified spec', t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + t.test('second arg is a qualified spec', async t => { + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, 
+ }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar@2.0.0': {}, + }, + diff: ['bar', 'bar@2.0.0'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'bar@2.0.0', 'should set expected second spec') - } - - npm.prefix = path - config.diff = ['bar', 'bar@2.0.0'] - diff.exec([], err => { - if (err) { - throw err - } - }) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) - t.test('second arg is ALSO a known dependency', t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + t.test('second arg is ALSO a known dependency', async t => { + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, + }, + 'bar-fork': { + 'package.json': { + name: 'bar-fork', + version: '1.0.0', + }, + }, }, - 'bar-fork': { - 'package.json': JSON.stringify({ - name: 'bar-fork', - version: '1.0.0', - }), + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', - }, - }), + diff: ['bar', 'bar-fork'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal( - b, - `bar-fork@file:${resolve(path, 'node_modules/bar-fork')}`, - 'should target fork local node_modules pkg' - ) - } - - npm.prefix = path - config.diff = ['bar', 'bar-fork'] - diff.exec([], err => { - if (err) { - throw err - } - }) + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "bar-fork"/) }) - t.test('second arg is a valid semver version', t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + t.test('second arg is a valid semver version', async t => { + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + }, + }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar@2.0.0': {}, + }, + diff: ['bar', '2.0.0'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'bar@2.0.0', 'should use package name from first arg') - } - - npm.prefix = path - config.diff = ['bar', '2.0.0'] - diff.exec([], err => { - if (err) { - throw err - } - }) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('second arg is an unknown dependency name', async t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '1.0.0', + 
}, + }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar-fork@1.0.0': {}, + }, + diff: ['bar', 'bar-fork'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal( - a, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should target local node_modules pkg' - ) - t.equal(b, 'bar-fork@*', 'should set expected second spec') - } - - npm.prefix = path - config.diff = ['bar', 'bar-fork'] - await diff.exec([]) + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "bar-fork"/) }) - - t.end() }) -t.test('first arg is a valid semver range', t => { +t.test('first arg is a valid semver range', async t => { t.test('second arg is a qualified spec', async t => { - t.plan(2) - - config.diff = ['1.0.0', 'bar@2.0.0'] - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should use name from second arg') - t.equal(b, 'bar@2.0.0', 'should use expected spec') - } + const { output } = await mockDiff(t, { + tarballs: { + 'bar@1.0.0': {}, + 'bar@2.0.0': {}, + }, + diff: ['1.0.0', 'bar@2.0.0'], + times: { bar: 2 }, + exec: [], + }) - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('second arg is a known dependency', async t => { - t.plan(2) - - const path = t.testdir({ - node_modules: { - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '2.0.0', - }), + const { output } = await mockDiff(t, { + prefixDir: { + node_modules: { + bar: { + 'package.json': { + name: 'bar', + version: '2.0.0', + }, + }, }, - }, - 'package.json': JSON.stringify({ - name: 'my-project', - dependencies: { - bar: '^1.0.0', + 'package.json': { + name: 'my-project', + dependencies: { + bar: '^1.0.0', + }, }, - }), + }, + tarballs: { + 'bar@1.0.0': {}, + }, + diff: ['1.0.0', 'bar'], + exec: [], }) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@1.0.0', 'should use name from second arg') - t.equal( - b, - `bar@file:${resolve(path, 'node_modules/bar')}`, - 'should set expected second spec from nm' - ) - } - - npm.prefix = path - config.diff = ['1.0.0', 'bar'] - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('second arg is ALSO a semver version', async t => { - t.plan(2) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@1.0.0', 'should use name from project dir') - t.equal(b, 'foo@2.0.0', 'should use name from project dir') - } + const { output } = await mockDiff(t, { + prefixDir: { + 'package.json': { + name: 'bar', + }, + }, + tarballs: { + 'bar@1.0.0': {}, + 'bar@2.0.0': {}, + }, + diff: ['1.0.0', '2.0.0'], + times: { bar: 2 }, + exec: [], + }) - config.diff = ['1.0.0', '2.0.0'] - await diff.exec([]) + t.match(output, 'bar') + t.match(output, /-\s*"version": "1\.0\.0"/) + t.match(output, /\+\s*"version": "2\.0\.0"/) }) t.test('second arg is ALSO a semver version BUT cwd not a project dir', async t => { - const path = t.testdir({}) - config.diff = ['1.0.0', '2.0.0'] - npm.prefix = path + const p = mockDiff(t, { + diff: ['1.0.0', '2.0.0'], + exec: [], + }) await t.rejects( - diff.exec([]), + p, /Needs to be run from a project dir in order to diff two versions./, 'should throw two versions need project dir error usage msg' ) }) t.test('second arg is an unknown dependency name', async t => { - 
t.plan(2)
-
-    libnpmdiff = async ([a, b], opts) => {
-      t.equal(a, 'bar@1.0.0', 'should use name from second arg')
-      t.equal(b, 'bar@*', 'should compare against any version')
-    }
+    const { output } = await mockDiff(t, {
+      prefixDir: {
+        'package.json': {
+          name: 'bar',
+        },
+      },
+      tarballs: {
+        'bar@1.0.0': {},
+        'bar@2.0.0': {},
+      },
+      diff: ['1.0.0', 'bar'],
+      times: { bar: 2 },
+      exec: [],
+    })
 
-    config.diff = ['1.0.0', 'bar']
-    await diff.exec([])
+    t.match(output, 'bar')
+    t.match(output, /-\s*"version": "1\.0\.0"/)
+    t.match(output, /\+\s*"version": "2\.0\.0"/)
   })
 
   t.test('second arg is a qualified spec, missing actual tree', async t => {
-    t.plan(2)
-
-    const path = t.testdir({
-      'package.json': JSON.stringify({
-        name: 'my-project',
-      }),
-    })
-
-    const Diff = t.mock('../../../lib/commands/diff.js', {
-      ...mocks,
-      '@npmcli/arborist': class {
-        constructor () {
-          throw new Error('ERR')
-        }
+    const { output } = await mockDiff(t, {
+      prefixDir: {
+        'package.json': {
+          name: 'lorem',
+          version: '2.0.0',
+        },
       },
-      libnpmdiff: async ([a, b], opts) => {
-        t.equal(a, 'lorem@1.0.0', 'should target latest version of pkg name')
-        t.equal(b, 'lorem@2.0.0', 'should target expected spec')
+      mocks: {
+        '@npmcli/arborist': class {
+          constructor () {
+            throw new Error('ERR')
+          }
+        },
+      },
+      tarballs: {
+        'lorem@1.0.0': {},
+        'lorem@2.0.0': {},
       },
+      times: { lorem: 2 },
+      diff: ['1.0.0', 'lorem@2.0.0'],
+      exec: [],
     })
-    const diff = new Diff(npm)
-
-    config.diff = ['1.0.0', 'lorem@2.0.0']
-    npm.prefix = path
-    await diff.exec([])
+    t.match(output, 'lorem')
+    t.match(output, /-\s*"version": "1\.0\.0"/)
+    t.match(output, /\+\s*"version": "2\.0\.0"/)
   })
-
-  t.end()
 })
 
-t.test('first arg is an unknown dependency name', t => {
-  t.test('second arg is a qualified spec', t => {
-    t.plan(4)
-
-    libnpmdiff = async ([a, b], opts) => {
-      t.equal(a, 'bar@*', 'should set expected first spec')
-      t.equal(b, 'bar@2.0.0', 'should set expected second spec')
-      t.match(opts, npm.flatOptions, 'should forward flat options')
-      t.match(opts, { where: fooPath }, 'should forward pacote options')
-    }
-
-    config.diff = ['bar', 'bar@2.0.0']
-    diff.exec([], err => {
-      if (err) {
-        throw err
-      }
+t.test('first arg is an unknown dependency name', async t => {
+  t.test('second arg is a qualified spec', async t => {
+    const { output } = await mockDiff(t, {
+      tarballs: {
+        'bar@2.0.0': {},
+        'bar@3.0.0': {},
+      },
+      times: { bar: 2 },
+      diff: ['bar', 'bar@2.0.0'],
+      exec: [],
    })
-  })
 
-  t.test('second arg is a known dependency', t => {
-    t.plan(2)
+    t.match(output, 'bar')
+    t.match(output, /-\s*"version": "3\.0\.0"/)
+    t.match(output, /\+\s*"version": "2\.0\.0"/)
+  })
 
-    const path = t.testdir({
-      node_modules: {
-        bar: {
-          'package.json': JSON.stringify({
-            name: 'bar',
-            version: '2.0.0',
-          }),
+  t.test('second arg is a known dependency', async t => {
+    const { output } = await mockDiff(t, {
+      prefixDir: {
+        node_modules: {
+          bar: {
+            'package.json': {
+              name: 'bar',
+              version: '2.0.0',
+            },
+          },
         },
-      },
-      'package.json': JSON.stringify({
-        name: 'my-project',
-        dependencies: {
-          bar: '^1.0.0',
+        'package.json': {
+          name: 'my-project',
+          dependencies: {
+            bar: '^1.0.0',
+          },
         },
-      }),
+      },
+      tarballs: {
+        'bar-fork@2.0.0': {},
+      },
+      diff: ['bar-fork', 'bar'],
+      exec: [],
     })
 
-    libnpmdiff = async ([a, b], opts) => {
-      t.equal(a, 'bar-fork@*', 'should use any version')
-      t.equal(
-        b,
-        `bar@file:${resolve(path, 'node_modules/bar')}`,
-        'should target local node_modules pkg'
-      )
-    }
-
-    npm.prefix = path
-    config.diff 
= ['bar-fork', 'bar'] - diff.exec([], err => { - if (err) { - throw err - } - }) + t.match(output, /-\s*"name": "bar-fork"/) + t.match(output, /\+\s*"name": "bar"/) }) - t.test('second arg is a valid semver version', t => { - t.plan(2) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@*', 'should use any version') - t.equal(b, 'bar@^1.0.0', 'should use name from first arg') - } - - config.diff = ['bar', '^1.0.0'] - diff.exec([], err => { - if (err) { - throw err - } - }) - }) - - t.test('second arg is ALSO an unknown dependency name', t => { - t.plan(2) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@*', 'should use any version') - t.equal(b, 'bar-fork@*', 'should use any version') - } - - config.diff = ['bar', 'bar-fork'] - diff.exec([], err => { - if (err) { - throw err - } + t.test('second arg is a valid semver version', async t => { + const { output } = await mockDiff(t, { + tarballs: { + 'bar@1.5.0': {}, + 'bar@2.0.0': {}, + }, + times: { bar: 2 }, + diff: ['bar', '^1.0.0'], + exec: [], }) - }) - - t.test('cwd not a project dir', t => { - t.plan(2) - const path = t.testdir({}) - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'bar@*', 'should use any version') - t.equal(b, 'bar-fork@*', 'should use any version') - } - - config.diff = ['bar', 'bar-fork'] - npm.prefix = path - - diff.exec([], err => { - if (err) { - throw err - } - }) + t.match(output, 'bar') + t.match(output, /-\s*"version": "2\.0\.0"/) + t.match(output, /\+\s*"version": "1\.5\.0"/) }) - t.end() -}) - -t.test('various options', t => { - t.test('using --name-only option', async t => { - t.plan(1) - - flatOptions.diffNameOnly = true - - libnpmdiff = async ([a, b], opts) => { - t.match( - opts, - { - ...npm.flatOptions, - diffNameOnly: true, + t.test('second arg is ALSO an unknown dependency name', async t => { + const { output } = await mockDiff(t, { + prefixDir: { + 'package.json': { + name: 'my-project', }, - 'should forward nameOnly=true option' - ) - } + }, + tarballs: { + 'bar@1.0.0': {}, + 'bar-fork@1.0.0': {}, + }, + diff: ['bar', 'bar-fork'], + exec: [], + }) - await diff.exec([]) + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "bar-fork"/) }) - t.test('set files after both versions', async t => { - t.plan(3) - - config.diff = ['2.1.4', '3.0.0'] - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@2.1.4', 'should use expected spec') - t.equal(b, 'foo@3.0.0', 'should use expected spec') - t.match( - opts, - { - ...npm.flatOptions, - diffFiles: ['./foo.js', './bar.js'], - }, - 'should forward diffFiles values' - ) - } + t.test('cwd not a project dir', async t => { + const { output } = await mockDiff(t, { + tarballs: { + 'bar@1.0.0': {}, + 'bar-fork@1.0.0': {}, + }, + diff: ['bar', 'bar-fork'], + exec: [], + }) - await diff.exec(['./foo.js', './bar.js']) + t.match(output, /-\s*"name": "bar"/) + t.match(output, /\+\s*"name": "bar-fork"/) }) +}) - t.test('set files no diff args', async t => { - t.plan(3) - - libnpmdiff = async ([a, b], opts) => { - t.equal(a, 'foo@latest', 'should have default spec') - t.equal(b, `file:${fooPath}`, 'should compare to cwd') - t.match( - opts, - { - ...npm.flatOptions, - diffFiles: ['./foo.js', './bar.js'], - }, - 'should forward all remaining items as filenames' - ) - } +t.test('various options', async t => { + const mockOptions = async (t, config) => { + const file = (v) => new Array(50).fill(0).map((_, i) => `${i}${i === 20 ? 
v : ''}`).join('\n')
+ const mock = await mockDiff(t, {
+ diff: ['bar@2.0.0', 'bar@3.0.0'],
+ config,
+ exec: [],
+ tarballs: {
+ 'bar@2.0.0': { 'index.js': file('2.0.0') },
+ 'bar@3.0.0': { 'index.js': file('3.0.0') },
+ },
+ times: { bar: 2 },
+ })
+
+ return mock
+ }

- await diff.exec(['./foo.js', './bar.js'])
+ t.test('using --name-only option', async t => {
+ const { output } = await mockOptions(t, {
+ 'diff-name-only': true,
+ })
+ t.matchSnapshot(output)
 })

 t.test('using diff option', async t => {
- t.plan(1)
-
- flatOptions.diffContext = 5
- flatOptions.diffIgnoreWhitespace = true
- flatOptions.diffNoPrefix = false
- flatOptions.diffSrcPrefix = 'foo/'
- flatOptions.diffDstPrefix = 'bar/'
- flatOptions.diffText = true
-
- libnpmdiff = async ([a, b], opts) => {
- t.match(
- opts,
- {
- ...npm.flatOptions,
- diffContext: 5,
- diffIgnoreWhitespace: true,
- diffNoPrefix: false,
- diffSrcPrefix: 'foo/',
- diffDstPrefix: 'bar/',
- diffText: true,
- },
- 'should forward diff options'
- )
- }
+ const { output } = await mockOptions(t, {
+ 'diff-context': 5,
+ 'diff-ignore-whitespace': true,
+ 'diff-no-prefix': false,
+ 'diff-src-prefix': 'foo/',
+ 'diff-dst-prefix': 'bar/',
+ 'diff-text': true,

- await diff.exec([])
+ })
+ t.matchSnapshot(output)
 })
-
- t.end()
})

t.test('too many args', async t => {
- config.diff = ['a', 'b', 'c']
+ const { npm } = await mockDiff(t, {
+ diff: ['a', 'b', 'c'],
+ })
+
 await t.rejects(
- diff.exec([]),
+ npm.exec('diff', []),
 /Can't use more than two --diff arguments./,
 'should throw usage error'
 )
})

-t.test('workspaces', t => {
- const path = t.testdir({
- 'package.json': JSON.stringify({
- name: 'workspaces-test',
- version: '1.2.3-test',
- workspaces: ['workspace-a', 'workspace-b', 'workspace-c'],
- }),
- 'workspace-a': {
- 'package.json': JSON.stringify({
- name: 'workspace-a',
- version: '1.2.3-a',
- }),
- },
- 'workspace-b': {
- 'package.json': JSON.stringify({
- name: 'workspace-b',
- version: '1.2.3-b',
- }),
- },
- 'workspace-c': JSON.stringify({
+t.test('workspaces', async t => {
+ const mockWorkspaces = (t, workspaces = true, opts) => mockDiff(t, {
+ prefixDir: {
 'package.json': {
- name: 'workspace-n',
- version: '1.2.3-n',
+ name: 'workspaces-test',
+ version: '1.2.3',
+ workspaces: ['workspace-a', 'workspace-b', 'workspace-c'],
 },
- }),
+ 'workspace-a': {
+ 'package.json': {
+ name: 'workspace-a',
+ version: '1.2.3-a',
+ },
+ },
+ 'workspace-b': {
+ 'package.json': {
+ name: 'workspace-b',
+ version: '1.2.3-b',
+ },
+ },
+ 'workspace-c': {
+ 'package.json': {
+ name: 'workspace-c',
+ version: '1.2.3-c',
+ },
+ },
+ },
+ exec: [],
+ config: workspaces === true ? 
{ workspaces } : { workspace: workspaces }, + ...opts, }) t.test('all workspaces', async t => { - const diffCalls = [] - libnpmdiff = async ([a, b]) => { - diffCalls.push([a, b]) - } - npm.prefix = path - npm.localPrefix = path - await diff.execWorkspaces([], []) - t.same( - diffCalls, - [ - ['workspace-a@latest', join(`file:${path}`, 'workspace-a')], - ['workspace-b@latest', join(`file:${path}`, 'workspace-b')], - ], - 'should call libnpmdiff with workspaces params' - ) + const { output } = await mockWorkspaces(t, true, { + tarballs: { + 'workspace-a@2.0.0-a': {}, + 'workspace-b@2.0.0-b': {}, + 'workspace-c@2.0.0-c': {}, + }, + }) + + t.match(output, '"name": "workspace-a"') + t.match(output, /-\s*"version": "2\.0\.0-a"/) + t.match(output, /\+\s*"version": "1\.2\.3-a"/) + + t.match(output, '"name": "workspace-b"') + t.match(output, /-\s*"version": "2\.0\.0-b"/) + t.match(output, /\+\s*"version": "1\.2\.3-b"/) + + t.match(output, '"name": "workspace-c"') + t.match(output, /-\s*"version": "2\.0\.0-c"/) + t.match(output, /\+\s*"version": "1\.2\.3-c"/) }) t.test('one workspace', async t => { - const diffCalls = [] - libnpmdiff = async ([a, b]) => { - diffCalls.push([a, b]) - } - npm.prefix = path - npm.localPrefix = path - await diff.execWorkspaces([], ['workspace-a']) - t.same( - diffCalls, - [['workspace-a@latest', join(`file:${path}`, 'workspace-a')]], - 'should call libnpmdiff with workspaces params' - ) + const { output } = await mockWorkspaces(t, 'workspace-a', { + tarballs: { + 'workspace-a@2.0.0-a': {}, + }, + }) + + t.match(output, '"name": "workspace-a"') + t.match(output, /-\s*"version": "2\.0\.0-a"/) + t.match(output, /\+\s*"version": "1\.2\.3-a"/) + + t.notMatch(output, '"name": "workspace-b"') + t.notMatch(output, '"name": "workspace-c"') }) t.test('invalid workspace', async t => { - libnpmdiff = () => { - t.fail('should not call libnpmdiff') - } - npm.prefix = path - npm.localPrefix = path - await t.rejects(diff.execWorkspaces([], ['workspace-x']), /No workspaces found/) - await t.rejects(diff.execWorkspaces([], ['workspace-x']), /workspace-x/) + const p = mockWorkspaces(t, 'workspace-x') + await t.rejects(p, /No workspaces found/) + await t.rejects(p, /workspace-x/) }) - t.end() }) diff --git a/deps/npm/test/lib/commands/dist-tag.js b/deps/npm/test/lib/commands/dist-tag.js index 464f5bc9392d80..4cc241f74582d1 100644 --- a/deps/npm/test/lib/commands/dist-tag.js +++ b/deps/npm/test/lib/commands/dist-tag.js @@ -1,15 +1,36 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const realFetch = require('npm-registry-fetch') +const mockNpm = require('../../fixtures/mock-npm') -let result = '' -let log = '' - -t.afterEach(() => { - result = '' - log = '' -}) +const fixtures = { + workspace: { + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + name: 'workspace-c', + version: '1.0.0', + }), + }, + }, +} -const routeMap = { +const tags = { '/-/package/@scoped%2fpkg/dist-tags': { latest: '1.0.0', a: '0.0.1', @@ -40,67 +61,70 @@ const routeMap = { }, } -// XXX overriding this does not appear to do anything, adding t.plan to things -// that use it fails the test -let npmRegistryFetchMock = (url, opts) 
=> { - if (url === '/-/package/foo/dist-tags') { - throw new Error('no package found') - } +const mockDist = async (t, { ...npmOpts } = {}) => { + const getTag = async (url) => ({ ...tags })[url] - return routeMap[url] -} + let fetchOpts + const nrf = async (url, opts) => { + fetchOpts = opts -npmRegistryFetchMock.json = async (url, opts) => { - return routeMap[url] -} + if (url === '/-/package/foo/dist-tags') { + throw new Error('no package found') + } -const logger = (...msgs) => { - for (const msg of [...msgs]) { - log += msg + ' ' + return getTag(url) } - log += '\n' -} + const mock = await mockNpm(t, { + ...npmOpts, + mocks: { + 'npm-registry-fetch': Object.assign(nrf, realFetch, { json: getTag }), + }, + }) -const DistTag = t.mock('../../../lib/commands/dist-tag.js', { - 'proc-log': { - error: logger, - info: logger, - verbose: logger, - warn: logger, - }, - get 'npm-registry-fetch' () { - return npmRegistryFetchMock - }, -}) + const usage = await mock.npm.cmd('dist-tag').then(c => c.usage) -const config = {} -const npm = mockNpm({ - config, - output: msg => { - result = result ? [result, msg].join('\n') : msg - }, -}) -const distTag = new DistTag(npm) + return { + ...mock, + distTag: { + exec: (args) => mock.npm.exec('dist-tag', args), + usage, + completion: (remain) => mock.npm.cmd('dist-tag').then(c => c.completion({ + conf: { argv: { remain } }, + })), + }, + fetchOpts: () => fetchOpts, + result: () => mock.joinedOutput(), + logs: () => { + const distLogs = mock.logs.filter(l => l[1].startsWith('dist-tag')) + return distLogs.map(([, ...parts]) => { + return parts.map(p => p.toString()).join(' ').trim() + }).join('\n').trim() + }, + } +} t.test('ls in current package', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: '@scoped/pkg', - }), + const { distTag, result } = await mockDist(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: '@scoped/pkg', + }), + }, }) await distTag.exec(['ls']) t.matchSnapshot( - result, + result(), 'should list available tags for current package' ) }) t.test('ls global', async t => { - t.teardown(() => { - config.global = false + const { distTag } = await mockDist(t, { + config: { + global: true, + }, }) - config.global = true await t.rejects( distTag.exec(['ls']), distTag.usage, @@ -109,20 +133,22 @@ t.test('ls global', async t => { }) t.test('no args in current package', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: '@scoped/pkg', - }), + const { distTag, result } = await mockDist(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: '@scoped/pkg', + }), + }, }) await distTag.exec([]) t.matchSnapshot( - result, + result(), 'should default to listing available tags for current package' ) }) t.test('borked cmd usage', async t => { - npm.prefix = t.testdir({}) + const { distTag } = await mockDist(t) await t.rejects( distTag.exec(['borked', '@scoped/pkg']), distTag.usage, @@ -131,31 +157,33 @@ t.test('borked cmd usage', async t => { }) t.test('ls on named package', async t => { - npm.prefix = t.testdir({}) + const { distTag, result } = await mockDist(t) await distTag.exec(['ls', '@scoped/another']) t.matchSnapshot( - result, + result(), 'should list tags for the specified package' ) }) t.test('ls on missing package', async t => { - npm.prefix = t.testdir({}) + const { distTag, logs } = await mockDist(t) await t.rejects( distTag.exec(['ls', 'foo']), distTag.usage ) t.matchSnapshot( - log, + logs(), 'should log no dist-tag found msg' ) }) t.test('ls on missing name in 
current package', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - version: '1.0.0', - }), + const { distTag } = await mockDist(t, { + prefixDir: { + 'package.json': JSON.stringify({ + version: '1.0.0', + }), + }, }) await t.rejects( distTag.exec(['ls']), @@ -165,107 +193,78 @@ t.test('ls on missing name in current package', async t => { }) t.test('only named package arg', async t => { - npm.prefix = t.testdir({}) + const { distTag, result } = await mockDist(t) await distTag.exec(['@scoped/another']) t.matchSnapshot( - result, + result(), 'should default to listing tags for the specified package' ) }) -t.test('workspaces', t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'root', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], - }), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), - }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), - }, - 'workspace-c': { - 'package.json': JSON.stringify({ - name: 'workspace-c', - version: '1.0.0', - }), - }, - }) +t.test('workspaces', async t => { + const mockWorkspaces = async (t, exec = [], workspaces = true, prefixDir = {}) => { + const mock = await mockDist(t, { + prefixDir: { + ...fixtures.workspace, + ...prefixDir, + }, + config: workspaces === true ? { workspaces } : { workspace: workspaces }, + }) + + await mock.distTag.exec(exec) + + return mock + } t.test('no args', async t => { - await distTag.execWorkspaces([], []) - t.matchSnapshot(result, 'printed the expected output') + const { result } = await mockWorkspaces(t) + t.matchSnapshot(result(), 'printed the expected output') }) t.test('no args, one workspace', async t => { - await distTag.execWorkspaces([], ['workspace-a']) - t.matchSnapshot(result, 'printed the expected output') + const { result } = await mockWorkspaces(t, [], 'workspace-a') + t.matchSnapshot(result(), 'printed the expected output') }) - t.test('one arg -- .', async t => { - await distTag.execWorkspaces(['.'], []) - t.matchSnapshot(result, 'printed the expected output') + t.test('one arg -- cwd', async t => { + const { result } = await mockWorkspaces(t, ['.']) + t.matchSnapshot(result(), 'printed the expected output') }) t.test('one arg -- .@1, ignores version spec', async t => { - await distTag.execWorkspaces(['.@'], []) - t.matchSnapshot(result, 'printed the expected output') + const { result } = await mockWorkspaces(t, ['.@']) + t.matchSnapshot(result(), 'printed the expected output') }) t.test('one arg -- list', async t => { - await distTag.execWorkspaces(['list'], []) - t.matchSnapshot(result, 'printed the expected output') + const { result } = await mockWorkspaces(t, ['list']) + t.matchSnapshot(result(), 'printed the expected output') }) - t.test('two args -- list, .', async t => { - await distTag.execWorkspaces(['list', '.'], []) - t.matchSnapshot(result, 'printed the expected output') + t.test('two args -- list, cwd', async t => { + const { result } = await mockWorkspaces(t, ['list', '.']) + t.matchSnapshot(result(), 'printed the expected output') }) t.test('two args -- list, .@1, ignores version spec', async t => { - await distTag.execWorkspaces(['list', '.@'], []) - t.matchSnapshot(result, 'printed the expected output') + const { result } = await mockWorkspaces(t, ['list', '.@']) + t.matchSnapshot(result(), 'printed the expected output') }) t.test('two args -- list, @scoped/pkg, logs a warning and ignores workspaces', 
async t => { - await distTag.execWorkspaces(['list', '@scoped/pkg'], []) - t.match(log, 'Ignoring workspaces for specified package', 'logs a warning') - t.matchSnapshot(result, 'printed the expected output') + const { result, logs } = await mockWorkspaces(t, ['list', '@scoped/pkg']) + t.match(logs(), 'Ignoring workspaces for specified package', 'logs a warning') + t.matchSnapshot(result(), 'printed the expected output') }) t.test('no args, one failing workspace sets exitCode to 1', async t => { - npm.localPrefix = t.testdir({ + const { result, logs } = await mockWorkspaces(t, [], true, { 'package.json': JSON.stringify({ name: 'root', version: '1.0.0', workspaces: ['workspace-a', 'workspace-b', 'workspace-c', 'workspace-d'], }), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), - }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), - }, - 'workspace-c': { - 'package.json': JSON.stringify({ - name: 'workspace-c', - version: '1.0.0', - }), - }, + 'workspace-d': { 'package.json': JSON.stringify({ name: 'workspace-d', @@ -274,52 +273,41 @@ t.test('workspaces', t => { }, }) - await distTag.execWorkspaces([], []) - t.equal(process.exitCode, 1, 'set the error status') - process.exitCode = 0 - t.match(log, 'dist-tag ls Couldn\'t get dist-tag data for workspace-d@*', 'logs the error') - t.matchSnapshot(result, 'printed the expected output') + t.match(logs(), 'dist-tag ls Couldn\'t get dist-tag data for workspace-d@*', 'logs the error') + t.matchSnapshot(result(), 'printed the expected output') }) - - t.end() }) t.test('add new tag', async t => { - const _nrf = npmRegistryFetchMock - t.teardown(() => { - npmRegistryFetchMock = _nrf - }) - - npmRegistryFetchMock = async (url, opts) => { - t.equal(opts.method, 'PUT', 'should trigger request to add new tag') - t.equal(opts.body, '7.7.7', 'should point to expected version') - } - npm.prefix = t.testdir({}) + const { distTag, result, fetchOpts } = await mockDist(t) await distTag.exec(['add', '@scoped/another@7.7.7', 'c']) + const opts = fetchOpts() + t.equal(opts.method, 'PUT', 'should trigger request to add new tag') + t.equal(opts.body, '"7.7.7"', 'should point to expected version') t.matchSnapshot( - result, + result(), 'should return success msg' ) }) t.test('add using valid semver range as name', async t => { - npm.prefix = t.testdir({}) + const { distTag, logs } = await mockDist(t) await t.rejects( distTag.exec(['add', '@scoped/another@7.7.7', '1.0.0']), /Tag name must not be a valid SemVer range: 1.0.0/, 'should exit with semver range error' ) t.matchSnapshot( - log, + logs(), 'should return success msg' ) }) t.test('add missing args', async t => { - npm.prefix = t.testdir({}) - config.tag = '' - t.teardown(() => { - delete config.tag + const { distTag } = await mockDist(t, { + config: { + tag: '', + }, }) await t.rejects( distTag.exec(['add', '@scoped/another@7.7.7']), @@ -329,7 +317,7 @@ t.test('add missing args', async t => { }) t.test('add missing pkg name', async t => { - npm.prefix = t.testdir({}) + const { distTag } = await mockDist(t) await t.rejects( distTag.exec(['add', null]), distTag.usage, @@ -338,41 +326,35 @@ t.test('add missing pkg name', async t => { }) t.test('set existing version', async t => { - npm.prefix = t.testdir({}) + const { distTag, logs } = await mockDist(t) await distTag.exec(['set', '@scoped/another@0.6.0', 'b']) t.matchSnapshot( - log, + logs(), 'should log warn msg' ) }) t.test('remove existing tag', async t => 
{ - const _nrf = npmRegistryFetchMock - t.teardown(() => { - npmRegistryFetchMock = _nrf - }) - - npmRegistryFetchMock = async (url, opts) => { - t.equal(opts.method, 'DELETE', 'should trigger request to remove tag') - } - npm.prefix = t.testdir({}) + const { distTag, result, logs, fetchOpts } = await mockDist(t) await distTag.exec(['rm', '@scoped/another', 'c']) - t.matchSnapshot(log, 'should log remove info') - t.matchSnapshot(result, 'should return success msg') + const opts = fetchOpts() + t.equal(opts.method, 'DELETE', 'should trigger request to remove tag') + t.matchSnapshot(logs(), 'should log remove info') + t.matchSnapshot(result(), 'should return success msg') }) t.test('remove non-existing tag', async t => { - npm.prefix = t.testdir({}) + const { distTag, logs } = await mockDist(t) await t.rejects( distTag.exec(['rm', '@scoped/another', 'nonexistent']), /nonexistent is not a dist-tag on @scoped\/another/, 'should exit with error' ) - t.matchSnapshot(log, 'should log error msg') + t.matchSnapshot(logs(), 'should log error msg') }) t.test('remove missing pkg name', async t => { - npm.prefix = t.testdir({}) + const { distTag } = await mockDist(t) await t.rejects( distTag.exec(['rm', null]), distTag.usage, @@ -381,14 +363,12 @@ t.test('remove missing pkg name', async t => { }) t.test('completion', async t => { - const { completion } = distTag - t.plan(2) + const { distTag } = await mockDist(t) - const match = completion({ conf: { argv: { remain: ['npm', 'dist-tag'] } } }) + const match = distTag.completion(['npm', 'dist-tag']) t.resolveMatch(match, ['add', 'rm', 'ls'], 'should list npm dist-tag commands for completion') - const noMatch = completion({ conf: { argv: { remain: ['npm', 'dist-tag', 'foobar'] } } }) + const noMatch = distTag.completion(['npm', 'dist-tag', 'foobar']) t.resolveMatch(noMatch, []) - t.end() }) diff --git a/deps/npm/test/lib/commands/docs.js b/deps/npm/test/lib/commands/docs.js index b2a65786bf4d8e..e11df6b07bc5e6 100644 --- a/deps/npm/test/lib/commands/docs.js +++ b/deps/npm/test/lib/commands/docs.js @@ -1,46 +1,48 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm.js') -const { join, sep } = require('path') +const mockNpm = require('../../fixtures/mock-npm.js') +const { sep } = require('path') -const pkgDirs = t.testdir({ - 'package.json': JSON.stringify({ - name: 'thispkg', - version: '1.2.3', - homepage: 'https://example.com', - }), - nodocs: { +const fixtures = { + pkg: { 'package.json': JSON.stringify({ - name: 'nodocs', + name: 'thispkg', version: '1.2.3', + homepage: 'https://example.com', }), - }, - docsurl: { - 'package.json': JSON.stringify({ - name: 'docsurl', - version: '1.2.3', - homepage: 'https://bugzilla.localhost/docsurl', - }), - }, - repourl: { - 'package.json': JSON.stringify({ - name: 'repourl', - version: '1.2.3', - repository: 'https://github.com/foo/repourl', - }), - }, - repoobj: { - 'package.json': JSON.stringify({ - name: 'repoobj', - version: '1.2.3', - repository: { url: 'https://github.com/foo/repoobj' }, - }), - }, - repourlobj: { - 'package.json': JSON.stringify({ - name: 'repourlobj', - version: '1.2.3', - repository: { url: { works: false } }, - }), + nodocs: { + 'package.json': JSON.stringify({ + name: 'nodocs', + version: '1.2.3', + }), + }, + docsurl: { + 'package.json': JSON.stringify({ + name: 'docsurl', + version: '1.2.3', + homepage: 'https://bugzilla.localhost/docsurl', + }), + }, + repourl: { + 'package.json': JSON.stringify({ + name: 'repourl', + version: '1.2.3', 
+ repository: 'https://github.com/foo/repourl', + }), + }, + repoobj: { + 'package.json': JSON.stringify({ + name: 'repoobj', + version: '1.2.3', + repository: { url: 'https://github.com/foo/repoobj' }, + }), + }, + repourlobj: { + 'package.json': JSON.stringify({ + name: 'repourlobj', + version: '1.2.3', + repository: { url: { works: false } }, + }), + }, }, workspaces: { 'package.json': JSON.stringify({ @@ -69,26 +71,31 @@ const pkgDirs = t.testdir({ }, }), }, -}) - -// keep a tally of which urls got opened -let opened = {} -const openUrl = async (npm, url, errMsg) => { - opened[url] = opened[url] || 0 - opened[url]++ } -const Docs = t.mock('../../../lib/commands/docs.js', { - '../../../lib/utils/open-url.js': openUrl, -}) -const flatOptions = {} -const npm = mockNpm({ flatOptions }) -const docs = new Docs(npm) +const setup = async (t, { prefixDir = fixtures.pkg, config } = {}) => { + // keep a tally of which urls got opened + const opened = {} + const openUrl = async (_, url) => { + opened[url] = opened[url] || 0 + opened[url]++ + } -t.afterEach(() => opened = {}) + const res = await mockNpm(t, { + prefixDir, + mocks: { + '{LIB}/utils/open-url.js': openUrl, + }, + config, + }) + + return { + ...res, + opened, + } +} -t.test('open docs urls', t => { - npm.localPrefix = pkgDirs +t.test('open docs urls', async t => { const expect = { nodocs: 'https://www.npmjs.com/package/nodocs', docsurl: 'https://bugzilla.localhost/docsurl', @@ -97,51 +104,60 @@ t.test('open docs urls', t => { repourlobj: 'https://www.npmjs.com/package/repourlobj', '.': 'https://example.com', } - const keys = Object.keys(expect) - t.plan(keys.length) - keys.forEach(pkg => { - t.test(pkg, async t => { - await docs.exec([['.', pkg].join(sep)]) - const url = expect[pkg] - t.match({ - [url]: 1, - }, opened, `opened ${url}`, { opened }) + + for (const [key, url] of Object.entries(expect)) { + await t.test(`open ${key} url`, async t => { + const { npm, opened } = await setup(t) + await npm.exec('docs', [['.', key].join(sep)]) + t.strictSame({ [url]: 1 }, opened, `opened ${url}`) }) - }) + } }) t.test('open default package if none specified', async t => { - await docs.exec([]) - t.equal(opened['https://example.com'], 1, 'opened expected url', { opened }) + const { npm, opened } = await setup(t) + + await npm.exec('docs', []) + t.strictSame({ 'https://example.com': 1 }, opened, 'opened expected url') }) -t.test('workspaces', (t) => { - npm.localPrefix = join(pkgDirs, 'workspaces') - t.test('all workspaces', async t => { - await docs.execWorkspaces([], []) - t.match({ +t.test('workspaces', async (t) => { + await t.test('all workspaces', async t => { + const { npm, opened } = await setup(t, { + prefixDir: fixtures.workspaces, + config: { workspaces: true }, + }) + await npm.exec('docs', []) + t.strictSame({ 'http://docs.workspace-a/': 1, 'https://github.com/npm/workspace-b#readme': 1, }, opened, 'opened two valid docs urls') }) - t.test('one workspace', async t => { - await docs.execWorkspaces([], ['workspace-a']) - t.match({ + await t.test('one workspace', async t => { + const { npm, opened } = await setup(t, { + prefixDir: fixtures.workspaces, + config: { workspace: 'workspace-a' }, + }) + await npm.exec('docs', []) + t.strictSame({ 'http://docs.workspace-a/': 1, }, opened, 'opened one requested docs urls') }) - t.test('invalid workspace', async t => { + await t.test('invalid workspace', async t => { + const { npm, opened } = await setup(t, { + prefixDir: fixtures.workspaces, + config: { workspace: 
'workspace-x' }, + }) await t.rejects( - docs.execWorkspaces([], ['workspace-x']), + npm.exec('docs', []), /No workspaces found/ ) await t.rejects( - docs.execWorkspaces([], ['workspace-x']), + npm.exec('docs', []), /workspace-x/ ) t.match({}, opened, 'opened no docs urls') }) - t.end() }) diff --git a/deps/npm/test/lib/commands/doctor.js b/deps/npm/test/lib/commands/doctor.js index a4602183e69383..d1a88299e69ae9 100644 --- a/deps/npm/test/lib/commands/doctor.js +++ b/deps/npm/test/lib/commands/doctor.js @@ -46,10 +46,7 @@ const dirs = { }, globalPrefixDir: { bin: {}, - lib: { - node_modules: { - }, - }, + node_modules: {}, }, } @@ -57,26 +54,25 @@ const globals = ({ globalPrefix }) => { return { process: { 'env.PATH': `${globalPrefix}:${path.join(globalPrefix, 'bin')}`, - platform: 'test-not-windows', version: 'v1.0.0', }, } } -// getuid and getgid do not exist in windows, so we shim them -// to return 0, as that is the value that lstat will assign the -// gid and uid properties for fs.Stats objects -if (process.platform === 'win32') { - mockGlobals(t, { - process: { - getuid: () => 0, - getgid: () => 0, - }, - }) -} +mockGlobals(t, { + process: { + // set platform to not-windows before any tests because mockNpm + // sets the platform specific location of node_modules based on it + platform: 'test-not-windows', + // getuid and getgid do not exist in windows, so we shim them + // to return 0, as that is the value that lstat will assign the + // gid and uid properties for fs.Stats objects + ...(process.platform === 'win32' ? { getuid: () => 0, getgid: () => 0 } : {}), + }, +}) const mocks = { - '../../package.json': { version: '1.0.0' }, + '{ROOT}/package.json': { version: '1.0.0' }, which: async () => '/path/to/git', cacache: { verify: () => { @@ -106,13 +102,15 @@ t.test('all clear in color', async t => { mocks, globals, ...dirs, + config: { + color: 'always', + }, }) tnock(t, npm.config.get('registry')) .get('/-/ping?write=true').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) - npm.config.set('color', 'always') await npm.exec('doctor', []) t.matchSnapshot(joinedOutput(), 'everything is ok in color') t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs') @@ -178,13 +176,15 @@ t.test('ping 404 in color', async t => { mocks, globals, ...dirs, + config: { + color: 'always', + }, }) tnock(t, npm.config.get('registry')) .get('/-/ping?write=true').reply(404, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) - npm.config.set('color', 'always') await t.rejects(npm.exec('doctor', [])) t.matchSnapshot(joinedOutput(), 'ping 404 in color') t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs') @@ -247,7 +247,6 @@ t.test('node out of date - lts', async t => { ...g, process: { ...g.process, - platform: 'test-not-windows', version: 'v0.0.1', }, } @@ -358,6 +357,7 @@ t.test('missing global directories', async t => { mocks, globals, prefixDir: dirs.prefixDir, + globalPrefixDir: {}, }) tnock(t, npm.config.get('registry')) .get('/-/ping?write=true').reply(200, '{}') diff --git a/deps/npm/test/lib/commands/edit.js b/deps/npm/test/lib/commands/edit.js index dc7114892970d7..02621f1aef9825 100644 --- a/deps/npm/test/lib/commands/edit.js +++ b/deps/npm/test/lib/commands/edit.js @@ -9,7 +9,7 @@ const npmConfig = { config: { 'ignore-scripts': false, editor: 'testeditor', - 
scriptShell: process.platform === 'win32' ? process.env.COMSPEC : 'sh', + 'script-shell': process.platform === 'win32' ? process.env.COMSPEC : 'sh', }, prefixDir: { node_modules: { @@ -38,7 +38,7 @@ t.test('npm edit', async t => { const semverPath = path.resolve(npm.prefix, 'node_modules', 'semver') spawk.spawn('testeditor', [semverPath]) - const scriptShell = npm.config.get('scriptShell') + const scriptShell = npm.config.get('script-shell') const scriptArgs = isCmdRe.test(scriptShell) ? ['/d', '/s', '/c', 'testinstall'] : ['-c', 'testinstall'] @@ -54,7 +54,7 @@ t.test('rebuild failure', async t => { const semverPath = path.resolve(npm.prefix, 'node_modules', 'semver') spawk.spawn('testeditor', [semverPath]) - const scriptShell = npm.config.get('scriptShell') + const scriptShell = npm.config.get('script-shell') const scriptArgs = isCmdRe.test(scriptShell) ? ['/d', '/s', '/c', 'testinstall'] : ['-c', 'testinstall'] @@ -89,7 +89,7 @@ t.test('npm edit editor has flags', async t => { const semverPath = path.resolve(npm.prefix, 'node_modules', 'semver') spawk.spawn('testeditor', ['--flag', semverPath]) - const scriptShell = npm.config.get('scriptShell') + const scriptShell = npm.config.get('script-shell') const scriptArgs = isCmdRe.test(scriptShell) ? ['/d', '/s', '/c', 'testinstall'] : ['-c', 'testinstall'] diff --git a/deps/npm/test/lib/commands/exec.js b/deps/npm/test/lib/commands/exec.js index 1a03b1a2e6a5b4..2fd11f40379f1f 100644 --- a/deps/npm/test/lib/commands/exec.js +++ b/deps/npm/test/lib/commands/exec.js @@ -38,9 +38,6 @@ t.test('registry package', async t => { require('fs').writeFileSync('npm-exec-test-success', '')`, }, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await registry.package({ @@ -75,9 +72,6 @@ t.test('--prefix', async t => { require('fs').writeFileSync('npm-exec-test-success', '')`, }, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) // This is what `--prefix` does @@ -125,9 +119,6 @@ t.test('workspaces', async t => { }), }, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await registry.package({ manifest, diff --git a/deps/npm/test/lib/commands/explain.js b/deps/npm/test/lib/commands/explain.js index 71bb1752205c36..3262dfdce87af8 100644 --- a/deps/npm/test/lib/commands/explain.js +++ b/deps/npm/test/lib/commands/explain.js @@ -1,37 +1,33 @@ const t = require('tap') -const npm = { - prefix: null, - color: true, - flatOptions: { workspacesEnabled: true }, - output: (...args) => { - OUTPUT.push(args) - }, - config: { - validate: () => {}, - get: (key) => { - if (key === 'location') { - return 'project' - } - }, - isDefault: () => {}, - }, -} const { resolve } = require('path') +const mockNpm = require('../../fixtures/mock-npm.js') -const OUTPUT = [] +const mockExplain = async (t, opts) => { + const mock = await mockNpm(t, { + mocks: { + // keep the snapshots pared down a bit, since this has its own tests. + '{LIB}/utils/explain-dep.js': { + explainNode: (expl, depth, color) => { + return `${expl.name}@${expl.version} depth=${depth} color=${color}` + }, + }, + }, + ...opts, + }) -const Explain = t.mock('../../../lib/commands/explain.js', { + const usage = await mock.npm.cmd('explain').then(c => c.usage) - // keep the snapshots pared down a bit, since this has its own tests. 
- '../../../lib/utils/explain-dep.js': { - explainNode: (expl, depth, color) => { - return `${expl.name}@${expl.version} depth=${depth} color=${color}` + return { + ...mock, + explain: { + usage, + exec: (args) => mock.npm.exec('explain', args), }, - }, -}) -const explain = new Explain(npm) + } +} t.test('no args throws usage', async t => { + const { explain } = await mockExplain(t) await t.rejects( explain.exec([]), explain.usage @@ -39,7 +35,7 @@ t.test('no args throws usage', async t => { }) t.test('no match throws not found', async t => { - npm.prefix = t.testdir() + const { explain } = await mockExplain(t) await t.rejects( explain.exec(['foo@1.2.3', 'node_modules/baz']), 'No dependencies found matching foo@1.2.3, node_modules/baz' @@ -47,7 +43,7 @@ t.test('no match throws not found', async t => { }) t.test('invalid package name throws not found', async t => { - npm.prefix = t.testdir() + const { explain } = await mockExplain(t) const badName = ' not a valid package name ' await t.rejects( explain.exec([`${badName}@1.2.3`]), @@ -55,96 +51,106 @@ t.test('invalid package name throws not found', async t => { ) }) -t.test('explain some nodes', t => { - t.afterEach(() => { - OUTPUT.length = 0 - npm.flatOptions.json = false - }) - - npm.prefix = t.testdir({ - node_modules: { - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.2.3', - dependencies: { - bar: '*', - }, - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.2.3', - }), - }, - baz: { - 'package.json': JSON.stringify({ - name: 'baz', - version: '1.2.3', - dependencies: { - foo: '*', - bar: '2', - }, - }), +t.test('explain some nodes', async t => { + const mockNodes = async (t, config = {}) => { + const mock = await mockExplain(t, { + prefixDir: { node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.2.3', + dependencies: { + bar: '*', + }, + }), + }, bar: { 'package.json': JSON.stringify({ name: 'bar', - version: '2.3.4', + version: '1.2.3', }), }, - extra: { + baz: { 'package.json': JSON.stringify({ - name: 'extra', - version: '99.9999.999999', - description: 'extraneous package', + name: 'baz', + version: '1.2.3', + dependencies: { + foo: '*', + bar: '2', + }, }), + node_modules: { + bar: { + 'package.json': JSON.stringify({ + name: 'bar', + version: '2.3.4', + }), + }, + extra: { + 'package.json': JSON.stringify({ + name: 'extra', + version: '99.9999.999999', + description: 'extraneous package', + }), + }, + }, }, }, + 'package.json': JSON.stringify({ + dependencies: { + baz: '1', + }, + }), }, - }, - 'package.json': JSON.stringify({ - dependencies: { - baz: '1', + config: { + color: 'always', + ...config, }, - }), - }) + }) + + return mock + } t.test('works with the location', async t => { const path = 'node_modules/foo' + const { explain, joinedOutput } = await mockNodes(t) await explain.exec([path]) - t.strictSame(OUTPUT, [['foo@1.2.3 depth=Infinity color=true']]) + t.strictSame(joinedOutput(), 'foo@1.2.3 depth=Infinity color=true') }) t.test('works with a full actual path', async t => { + const { npm, explain, joinedOutput } = await mockNodes(t) const path = resolve(npm.prefix, 'node_modules/foo') await explain.exec([path]) - t.strictSame(OUTPUT, [['foo@1.2.3 depth=Infinity color=true']]) + t.strictSame(joinedOutput(), 'foo@1.2.3 depth=Infinity color=true') }) t.test('finds all nodes by name', async t => { + const { explain, joinedOutput } = await mockNodes(t) await explain.exec(['bar']) - t.strictSame(OUTPUT, [[ + 
t.strictSame(joinedOutput(), 'bar@1.2.3 depth=Infinity color=true\n\n' + - 'bar@2.3.4 depth=Infinity color=true', - ]]) + 'bar@2.3.4 depth=Infinity color=true' + ) }) t.test('finds only nodes that match the spec', async t => { + const { explain, joinedOutput } = await mockNodes(t) await explain.exec(['bar@1']) - t.strictSame(OUTPUT, [['bar@1.2.3 depth=Infinity color=true']]) + t.strictSame(joinedOutput(), 'bar@1.2.3 depth=Infinity color=true') }) t.test('finds extraneous nodes', async t => { + const { explain, joinedOutput } = await mockNodes(t) await explain.exec(['extra']) - t.strictSame(OUTPUT, [['extra@99.9999.999999 depth=Infinity color=true']]) + t.strictSame(joinedOutput(), 'extra@99.9999.999999 depth=Infinity color=true') }) t.test('json output', async t => { - npm.flatOptions.json = true + const { explain, joinedOutput } = await mockNodes(t, { json: true }) await explain.exec(['node_modules/foo']) - t.match(JSON.parse(OUTPUT[0][0]), [{ + t.match(JSON.parse(joinedOutput()), [{ name: 'foo', version: '1.2.3', dependents: Array, @@ -152,182 +158,126 @@ t.test('explain some nodes', t => { }) t.test('report if no nodes found', async t => { + const { explain } = await mockNodes(t) await t.rejects( explain.exec(['asdf/foo/bar', 'quux@1.x']), 'No dependencies found matching asdf/foo/bar, quux@1.x' ) }) - t.end() }) t.test('workspaces', async t => { - npm.localPrefix = npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-project', - version: '1.0.0', - workspaces: ['packages/*'], - dependencies: { - abbrev: '^1.0.0', - }, - }), - node_modules: { - a: t.fixture('symlink', '../packages/a'), - b: t.fixture('symlink', '../packages/b'), - c: t.fixture('symlink', '../packages/c'), - once: { + const mockWorkspaces = async (t, exec = [], workspaces = true) => { + const mock = await mockExplain(t, { + prefixDir: { 'package.json': JSON.stringify({ - name: 'once', + name: 'workspaces-project', version: '1.0.0', + workspaces: ['packages/*'], dependencies: { - wrappy: '2.0.0', + abbrev: '^1.0.0', }, }), - }, - abbrev: { - 'package.json': JSON.stringify({ - name: 'abbrev', - version: '1.0.0', - }), - }, - wrappy: { - 'package.json': JSON.stringify({ - name: 'wrappy', - version: '2.0.0', - }), - }, - }, - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - once: '1.0.0', + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: t.fixture('symlink', '../packages/c'), + once: { + 'package.json': JSON.stringify({ + name: 'once', + version: '1.0.0', + dependencies: { + wrappy: '2.0.0', + }, + }), }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - abbrev: '^1.0.0', + abbrev: { + 'package.json': JSON.stringify({ + name: 'abbrev', + version: '1.0.0', + }), }, - }), + wrappy: { + 'package.json': JSON.stringify({ + name: 'wrappy', + version: '2.0.0', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + once: '1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + abbrev: '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, + }, }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), + config: { + ...(typeof workspaces === 'boolean' ? 
{ workspaces } : { workspace: workspaces }), + color: 'always', }, - }, - }) + }) - await explain.exec(['wrappy']) - t.strictSame( - OUTPUT, - [['wrappy@2.0.0 depth=Infinity color=true']], - 'should explain workspaces deps' - ) - OUTPUT.length = 0 + await mock.explain.exec(exec) - await explain.execWorkspaces(['wrappy'], ['a']) + return mock.joinedOutput() + } - t.strictSame( - OUTPUT, - [ - ['wrappy@2.0.0 depth=Infinity color=true'], - ], - 'should explain deps when filtering to a single ws' - ) - OUTPUT.length = 0 + t.test('should explain workspaces deps', async t => { + const OUTPUT = await mockWorkspaces(t, ['wrappy']) + t.strictSame( + OUTPUT, + 'wrappy@2.0.0 depth=Infinity color=true' + ) + }) - await explain.execWorkspaces(['abbrev'], []) - t.strictSame( - OUTPUT, - [ - ['abbrev@1.0.0 depth=Infinity color=true'], - ], - 'should explain deps of workspaces only' - ) - OUTPUT.length = 0 + t.test('should explain deps when filtering to a single ws', async t => { + const OUTPUT = await mockWorkspaces(t, ['wrappy'], ['a']) + t.strictSame( + OUTPUT, + 'wrappy@2.0.0 depth=Infinity color=true' + ) + }) - await t.rejects( - explain.execWorkspaces(['abbrev'], ['a']), - 'No dependencies found matching abbrev', - 'should throw usage if dep not found within filtered ws' - ) -}) + t.test('should explain deps of workspaces only', async t => { + const OUTPUT = await mockWorkspaces(t, ['abbrev']) + t.strictSame( + OUTPUT, + 'abbrev@1.0.0 depth=Infinity color=true' + ) + }) -t.test('workspaces disabled', async t => { - npm.localPrefix = npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-project', - version: '1.0.0', - workspaces: ['packages/*'], - dependencies: { - abbrev: '^1.0.0', - }, - }), - node_modules: { - a: t.fixture('symlink', '../packages/a'), - b: t.fixture('symlink', '../packages/b'), - c: t.fixture('symlink', '../packages/c'), - once: { - 'package.json': JSON.stringify({ - name: 'once', - version: '1.0.0', - dependencies: { - wrappy: '2.0.0', - }, - }), - }, - abbrev: { - 'package.json': JSON.stringify({ - name: 'abbrev', - version: '1.0.0', - }), - }, - wrappy: { - 'package.json': JSON.stringify({ - name: 'wrappy', - version: '2.0.0', - }), - }, - }, - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - once: '1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - abbrev: '^1.0.0', - }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), - }, - }, + t.test('should throw usage if dep not found within filtered ws', async t => { + await t.rejects( + mockWorkspaces(t, ['abbrev'], ['a']), + 'No dependencies found matching abbrev' + ) }) - npm.flatOptions.workspacesEnabled = false - await t.rejects( - explain.exec(['once']), - 'No dependencies found matching once', - 'should throw usage if dep not found when excluding ws' - ) + t.test('workspaces disabled', async t => { + await t.rejects( + mockWorkspaces(t, ['once'], false), + 'No dependencies found matching once', + 'should throw usage if dep not found when excluding ws' + ) + }) }) diff --git a/deps/npm/test/lib/commands/explore.js b/deps/npm/test/lib/commands/explore.js index af6f4df908677b..786a34a8e29882 100644 --- a/deps/npm/test/lib/commands/explore.js +++ b/deps/npm/test/lib/commands/explore.js @@ -1,300 +1,149 @@ const t = require('tap') - -let RPJ_ERROR = null -let RPJ_CALLED = '' -const mockRPJ = async path => { - if (RPJ_ERROR) { - try { +const 
mockNpm = require('../../fixtures/mock-npm') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + +const mockExplore = async (t, exec, { + RPJ_ERROR = null, + RUN_SCRIPT_ERROR = null, + RUN_SCRIPT_EXIT_CODE = 0, + RUN_SCRIPT_SIGNAL = null, +} = {}) => { + let RPJ_CALLED = '' + const mockRPJ = async path => { + if (RPJ_ERROR) { throw RPJ_ERROR - } finally { - RPJ_ERROR = null } + RPJ_CALLED = cleanCwd(path) + return { some: 'package' } } - RPJ_CALLED = path - return { some: 'package' } -} -let RUN_SCRIPT_ERROR = null -let RUN_SCRIPT_EXIT_CODE = 0 -let RUN_SCRIPT_SIGNAL = null -let RUN_SCRIPT_EXEC = null -const mockRunScript = ({ pkg, banner, path, event, stdio }) => { - if (event !== '_explore') { - throw new Error('got wrong event name') - } + let RUN_SCRIPT_EXEC = null + const mockRunScript = ({ pkg, event }) => { + if (event !== '_explore') { + throw new Error('got wrong event name') + } - RUN_SCRIPT_EXEC = pkg.scripts._explore + RUN_SCRIPT_EXEC = pkg.scripts._explore - if (RUN_SCRIPT_ERROR) { - try { + if (RUN_SCRIPT_ERROR) { return Promise.reject(RUN_SCRIPT_ERROR) - } finally { - RUN_SCRIPT_ERROR = null } - } - if (RUN_SCRIPT_EXIT_CODE || RUN_SCRIPT_SIGNAL) { - return Promise.reject(Object.assign(new Error('command failed'), { - code: RUN_SCRIPT_EXIT_CODE, - signal: RUN_SCRIPT_SIGNAL, - })) - } + if (RUN_SCRIPT_EXIT_CODE || RUN_SCRIPT_SIGNAL) { + return Promise.reject(Object.assign(new Error('command failed'), { + code: RUN_SCRIPT_EXIT_CODE, + signal: RUN_SCRIPT_SIGNAL, + })) + } - return Promise.resolve({ code: 0, signal: null }) -} + return Promise.resolve({ code: 0, signal: null }) + } -const output = [] -const logs = [] -const getExplore = (windows) => { - const Explore = t.mock('../../../lib/commands/explore.js', { - path: require('path')[windows ? 'win32' : 'posix'], - 'read-package-json-fast': mockRPJ, - '@npmcli/run-script': mockRunScript, - 'proc-log': { - error: (...msg) => logs.push(msg), - warn: () => {}, - }, - npmlog: { - disableProgress: () => {}, - enableProgress: () => {}, - }, - }) - const npm = { - dir: windows ? 
'c:\\npm\\dir' : '/npm/dir', - flatOptions: { - shell: 'shell-command', - }, - output: out => { - output.push(out) + const mock = await mockNpm(t, { + mocks: { + 'read-package-json-fast': mockRPJ, + '@npmcli/run-script': mockRunScript, }, config: { - validate: () => {}, + shell: 'shell-command', }, - } - return new Explore(npm) -} - -const windowsExplore = getExplore(true) -const posixExplore = getExplore(false) - -t.test('basic interactive', t => { - t.afterEach(() => output.length = 0) - - t.test('windows', async t => { - await windowsExplore.exec(['pkg']) - - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', - RUN_SCRIPT_EXEC: 'shell-command', - }) - t.strictSame(output, [ - "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n", - ]) }) - t.test('posix', async t => { - await posixExplore.exec(['pkg']) + await mock.npm.exec('explore', exec) - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'shell-command', - }) - t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", - ]) - }) - - t.end() -}) + return { + ...mock, + RPJ_CALLED, + RUN_SCRIPT_EXEC, + output: cleanCwd(mock.joinedOutput()).trim(), + } +} -t.test('interactive tracks exit code', t => { - const { exitCode } = process - t.beforeEach(() => { - process.exitCode = exitCode - RUN_SCRIPT_EXIT_CODE = 99 - }) - t.afterEach(() => { - RUN_SCRIPT_EXIT_CODE = 0 - output.length = 0 - process.exitCode = exitCode - }) +t.test('basic interactive', async t => { + const { + output, + RPJ_CALLED, + RUN_SCRIPT_EXEC, + } = await mockExplore(t, ['pkg']) - t.test('windows', async t => { - await windowsExplore.exec(['pkg']) + t.match(RPJ_CALLED, /\/pkg\/package.json$/) + t.strictSame(RUN_SCRIPT_EXEC, 'shell-command') + t.match(output, /Exploring \{CWD\}\/[\w-_/]+\nType 'exit' or \^D when finished/) +}) - t.strictSame({ +t.test('interactive tracks exit code', async t => { + t.test('code', async t => { + const { + output, RPJ_CALLED, RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', - RUN_SCRIPT_EXEC: 'shell-command', - }) - t.strictSame(output, [ - "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n", - ]) - t.equal(process.exitCode, 99) - }) + } = await mockExplore(t, ['pkg'], { RUN_SCRIPT_EXIT_CODE: 99 }) - t.test('posix', async t => { - await posixExplore.exec(['pkg']) + t.match(RPJ_CALLED, /\/pkg\/package.json$/) + t.strictSame(RUN_SCRIPT_EXEC, 'shell-command') + t.match(output, /Exploring \{CWD\}\/[\w-_/]+\nType 'exit' or \^D when finished/) - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'shell-command', - }) - t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", - ]) t.equal(process.exitCode, 99) }) - t.test('posix spawn fail', async t => { - RUN_SCRIPT_ERROR = Object.assign(new Error('glorb'), { + t.test('spawn fail', async t => { + const RUN_SCRIPT_ERROR = Object.assign(new Error('glorb'), { code: 33, }) await t.rejects( - posixExplore.exec(['pkg']), + mockExplore(t, ['pkg'], { RUN_SCRIPT_ERROR }), { message: 'glorb', code: 33 } ) - t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", - ]) t.equal(process.exitCode, 33) }) - t.test('posix spawn fail, 0 exit code', async t => { - RUN_SCRIPT_ERROR = Object.assign(new Error('glorb'), { + t.test('spawn fail, 0 exit code', async t => { + const 
RUN_SCRIPT_ERROR = Object.assign(new Error('glorb'), { code: 0, }) await t.rejects( - posixExplore.exec(['pkg']), + mockExplore(t, ['pkg'], { RUN_SCRIPT_ERROR }), { message: 'glorb', code: 0 } ) - t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", - ]) t.equal(process.exitCode, 1) }) - t.test('posix spawn fail, no exit code', async t => { - RUN_SCRIPT_ERROR = Object.assign(new Error('command failed'), { + t.test('spawn fail, no exit code', async t => { + const RUN_SCRIPT_ERROR = Object.assign(new Error('command failed'), { code: 'EPROBLEM', }) await t.rejects( - posixExplore.exec(['pkg']), + mockExplore(t, ['pkg'], { RUN_SCRIPT_ERROR }), { message: 'command failed', code: 'EPROBLEM' } ) - t.strictSame(output, [ - "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n", - ]) t.equal(process.exitCode, 1) }) - - t.end() }) -t.test('basic non-interactive', t => { - t.afterEach(() => output.length = 0) - - t.test('windows', async t => { - await windowsExplore.exec(['pkg', 'ls']) - - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', - RUN_SCRIPT_EXEC: 'ls', - }) - t.strictSame(output, []) - }) - - t.test('posix', async t => { - await posixExplore.exec(['pkg', 'ls']) +t.test('basic non-interactive', async t => { + const { + output, + RPJ_CALLED, + RUN_SCRIPT_EXEC, + } = await mockExplore(t, ['pkg', 'ls']) - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'ls', - }) - t.strictSame(output, []) - t.end() - }) + t.match(RPJ_CALLED, /\/pkg\/package.json$/) + t.strictSame(RUN_SCRIPT_EXEC, 'ls') - t.end() + t.strictSame(output, '') }) -t.test('signal fails non-interactive', t => { - const { exitCode } = process - t.afterEach(() => { - output.length = 0 - logs.length = 0 - }) - - t.beforeEach(() => { - RUN_SCRIPT_SIGNAL = 'SIGPROBLEM' - RUN_SCRIPT_EXIT_CODE = null - process.exitCode = exitCode - }) - t.afterEach(() => process.exitCode = exitCode) - - t.test('windows', async t => { - await t.rejects( - windowsExplore.exec(['pkg', 'ls']), - { - message: 'command failed', - signal: 'SIGPROBLEM', - } - ) - - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: 'c:\\npm\\dir\\pkg\\package.json', - RUN_SCRIPT_EXEC: 'ls', - }) - t.strictSame(output, []) - }) - - t.test('posix', async t => { - await t.rejects( - posixExplore.exec(['pkg', 'ls']), - { - message: 'command failed', - signal: 'SIGPROBLEM', - } - ) - - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'ls', - }) - t.strictSame(output, []) - t.end() - }) - - t.end() +t.test('signal fails non-interactive', async t => { + await t.rejects( + mockExplore(t, ['pkg', 'ls'], { RUN_SCRIPT_SIGNAL: 'SIGPROBLEM' }), + { + message: 'command failed', + signal: 'SIGPROBLEM', + } + ) }) -t.test('usage if no pkg provided', t => { - t.teardown(() => { - output.length = 0 - }) +t.test('usage if no pkg provided', async t => { const noPkg = [ [], ['foo/../..'], @@ -303,41 +152,22 @@ t.test('usage if no pkg provided', t => { ['..'], ['../..'], ] - t.plan(noPkg.length) + for (const args of noPkg) { t.test(JSON.stringify(args), async t => { await t.rejects( - posixExplore.exec(args), + mockExplore(t, args), 'Usage:' ) - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'ls', - }) }) } }) t.test('pkg not installed', async t => { - t.teardown(() => { - logs.length = 
0 - }) - RPJ_ERROR = new Error('plurple') + const RPJ_ERROR = new Error('plurple') await t.rejects( - posixExplore.exec(['pkg', 'ls']), + mockExplore(t, ['pkg', 'ls'], { RPJ_ERROR }), { message: 'plurple' } ) - t.strictSame({ - RPJ_CALLED, - RUN_SCRIPT_EXEC, - }, { - RPJ_CALLED: '/npm/dir/pkg/package.json', - RUN_SCRIPT_EXEC: 'ls', - }) - t.strictSame(output, []) - t.match(logs, [['explore', `It doesn't look like pkg is installed.`]]) }) diff --git a/deps/npm/test/lib/commands/fund.js b/deps/npm/test/lib/commands/fund.js index b82ed93fe5c7ea..277190e7a1a481 100644 --- a/deps/npm/test/lib/commands/fund.js +++ b/deps/npm/test/lib/commands/fund.js @@ -1,7 +1,8 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const mockNpm = require('../../fixtures/mock-npm') const version = '1.0.0' + const funding = { type: 'individual', url: 'http://example.com/donate', @@ -172,78 +173,64 @@ const conflictingFundingPackages = { }, } -let result = '' -let printUrl = '' -const config = { - color: false, - json: false, - global: false, - unicode: false, - which: null, -} -const openUrl = async (npm, url, msg) => { - if (url === 'http://npmjs.org') { - throw new Error('ERROR') - } +const setup = async (t, { openUrl, ...opts } = {}) => { + const openedUrls = [] + + const res = await mockNpm(t, { + ...opts, + mocks: { + '@npmcli/promise-spawn': { open: openUrl || (async url => openedUrls.push(url)) }, + pacote: { + manifest: arg => + arg.name === 'ntl' + ? Promise.resolve({ funding: 'http://example.com/pacote' }) + : Promise.reject(new Error('ERROR')), + }, + ...opts.mocks, + }, + }) - if (config.json) { - printUrl = JSON.stringify({ - title: msg, - url: url, - }) - } else { - printUrl = `${msg}:\n ${url}` + return { + ...res, + openedUrls: () => openedUrls, + fund: (...args) => res.npm.exec('fund', args), } } -const Fund = t.mock('../../../lib/commands/fund.js', { - '../../../lib/utils/open-url.js': openUrl, - pacote: { - manifest: arg => - arg.name === 'ntl' - ? 
Promise.resolve({ - funding: 'http://example.com/pacote', - }) - : Promise.reject(new Error('ERROR')), - }, -}) -const npm = mockNpm({ - config, - output: msg => { - result += msg + '\n' - }, -}) -const fund = new Fund(npm) -t.afterEach(() => { - printUrl = '' - result = '' -}) t.test('fund with no package containing funding', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'no-funding-package', - version: '0.0.0', - }), + const { fund, joinedOutput } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'no-funding-package', + version: '0.0.0', + }), + }, + config: {}, }) - await fund.exec([]) - t.matchSnapshot(result, 'should print empty funding info') + await fund() + t.matchSnapshot(joinedOutput(), 'should print empty funding info') }) t.test('fund in which same maintainer owns all its deps', async t => { - npm.prefix = t.testdir(maintainerOwnsAllDeps) + const { fund, joinedOutput } = await setup(t, { + prefixDir: maintainerOwnsAllDeps, + config: {}, + }) - await fund.exec([]) - t.matchSnapshot(result, 'should print stack packages together') + await fund() + t.matchSnapshot(joinedOutput(), 'should print stack packages together') }) t.test('fund in which same maintainer owns all its deps, using --json option', async t => { - config.json = true - npm.prefix = t.testdir(maintainerOwnsAllDeps) + const { fund, joinedOutput } = await setup(t, { + prefixDir: maintainerOwnsAllDeps, + config: { json: true }, + }) - await fund.exec([]) + await fund() t.same( - JSON.parse(result), + JSON.parse(joinedOutput()), { length: 3, name: 'maintainer-owns-all-deps', @@ -268,24 +255,27 @@ t.test('fund in which same maintainer owns all its deps, using --json option', a }, 'should print stack packages together' ) - config.json = false }) t.test('fund containing multi-level nested deps with no funding', async t => { - npm.prefix = t.testdir(nestedNoFundingPackages) + const { fund, joinedOutput } = await setup(t, { + prefixDir: nestedNoFundingPackages, + config: {}, + }) - await fund.exec([]) - t.matchSnapshot(result, 'should omit dependencies with no funding declared') - t.end() + await fund() + t.matchSnapshot(joinedOutput(), 'should omit dependencies with no funding declared') }) t.test('fund containing multi-level nested deps with no funding, using --json option', async t => { - npm.prefix = t.testdir(nestedNoFundingPackages) - config.json = true + const { fund, joinedOutput } = await setup(t, { + prefixDir: nestedNoFundingPackages, + config: { json: true }, + }) - await fund.exec([]) + await fund() t.same( - JSON.parse(result), + JSON.parse(joinedOutput()), { length: 2, name: 'nested-no-funding-packages', @@ -303,16 +293,17 @@ t.test('fund containing multi-level nested deps with no funding, using --json op }, 'should omit dependencies with no funding declared in json output' ) - config.json = false }) t.test('fund containing multi-level nested deps with no funding, using --json option', async t => { - npm.prefix = t.testdir(nestedMultipleFundingPackages) - config.json = true + const { fund, joinedOutput } = await setup(t, { + prefixDir: nestedMultipleFundingPackages, + config: { json: true }, + }) - await fund.exec([]) + await fund() t.same( - JSON.parse(result), + JSON.parse(joinedOutput()), { length: 2, name: 'nested-multiple-funding-packages', @@ -355,376 +346,337 @@ t.test('fund containing multi-level nested deps with no funding, using --json op }, 'should list multiple funding entries in json output' ) - config.json = false }) t.test('fund does 
not support global', async t => { - npm.prefix = t.testdir({}) - config.global = true + const { fund } = await setup(t, { + config: { global: true }, + }) - await t.rejects(fund.exec([]), { code: 'EFUNDGLOBAL' }, 'should throw EFUNDGLOBAL error') - config.global = false + await t.rejects(fund(), { code: 'EFUNDGLOBAL' }, 'should throw EFUNDGLOBAL error') }) t.test('fund using package argument', async t => { - npm.prefix = t.testdir(maintainerOwnsAllDeps) + const { fund, openedUrls, joinedOutput } = await setup(t, { + prefixDir: maintainerOwnsAllDeps, + config: {}, + }) - await fund.exec(['.']) - t.matchSnapshot(printUrl, 'should open funding url') + await fund('.') + t.equal(joinedOutput(), '') + t.strictSame(openedUrls(), ['http://example.com/donate'], 'should open funding url') }) t.test('fund does not support global, using --json option', async t => { - npm.prefix = t.testdir({}) - config.global = true - config.json = true + const { fund } = await setup(t, { + prefixDir: {}, + config: { global: true, json: true }, + }) await t.rejects( - fund.exec([]), + fund(), { code: 'EFUNDGLOBAL', message: '`npm fund` does not support global packages' }, 'should use expected error msg' ) - config.global = false - config.json = false }) t.test('fund using string shorthand', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'funding-string-shorthand', - version: '0.0.0', - funding: 'https://example.com/sponsor', - }), + const { fund, openedUrls } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'funding-string-shorthand', + version: '0.0.0', + funding: 'https://example.com/sponsor', + }), + }, + config: {}, }) - await fund.exec(['.']) - t.matchSnapshot(printUrl, 'should open string-only url') + await fund('.') + t.strictSame(openedUrls(), ['https://example.com/sponsor'], 'should open string-only url') }) t.test('fund using nested packages with multiple sources', async t => { - npm.prefix = t.testdir(nestedMultipleFundingPackages) + const { fund, joinedOutput } = await setup(t, { + prefixDir: nestedMultipleFundingPackages, + config: {}, + }) - await fund.exec(['.']) - t.matchSnapshot(result, 'should prompt with all available URLs') + await fund('.') + t.matchSnapshot(joinedOutput(), 'should prompt with all available URLs') }) t.test('fund using symlink ref', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'using-symlink-ref', - version: '1.0.0', - }), - a: { + const f = 'http://example.com/a' + const { fund, openedUrls } = await setup(t, { + prefixDir: { 'package.json': JSON.stringify({ - name: 'a', + name: 'using-symlink-ref', version: '1.0.0', - funding: 'http://example.com/a', }), + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: f, + }), + }, + node_modules: { + a: t.fixture('symlink', '../a'), + }, }, - node_modules: { - a: t.fixture('symlink', '../a'), - }, + config: {}, }) // using symlinked ref - await fund.exec(['./node_modules/a']) - t.match(printUrl, 'http://example.com/a', 'should retrieve funding url from symlink') - - printUrl = '' - result = '' + await fund('./node_modules/a') + t.strictSame(openedUrls(), [f], 'should retrieve funding url from symlink') // using target ref - await fund.exec(['./a']) - - t.match(printUrl, 'http://example.com/a', 'should retrieve funding url from symlink target') + await fund('./a') + t.strictSame(openedUrls(), [f, f], 'should retrieve funding url from symlink target') }) t.test('fund using data from actual tree', async t => { 
- npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'using-actual-tree', - version: '1.0.0', - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - funding: 'http://example.com/a', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - funding: 'http://example.com/b', - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.1', - funding: 'http://example.com/_AAA', - }), + const { fund, openedUrls } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'using-actual-tree', + version: '1.0.0', + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: 'http://example.com/a', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: 'http://example.com/b', + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.1', + funding: 'http://example.com/_AAA', + }), + }, }, }, }, }, + config: {}, }) // using symlinked ref - await fund.exec(['a']) - t.match( - printUrl, - 'http://example.com/_AAA', + await fund('a') + t.strictSame( + openedUrls(), + ['http://example.com/_AAA'], 'should retrieve fund info from actual tree, using greatest version found' ) }) t.test('fund using nested packages with multiple sources, with a source number', async t => { - npm.prefix = t.testdir(nestedMultipleFundingPackages) - config.which = '1' + const { fund, openedUrls } = await setup(t, { + prefixDir: nestedMultipleFundingPackages, + config: { which: '1' }, + }) - await fund.exec(['.']) - t.matchSnapshot(printUrl, 'should open the numbered URL') - config.which = null + await fund('.') + t.strictSame(openedUrls(), ['https://one.example.com'], 'should open the numbered URL') }) t.test('fund using pkg name while having conflicting versions', async t => { - npm.prefix = t.testdir(conflictingFundingPackages) - config.which = '1' + const { fund, openedUrls } = await setup(t, { + prefixDir: conflictingFundingPackages, + config: { which: '1' }, + }) - await fund.exec(['foo']) - t.matchSnapshot(printUrl, 'should open greatest version') + await fund('foo') + t.strictSame(openedUrls(), ['http://example.com/2'], 'should open greatest version') +}) + +t.test('fund using bad which value: index too high', async t => { + const { fund, joinedOutput } = await setup(t, { + prefixDir: nestedMultipleFundingPackages, + config: { which: '100' }, + }) + + await fund('foo') + t.match(joinedOutput(), 'not a valid index') + t.matchSnapshot(joinedOutput(), 'should print message about invalid which') }) t.test('fund using package argument with no browser, using --json option', async t => { - npm.prefix = t.testdir(maintainerOwnsAllDeps) - config.json = true + const { fund, openedUrls, joinedOutput } = await setup(t, { + prefixDir: maintainerOwnsAllDeps, + config: { json: true }, + }) - await fund.exec(['.']) + await fund('.') + t.equal(joinedOutput(), '', 'no output') t.same( - JSON.parse(printUrl), - { - title: 'individual funding available at the following URL', - url: 'http://example.com/donate', - }, + openedUrls(), + ['http://example.com/donate'], 'should open funding url using json output' ) - config.json = false }) t.test('fund using package info fetch from registry', async t => { - npm.prefix = t.testdir({}) + const { fund, openedUrls } = await setup(t, { + prefixDir: {}, + config: {}, + }) - await fund.exec(['ntl']) + await fund('ntl') t.match( - printUrl, 
+ openedUrls(), /http:\/\/example.com\/pacote/, 'should open funding url that was loaded from registry manifest' ) }) t.test('fund tries to use package info fetch from registry but registry has nothing', async t => { - npm.prefix = t.testdir({}) + const { fund } = await setup(t, { + prefixDir: {}, + config: {}, + }) await t.rejects( - fund.exec(['foo']), + fund('foo'), { code: 'ENOFUND', message: 'No valid funding method available for: foo' }, 'should have no valid funding message' ) }) t.test('fund but target module has no funding info', async t => { - npm.prefix = t.testdir(nestedNoFundingPackages) + const { fund } = await setup(t, { + prefixDir: nestedNoFundingPackages, + config: {}, + }) await t.rejects( - fund.exec(['foo']), + fund('foo'), { code: 'ENOFUND', message: 'No valid funding method available for: foo' }, 'should have no valid funding message' ) }) t.test('fund using bad which value', async t => { - npm.prefix = t.testdir(nestedMultipleFundingPackages) - config.which = 3 + const { fund } = await setup(t, { + prefixDir: nestedMultipleFundingPackages, + config: { which: '0' }, + }) await t.rejects( - fund.exec(['bar']), + fund('bar'), { code: 'EFUNDNUMBER', - /* eslint-disable-next-line max-len */ - message: '`npm fund [<@scope>/] [--which=fundingSourceNumber]` must be given a positive integer', + message: /must be given a positive integer/, }, 'should have bad which option error message' ) - config.which = null }) t.test('fund pkg missing version number', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - funding: 'http://example.com/foo', - }), + const { fund, joinedOutput } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + funding: 'http://example.com/foo', + }), + }, + config: {}, }) - await fund.exec([]) - t.matchSnapshot(result, 'should print name only') + await fund() + t.matchSnapshot(joinedOutput(), 'should print name only') }) t.test('fund a package throws on openUrl', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - funding: 'http://npmjs.org', - }), + const { fund } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + funding: 'http://npmjs.org', + }), + }, + config: {}, + openUrl: () => { + throw new Error('ERROR') + }, }) - await t.rejects(fund.exec(['.']), { message: 'ERROR' }, 'should throw unknown error') + await t.rejects(fund('.'), { message: 'ERROR' }, 'should throw unknown error') }) t.test('fund a package with type and multiple sources', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - funding: [ - { - type: 'Foo', - url: 'http://example.com/foo', - }, - { - type: 'Lorem', - url: 'http://example.com/foo-lorem', - }, - ], - }), - }) - - await fund.exec(['.']) - t.matchSnapshot(result, 'should print prompt select message') -}) - -t.test('fund colors', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-fund-colors', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - b: '^1.0.0', - c: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - funding: 'http://example.com/a', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - funding: 'http://example.com/b', - dependencies: { - d: '^1.0.0', - e: '^1.0.0', + const { fund, joinedOutput } = await setup(t, { + prefixDir: { + 'package.json': 
JSON.stringify({ + name: 'foo', + funding: [ + { + type: 'Foo', + url: 'http://example.com/foo', }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - funding: 'http://example.com/b', - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - funding: 'http://example.com/d', - }), - }, - e: { - 'package.json': JSON.stringify({ - name: 'e', - version: '1.0.0', - funding: 'http://example.com/e', - }), - }, - }, - }) - npm.color = true - - await fund.exec([]) - t.matchSnapshot(result, 'should print output with color info') - npm.color = false -}) - -t.test('sub dep with fund info and a parent with no funding info', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-multiple-funding-sources', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - b: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - c: '^1.0.0', + { + type: 'Lorem', + url: 'http://example.com/foo-lorem', }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - funding: 'http://example.com/b', - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - funding: ['http://example.com/c', 'http://example.com/c-other'], - }), - }, + ], + }), }, + config: {}, }) - await fund.exec([]) - t.matchSnapshot(result, 'should nest sub dep as child of root') + await fund('.') + t.matchSnapshot(joinedOutput(), 'should print prompt select message') }) -t.test('workspaces', async t => { - t.test('filter funding info by a specific workspace', async t => { - npm.localPrefix = npm.prefix = t.testdir({ +t.test('fund colors', async t => { + const { fund, joinedOutput } = await setup(t, { + prefixDir: { 'package.json': JSON.stringify({ - name: 'workspaces-support', + name: 'test-fund-colors', version: '1.0.0', - workspaces: ['packages/*'], dependencies: { - d: '^1.0.0', + a: '^1.0.0', + b: '^1.0.0', + c: '^1.0.0', }, }), node_modules: { - a: t.fixture('symlink', '../packages/a'), - b: t.fixture('symlink', '../packages/b'), + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: 'http://example.com/a', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + funding: 'http://example.com/b', + dependencies: { + d: '^1.0.0', + e: '^1.0.0', + }, + }), + }, c: { 'package.json': JSON.stringify({ name: 'c', version: '1.0.0', - funding: ['http://example.com/c', 'http://example.com/c-other'], + funding: 'http://example.com/b', }), }, d: { @@ -734,13 +686,38 @@ t.test('workspaces', async t => { funding: 'http://example.com/d', }), }, + e: { + 'package.json': JSON.stringify({ + name: 'e', + version: '1.0.0', + funding: 'http://example.com/e', + }), + }, }, - packages: { + }, + config: { color: 'always' }, + }) + + await fund() + t.matchSnapshot(joinedOutput(), 'should print output with color info') +}) + +t.test('sub dep with fund info and a parent with no funding info', async t => { + const { fund, joinedOutput } = await setup(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-multiple-funding-sources', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + b: '^1.0.0', + }, + }), + node_modules: { a: { 'package.json': JSON.stringify({ name: 'a', version: '1.0.0', - funding: 'https://example.com/a', dependencies: { c: '^1.0.0', }, @@ -751,22 +728,97 @@ t.test('workspaces', async t => { name: 'b', version: '1.0.0', funding: 
'http://example.com/b', - dependencies: { - d: '^1.0.0', - }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + funding: ['http://example.com/c', 'http://example.com/c-other'], }), }, }, - }) + }, + config: {}, + }) - await fund.execWorkspaces([], ['a']) + await fund() + t.matchSnapshot(joinedOutput(), 'should nest sub dep as child of root') +}) - t.matchSnapshot(result, 'should display only filtered workspace name and its deps') +t.test('workspaces', async t => { + const wsPrefixDir = { + 'package.json': JSON.stringify({ + name: 'workspaces-support', + version: '1.0.0', + workspaces: ['packages/*'], + dependencies: { + d: '^1.0.0', + }, + }), + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + funding: ['http://example.com/c', 'http://example.com/c-other'], + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + funding: 'http://example.com/d', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + funding: 'https://example.com/a', + dependencies: { + c: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + funding: 'http://example.com/b', + dependencies: { + d: '^1.0.0', + }, + }), + }, + }, + } - result = '' + t.test('filter funding info by a specific workspace name', async t => { + const { fund, joinedOutput } = await setup(t, { + prefixDir: wsPrefixDir, + config: { + workspace: 'a', + }, + }) - await fund.execWorkspaces([], ['./packages/a']) + await fund() + t.matchSnapshot(joinedOutput(), 'should display only filtered workspace name and its deps') + }) + + t.test('filter funding info by a specific workspace path', async t => { + const { fund, joinedOutput } = await setup(t, { + prefixDir: wsPrefixDir, + config: { + workspace: './packages/a', + }, + }) - t.matchSnapshot(result, 'should display only filtered workspace path and its deps') + await fund() + t.matchSnapshot(joinedOutput(), 'should display only filtered workspace name and its deps') }) }) diff --git a/deps/npm/test/lib/commands/help-search.js b/deps/npm/test/lib/commands/help-search.js index 7fbeb195d23c7d..ce6e5f7cf00b01 100644 --- a/deps/npm/test/lib/commands/help-search.js +++ b/deps/npm/test/lib/commands/help-search.js @@ -1,130 +1,89 @@ const t = require('tap') -const { join } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const { load: loadMockNpm } = require('../../fixtures/mock-npm.js') const chalk = require('chalk') -const OUTPUT = [] -const output = msg => { - OUTPUT.push(msg) -} - -const config = { - long: false, -} -const npmHelpErr = null -const npm = mockNpm({ - color: false, - config, - flatOptions: { - long: false, +/* eslint-disable max-len */ +const docsFixtures = { + dir1: { + 'npm-exec.md': 'the exec command\nhelp has multiple lines of exec help\none of them references exec', }, - usage: 'npm test usage', - exec: async () => { - if (npmHelpErr) { - throw npmHelpErr - } + dir2: { + 'npm-something.md': 'another\ncommand you run\nthat\nreferences exec\nand has multiple lines\nwith no matches\nthat will be ignored\nand another line\nthat does have exec as well', + 'npm-run-script.md': 'the scripted run-script command runs scripts\nand has lines\nsome of which dont match the string run\nor script\nscript', + 'npm-install.md': 'does a thing in a script\nif a thing does not exist 
in a thing you run\nto install it and run it maybe in a script', + }, + dir3: { + 'npm-help.md': 'will run the `help-search` command if you need to run it to help you search', + 'npm-help-search.md': 'is the help search command\nthat you get if you run help-search', + 'npm-useless.md': 'exec\nexec', + 'npm-more-useless.md': 'exec exec', + 'npm-extra-useless.md': 'exec\nexec\nexec', }, - output, -}) - -let globRoot = null -const globDir = { - 'npm-exec.md': - 'the exec command\nhelp has multiple lines of exec help\none of them references exec', - /* eslint-disable-next-line max-len */ - 'npm-something.md': 'another\ncommand you run\nthat\nreferences exec\nand has multiple lines\nwith no matches\nthat will be ignored\nand another line\nthat does have exec as well', - /* eslint-disable-next-line max-len */ - 'npm-run-script.md': 'the scripted run-script command runs scripts\nand has lines\nsome of which dont match the string run\nor script\nscript', - /* eslint-disable-next-line max-len */ - 'npm-install.md': 'does a thing in a script\nif a thing does not exist in a thing you run\nto install it and run it maybe in a script', - 'npm-help.md': 'will run the `help-search` command if you need to run it to help you search', - 'npm-help-search.md': 'is the help search command\nthat you get if you run help-search', - 'npm-useless.md': 'exec\nexec', - 'npm-more-useless.md': 'exec exec', - 'npm-extra-useless.md': 'exec\nexec\nexec', } -const glob = (p, cb) => - cb( - null, - Object.keys(globDir).map(file => join(globRoot, file)) - ) +/* eslint-enable max-len */ + +const execHelpSearch = async (t, exec = [], opts) => { + const { npm, ...rest } = await loadMockNpm(t, { + npm: ({ other }) => ({ npmRoot: other }), + // docs/content is hardcoded into the glob path in the command + otherDirs: { + docs: { + content: docsFixtures, + }, + }, + ...opts, + }) -const HelpSearch = t.mock('../../../lib/commands/help-search.js', { - glob, -}) -const helpSearch = new HelpSearch(npm) + await npm.exec('help-search', exec) -t.test('npm help-search', async t => { - globRoot = t.testdir(globDir) - t.teardown(() => { - OUTPUT.length = 0 - globRoot = null - }) + return { npm, output: rest.joinedOutput(), ...rest } +} - await helpSearch.exec(['exec']) +t.test('npm help-search', async t => { + const { output } = await execHelpSearch(t, ['exec']) - t.match(OUTPUT, /Top hits for "exec"/, 'outputs results') + t.match(output, /Top hits for "exec"/, 'outputs results') }) t.test('npm help-search multiple terms', async t => { - globRoot = t.testdir(globDir) - t.teardown(() => { - OUTPUT.length = 0 - globRoot = null - }) + const { output } = await execHelpSearch(t, ['run', 'script']) - await helpSearch.exec(['run', 'script']) - - t.match(OUTPUT, /Top hits for/, 'outputs results') - t.match(OUTPUT, /run:\d+ script:\d+/, 'shows hit counts for both terms') + t.match(output, /Top hits for/, 'outputs results') + t.match(output, /run:\d+ script:\d+/, 'shows hit counts for both terms') }) t.test('npm help-search long output', async t => { - globRoot = t.testdir(globDir) - config.long = true - t.teardown(() => { - OUTPUT.length = 0 - config.long = false - globRoot = null + const { output } = await execHelpSearch(t, ['exec'], { + config: { + long: true, + }, }) - await helpSearch.exec(['exec']) - - t.match(OUTPUT, /has multiple lines of exec help/, 'outputs detailed results') + t.match(output, /has multiple lines of exec help/, 'outputs detailed results') }) t.test('npm help-search long output with color', async t => { - globRoot = 
t.testdir(globDir) - config.long = true - npm.color = true - t.teardown(() => { - OUTPUT.length = 0 - config.long = false - npm.color = false - globRoot = null + const { output } = await execHelpSearch(t, ['help-search'], { + config: { + long: true, + color: 'always', + }, }) - await helpSearch.exec(['help-search']) - const highlightedText = chalk.bgBlack.red('help-search') t.equal( - OUTPUT.some(line => line.includes(highlightedText)), + output.split('\n').some(line => line.includes(highlightedText)), true, 'returned highlighted search terms' ) }) t.test('npm help-search no args', async t => { - t.rejects(helpSearch.exec([]), /npm help-search/, 'outputs usage') + await t.rejects(execHelpSearch(t), /npm help-search/, 'outputs usage') }) t.test('npm help-search no matches', async t => { - globRoot = t.testdir(globDir) - t.teardown(() => { - OUTPUT.length = 0 - globRoot = null - }) + const { output } = await execHelpSearch(t, ['asdfasdf']) - await helpSearch.exec(['asdfasdf']) - t.match(OUTPUT, /No matches/) + t.match(output, /No matches/) }) diff --git a/deps/npm/test/lib/commands/help.js b/deps/npm/test/lib/commands/help.js index 1e623dab9386e0..d4e7a81f84a4cd 100644 --- a/deps/npm/test/lib/commands/help.js +++ b/deps/npm/test/lib/commands/help.js @@ -1,351 +1,231 @@ const t = require('tap') -const { EventEmitter } = require('events') - -const npmConfig = { - usage: false, - viewer: undefined, - loglevel: undefined, -} - -let helpSearchArgs = null -const OUTPUT = [] -const npm = { - usage: 'test npm usage', - config: { - get: key => npmConfig[key], - set: (key, value) => { - npmConfig[key] = value - }, - parsedArgv: { - cooked: [], - }, - validate: () => {}, - }, - exec: async (cmd, args) => { - if (cmd === 'help-search') { - helpSearchArgs = args - } else if (cmd === 'help') { - return { usage: 'npm help ' } +const localeCompare = require('@isaacs/string-locale-compare')('en') +const { load: loadMockNpm } = require('../../fixtures/mock-npm.js') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + +const genManPages = (obj) => { + const man = {} + const resPages = new Set() + + for (const [section, pages] of Object.entries(obj)) { + const num = parseInt(section, 10) + man[`man${num}`] = {} + + const sectionPages = [] + for (const name of pages) { + man[`man${num}`][`${name}.${section}`] = `.TH "${name.toUpperCase()}" "${num}"` + sectionPages.push(name.replace(/^npm-/, '')) } - }, - deref: cmd => {}, - output: msg => { - OUTPUT.push(msg) - }, -} -const globDefaults = [ - '/root/man/man1/npm-whoami.1', - '/root/man/man5/npmrc.5', - '/root/man/man7/disputes.7', -] - -let globErr = null -let globResult = globDefaults -let globParam -const glob = (p, cb) => { - globParam = p - return cb(globErr, globResult) -} - -let spawnBin = null -let spawnArgs = null -let spawnCode = 0 -const spawn = (bin, args) => { - spawnBin = bin - spawnArgs = args - const spawnEmitter = new EventEmitter() - process.nextTick(() => { - spawnEmitter.emit('exit', spawnCode) - }) - return spawnEmitter -} + // return a sorted list of uniq pages in order to test completion + for (const p of sectionPages.sort(localeCompare)) { + resPages.add(p) + } + } -let openUrlArg = null -const openUrl = async (npm, url, msg) => { - openUrlArg = url + // man directory name is hardcoded in the command + return { fixtures: { man }, pages: [...resPages.values()] } } -const Help = t.mock('../../../lib/commands/help.js', { - '../../../lib/utils/open-url.js': openUrl, - child_process: { - spawn, +const mockHelp = async (t, { + man = 
{ + 1: ['whoami', 'install', 'star', 'unstar', 'uninstall', 'unpublish'].map(p => `npm-${p}`), + 5: ['npmrc', 'install', 'package-json'], + 7: ['disputes', 'config'], }, - glob, -}) -const help = new Help(npm) + browser = false, + woman = false, + exec: execArgs = null, + spawnErr, + ...opts +} = {}) => { + const config = { + // always set viewer to test the same on all platforms + viewer: browser ? 'browser' : woman ? 'woman' : 'man', + ...opts.config, + } + + let args = null + const mockSpawn = async (...a) => { + args = a + if (spawnErr) { + throw spawnErr + } + } + mockSpawn.open = async (url) => args = [cleanCwd(decodeURI(url))] + + const manPages = genManPages(man) + + const { npm, ...rest } = await loadMockNpm(t, { + npm: ({ other }) => ({ npmRoot: other }), + mocks: { '@npmcli/promise-spawn': mockSpawn }, + otherDirs: { ...manPages.fixtures }, + config, + ...opts, + }) + + const help = await npm.cmd('help') + const exec = execArgs + ? await npm.exec('help', execArgs) + : (...a) => npm.exec('help', a) + + return { + npm, + help, + exec, + manPages: manPages.pages, + getArgs: () => args, + ...rest, + } +} t.test('npm help', async t => { - await help.exec([]) + const { exec, joinedOutput } = await mockHelp(t) + await exec() - t.match(OUTPUT, ['test npm usage'], 'showed npm usage') + t.match(joinedOutput(), 'npm ', 'showed npm usage') }) t.test('npm help completion', async t => { - t.teardown(() => { - globErr = null - }) + const { help, manPages } = await mockHelp(t) const noArgs = await help.completion({ conf: { argv: { remain: [] } } }) - t.strictSame(noArgs, ['help', 'whoami', 'npmrc', 'disputes'], 'outputs available help pages') + t.strictSame(noArgs, ['help', ...manPages], 'outputs available help pages') const threeArgs = await help.completion({ conf: { argv: { remain: ['one', 'two', 'three'] } } }) t.strictSame(threeArgs, [], 'outputs no results when more than 2 args are provided') - globErr = new Error('glob failed') - t.rejects( - help.completion({ conf: { argv: { remain: [] } } }), - /glob failed/, - 'glob errors propagate' - ) }) t.test('npm help multiple args calls search', async t => { - t.teardown(() => { - helpSearchArgs = null - }) - - await help.exec(['run', 'script']) + const { joinedOutput } = await mockHelp(t, { exec: ['run', 'script'] }) - t.strictSame(helpSearchArgs, ['run', 'script'], 'passed the args to help-search') + t.match(joinedOutput(), 'No matches in help for: run script', 'calls help-search') }) t.test('npm help no matches calls search', async t => { - globResult = [] - t.teardown(() => { - helpSearchArgs = null - globResult = globDefaults - }) - - await help.exec(['asdfasdf']) - t.strictSame(helpSearchArgs, ['asdfasdf'], 'passed the args to help-search') -}) - -t.test('npm help glob errors propagate', async t => { - globErr = new Error('glob failed') - t.teardown(() => { - globErr = null - spawnBin = null - spawnArgs = null - }) + const { joinedOutput } = await mockHelp(t, { exec: ['asdfasdf'] }) - await t.rejects(help.exec(['whoami']), /glob failed/, 'glob error propagates') + t.match(joinedOutput(), 'No matches in help for: asdfasdf', 'passed the args to help-search') }) t.test('npm help whoami', async t => { - globResult = ['/root/man/man1/npm-whoami.1.xz'] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null - }) - - await help.exec(['whoami']) + const { getArgs } = await mockHelp(t, { exec: ['whoami'] }) + const [spawnBin, spawnArgs] = getArgs() t.equal(spawnBin, 'man', 'calls man by default') - 
t.strictSame(spawnArgs, [globResult[0]], 'passes the correct arguments') + t.equal(spawnArgs.length, 1) + t.match(spawnArgs[0], /\/man\/man1\/npm-whoami\.1$/) }) t.test('npm help 1 install', async t => { - npmConfig.viewer = 'browser' - globResult = ['/root/man/man5/install.5', '/root/man/man1/npm-install.1'] - - t.teardown(() => { - npmConfig.viewer = undefined - globResult = globDefaults - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['1', 'install'], + browser: true, }) - await help.exec(['1', 'install']) - - t.match(openUrlArg, /commands(\/|\\)npm-install.html$/, 'attempts to open the correct url') - t.ok(openUrlArg.startsWith('file:///'), 'opens with the correct uri schema') + const [url] = getArgs() + t.match(url, /commands\/npm-install.html$/, 'attempts to open the correct url') + t.ok(url.startsWith('file:///'), 'opens with the correct uri schema') }) t.test('npm help 5 install', async t => { - npmConfig.viewer = 'browser' - globResult = ['/root/man/man5/install.5'] - - t.teardown(() => { - npmConfig.viewer = undefined - globResult = globDefaults - globParam = null - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['5', 'install'], + browser: true, }) - await help.exec(['5', 'install']) - - t.match(globParam, /man5/, 'searches only in man5 folder') - t.match(openUrlArg, /configuring-npm(\/|\\)install.html$/, 'attempts to open the correct url') + const [url] = getArgs() + t.match(url, /configuring-npm\/install.html$/, 'attempts to open the correct url') }) t.test('npm help 7 config', async t => { - npmConfig.viewer = 'browser' - globResult = ['/root/man/man7/config.7'] - t.teardown(() => { - npmConfig.viewer = undefined - globParam = null - globResult = globDefaults - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['7', 'config'], + browser: true, }) - await help.exec(['7', 'config']) - - t.match(globParam, /man7/, 'searches only in man5 folder') - t.match(openUrlArg, /using-npm(\/|\\)config.html$/, 'attempts to open the correct url') + const [url] = getArgs() + t.match(url, /using-npm\/config.html$/, 'attempts to open the correct url') }) t.test('npm help package.json redirects to package-json', async t => { - globResult = ['/root/man/man5/package-json.5'] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['package.json'], }) - await help.exec(['package.json']) - + const [spawnBin, spawnArgs] = getArgs() t.equal(spawnBin, 'man', 'calls man by default') - t.match(globParam, /package-json/, 'glob was asked to find package-json') - t.strictSame(spawnArgs, [globResult[0]], 'passes the correct arguments') + t.equal(spawnArgs.length, 1) + t.match(spawnArgs[0], /\/man\/man5\/package-json\.5$/) }) t.test('npm help ?(un)star', async t => { - npmConfig.viewer = 'woman' - globResult = ['/root/man/man1/npm-star.1', '/root/man/man1/npm-unstar.1'] - t.teardown(() => { - npmConfig.viewer = undefined - globResult = globDefaults - spawnBin = null - spawnArgs = null - }) - - await help.exec(['?(un)star']) - - t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame( - spawnArgs, - ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], - 'passes the correct arguments' - ) -}) - -t.test('npm help - woman viewer propagates errors', async t => { - npmConfig.viewer = 'woman' - spawnCode = 1 - globResult = ['/root/man/man1/npm-star.1', '/root/man/man1/npm-unstar.1'] - t.teardown(() 
=> { - npmConfig.viewer = undefined - spawnCode = 0 - globResult = globDefaults - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['?(un)star'], + woman: true, }) - await t.rejects( - help.exec(['?(un)star']), - /help process exited with code: 1/, - 'received the correct error' - ) + const [spawnBin, spawnArgs] = getArgs() t.equal(spawnBin, 'emacsclient', 'maps woman to emacs correctly') - t.strictSame( - spawnArgs, - ['-e', `(woman-find-file '/root/man/man1/npm-star.1')`], - 'passes the correct arguments' - ) + t.equal(spawnArgs.length, 2) + t.match(spawnArgs[1], /^\(woman-find-file '/) + t.match(spawnArgs[1], /\/man\/man1\/npm-star.1'\)$/) }) t.test('npm help un*', async t => { - globResult = [ - '/root/man/man1/npm-unstar.1', - '/root/man/man1/npm-uninstall.1', - '/root/man/man1/npm-unpublish.1', - ] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null + const { getArgs } = await mockHelp(t, { + exec: ['un*'], }) - await help.exec(['un*']) - + const [spawnBin, spawnArgs] = getArgs() t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['/root/man/man1/npm-uninstall.1'], 'passes the correct arguments') + t.equal(spawnArgs.length, 1) + t.match(spawnArgs[0], /\/man\/man1\/npm-uninstall\.1$/) }) -t.test('npm help - man viewer propagates errors', async t => { - spawnCode = 1 - globResult = [ - '/root/man/man1/npm-unstar.1', - '/root/man/man1/npm-uninstall.1', - '/root/man/man1/npm-unpublish.1', - ] - t.teardown(() => { - spawnCode = 0 - globResult = globDefaults - spawnBin = null - spawnArgs = null +t.test('npm help - prefers npm help pages', async t => { + const { getArgs } = await mockHelp(t, { + man: { + 6: ['npm-install'], + 1: ['install'], + 5: ['install', 'npm-install'], + }, + exec: ['install'], }) - await t.rejects(help.exec(['un*']), /help process exited with code: 1/, 'received correct error') + const [spawnBin, spawnArgs] = getArgs() t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['/root/man/man1/npm-uninstall.1'], 'passes the correct arguments') + t.equal(spawnArgs.length, 1) + t.match(spawnArgs[0], /\/man\/man5\/npm-install\.5$/) }) -t.test('npm help with complex installation path finds proper help file', async t => { - npmConfig.viewer = 'browser' - globResult = [ - 'C:/Program Files/node-v14.15.5-win-x64/node_modules/npm/man/man1/npm-install.1', - // glob always returns forward slashes, even on Windows - ] - - t.teardown(() => { - npmConfig.viewer = undefined - globResult = globDefaults - spawnBin = null - spawnArgs = null +t.test('npm help - works in the presence of strange man pages', async t => { + const { getArgs } = await mockHelp(t, { + man: { + '6strange': ['config'], + 1: ['config'], + '5ssl': ['config'], + }, + exec: ['config'], }) - await help.exec(['1', 'install']) - - t.match(openUrlArg, /commands(\/|\\)npm-install.html$/, 'attempts to open the correct url') + const [spawnBin, spawnArgs] = getArgs() + t.equal(spawnBin, 'man', 'calls man by default') + t.equal(spawnArgs.length, 1) + t.match(spawnArgs[0], /\/man\/man1\/config\.1$/) }) -t.test('npm help - prefers npm help pages', async t => { - // Unusual ordering is to get full test coverage of all branches inside the - // sort function. 
- globResult = [ - '/root/man/man6/npm-install.6', - '/root/man/man1/install.1', - '/root/man/man5/npm-install.5', - ] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null +t.test('rejects with code', async t => { + const { exec } = await mockHelp(t, { + spawnErr: Object.assign(new Error('errrrr'), { code: 'SPAWN_ERR' }), }) - await help.exec(['install']) - t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['/root/man/man5/npm-install.5'], 'passes the correct arguments') + await t.rejects(exec('whoami'), /help process exited with code: SPAWN_ERR/) }) -t.test('npm help - works in the presence of strange man pages', async t => { - // Unusual ordering is to get full test coverage of all branches inside the - // sort function. - globResult = [ - '/root/man/man6/config.6strange', - '/root/man/man1/config.1', - '/root/man/man5/config.5ssl', - ] - t.teardown(() => { - globResult = globDefaults - spawnBin = null - spawnArgs = null +t.test('rejects with no code', async t => { + const { exec } = await mockHelp(t, { + spawnErr: new Error('errrrr'), }) - await help.exec(['config']) - t.equal(spawnBin, 'man', 'calls man by default') - t.strictSame(spawnArgs, ['/root/man/man1/config.1'], 'passes the correct arguments') + await t.rejects(exec('whoami'), /errrrr/) }) diff --git a/deps/npm/test/lib/commands/hook.js b/deps/npm/test/lib/commands/hook.js index 0cd6a7490dda25..01da9dc720dae5 100644 --- a/deps/npm/test/lib/commands/hook.js +++ b/deps/npm/test/lib/commands/hook.js @@ -1,86 +1,81 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const output = [] -const npm = mockNpm({ - flatOptions: { - json: false, - parseable: false, - unicode: false, - }, - config: { - loglevel: 'info', - }, - output: msg => { - output.push(msg) - }, -}) +const mockNpm = require('../../fixtures/mock-npm') -const pkgTypes = { - semver: 'package', - '@npmcli': 'scope', - npm: 'owner', -} +const mockHook = async (t, { hookResponse, ...npmOpts } = {}) => { + const now = Date.now() -const now = Date.now() -let hookResponse = null -let hookArgs = null -const libnpmhook = { - add: async (pkg, uri, secret, opts) => { - hookArgs = { pkg, uri, secret, opts } - return { id: 1, name: pkg, type: pkgTypes[pkg], endpoint: uri } - }, - ls: async opts => { - hookArgs = opts - let id = 0 - if (hookResponse) { - return hookResponse - } - - return Object.keys(pkgTypes).map(name => ({ - id: ++id, - name, - type: pkgTypes[name], - endpoint: 'https://google.com', - last_delivery: id % 2 === 0 ? 
now : undefined, - })) - }, - rm: async (id, opts) => { - hookArgs = { id, opts } - const pkg = Object.keys(pkgTypes)[0] - return { - id: 1, - name: pkg, - type: pkgTypes[pkg], - endpoint: 'https://google.com', - } - }, - update: async (id, uri, secret, opts) => { - hookArgs = { id, uri, secret, opts } - const pkg = Object.keys(pkgTypes)[0] - return { id, name: pkg, type: pkgTypes[pkg], endpoint: uri } - }, -} + let hookArgs = null -const Hook = t.mock('../../../lib/commands/hook.js', { - libnpmhook, -}) -const hook = new Hook(npm) + const pkgTypes = { + semver: 'package', + '@npmcli': 'scope', + npm: 'owner', + } + + const libnpmhook = { + add: async (pkg, uri, secret, opts) => { + hookArgs = { pkg, uri, secret, opts } + return { id: 1, name: pkg, type: pkgTypes[pkg], endpoint: uri } + }, + ls: async opts => { + hookArgs = opts + let id = 0 + if (hookResponse) { + return hookResponse + } + + return Object.keys(pkgTypes).map(name => ({ + id: ++id, + name, + type: pkgTypes[name], + endpoint: 'https://google.com', + last_delivery: id % 2 === 0 ? now : undefined, + })) + }, + rm: async (id, opts) => { + hookArgs = { id, opts } + const pkg = Object.keys(pkgTypes)[0] + return { + id: 1, + name: pkg, + type: pkgTypes[pkg], + endpoint: 'https://google.com', + } + }, + update: async (id, uri, secret, opts) => { + hookArgs = { id, uri, secret, opts } + const pkg = Object.keys(pkgTypes)[0] + return { id, name: pkg, type: pkgTypes[pkg], endpoint: uri } + }, + } + + const mock = await mockNpm(t, { + ...npmOpts, + mocks: { + libnpmhook, + ...npmOpts.mocks, + }, + }) + + return { + ...mock, + now, + hook: { exec: (args) => mock.npm.exec('hook', args) }, + hookArgs: () => hookArgs, + } +} t.test('npm hook no args', async t => { + const { hook } = await mockHook(t) await t.rejects(hook.exec([]), hook.usage, 'throws usage with no arguments') }) t.test('npm hook add', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 - }) - + const { npm, hook, outputs, hookArgs } = await mockHook(t) await hook.exec(['add', 'semver', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: 'semver', uri: 'https://google.com', @@ -89,19 +84,15 @@ t.test('npm hook add', async t => { }, 'provided the correct arguments to libnpmhook' ) - t.strictSame(output, ['+ semver -> https://google.com'], 'prints the correct output') + t.strictSame(outputs[0], ['+ semver -> https://google.com'], 'prints the correct output') }) t.test('npm hook add - correct owner hook output', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 - }) - + const { npm, hook, outputs, hookArgs } = await mockHook(t) await hook.exec(['add', '~npm', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: '~npm', uri: 'https://google.com', @@ -110,19 +101,15 @@ t.test('npm hook add - correct owner hook output', async t => { }, 'provided the correct arguments to libnpmhook' ) - t.strictSame(output, ['+ ~npm -> https://google.com'], 'prints the correct output') + t.strictSame(outputs[0], ['+ ~npm -> https://google.com'], 'prints the correct output') }) t.test('npm hook add - correct scope hook output', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 - }) - + const { npm, hook, outputs, hookArgs } = await mockHook(t) await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: '@npmcli', uri: 'https://google.com', @@ -131,21 +118,21 @@ t.test('npm hook add - correct scope hook output', async t => { }, 
'provided the correct arguments to libnpmhook' ) - t.strictSame(output, ['+ @npmcli -> https://google.com'], 'prints the correct output') + t.strictSame(outputs[0], ['+ @npmcli -> https://google.com'], 'prints the correct output') }) t.test('npm hook add - unicode output', async t => { - npm.flatOptions.unicode = true - t.teardown(() => { - npm.flatOptions.unicode = false - hookArgs = null - output.length = 0 + const config = { + unicode: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['add', 'semver', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: 'semver', uri: 'https://google.com', @@ -154,21 +141,21 @@ t.test('npm hook add - unicode output', async t => { }, 'provided the correct arguments to libnpmhook' ) - t.strictSame(output, ['+ semver ➜ https://google.com'], 'prints the correct output') + t.strictSame(outputs[0], ['+ semver ➜ https://google.com'], 'prints the correct output') }) t.test('npm hook add - json output', async t => { - npm.flatOptions.json = true - t.teardown(() => { - npm.flatOptions.json = false - hookArgs = null - output.length = 0 + const config = { + json: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: '@npmcli', uri: 'https://google.com', @@ -178,7 +165,7 @@ t.test('npm hook add - json output', async t => { 'provided the correct arguments to libnpmhook' ) t.strictSame( - JSON.parse(output[0]), + JSON.parse(outputs[0][0]), { id: 1, name: '@npmcli', @@ -190,17 +177,17 @@ t.test('npm hook add - json output', async t => { }) t.test('npm hook add - parseable output', async t => { - npm.flatOptions.parseable = true - t.teardown(() => { - npm.flatOptions.parseable = false - hookArgs = null - output.length = 0 + const config = { + parseable: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: '@npmcli', uri: 'https://google.com', @@ -209,30 +196,29 @@ t.test('npm hook add - parseable output', async t => { }, 'provided the correct arguments to libnpmhook' ) + t.strictSame( - output[0].split(/\t/), + outputs[0][0].split(/\t/), ['id', 'name', 'type', 'endpoint'], 'prints the correct parseable output headers' ) t.strictSame( - output[1].split(/\t/), + outputs[1][0].split(/\t/), ['1', '@npmcli', 'scope', 'https://google.com'], 'prints the correct parseable values' ) }) t.test('npm hook add - silent output', async t => { - npm.config.set('loglevel', 'silent') - t.teardown(() => { - npm.config.set('loglevel', 'info') - hookArgs = null - output.length = 0 + const config = { loglevel: 'silent' } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['add', '@npmcli', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { pkg: '@npmcli', uri: 'https://google.com', @@ -241,55 +227,49 @@ t.test('npm hook add - silent output', async t => { }, 'provided the correct arguments to libnpmhook' ) - t.strictSame(output, [], 'printed no output') + t.strictSame(outputs, [], 'printed no output') }) t.test('npm hook ls', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 - }) - + const { npm, hook, outputs, hookArgs } = await mockHook(t) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: 
undefined, }, 'received the correct arguments' ) - t.equal(output[0], 'You have 3 hooks configured.', 'prints the correct header') - const out = require('../../../lib/utils/ansi-trim')(output[1]) + t.equal(outputs[0][0], 'You have 3 hooks configured.', 'prints the correct header') + const out = require('../../../lib/utils/ansi-trim')(outputs[1][0]) t.match(out, /semver.*https:\/\/google.com.*\n.*\n.*never triggered/, 'prints package hook') t.match(out, /@npmcli.*https:\/\/google.com.*\n.*\n.*triggered just now/, 'prints scope hook') t.match(out, /~npm.*https:\/\/google.com.*\n.*\n.*never triggered/, 'prints owner hook') }) t.test('npm hook ls, no results', async t => { - hookResponse = [] - t.teardown(() => { - hookResponse = null - hookArgs = null - output.length = 0 + const hookResponse = [] + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + hookResponse, }) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: undefined, }, 'received the correct arguments' ) - t.equal(output[0], "You don't have any hooks configured yet.", 'prints the correct result') + t.equal(outputs[0][0], "You don't have any hooks configured yet.", 'prints the correct result') }) t.test('npm hook ls, single result', async t => { - hookResponse = [ + const hookResponse = [ { id: 1, name: 'semver', @@ -297,47 +277,44 @@ t.test('npm hook ls, single result', async t => { endpoint: 'https://google.com', }, ] - - t.teardown(() => { - hookResponse = null - hookArgs = null - output.length = 0 + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + hookResponse, }) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: undefined, }, 'received the correct arguments' ) - t.equal(output[0], 'You have one hook configured.', 'prints the correct header') - const out = require('../../../lib/utils/ansi-trim')(output[1]) + t.equal(outputs[0][0], 'You have one hook configured.', 'prints the correct header') + const out = require('../../../lib/utils/ansi-trim')(outputs[1][0]) t.match(out, /semver.*https:\/\/google.com.*\n.*\n.*never triggered/, 'prints package hook') }) t.test('npm hook ls - json output', async t => { - npm.flatOptions.json = true - t.teardown(() => { - npm.flatOptions.json = false - hookArgs = null - output.length = 0 + const config = { + json: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: undefined, }, 'received the correct arguments' ) - const out = JSON.parse(output[0]) + const out = JSON.parse(outputs[0]) t.match( out, [ @@ -365,17 +342,17 @@ t.test('npm hook ls - json output', async t => { }) t.test('npm hook ls - parseable output', async t => { - npm.flatOptions.parseable = true - t.teardown(() => { - npm.flatOptions.parseable = false - hookArgs = null - output.length = 0 + const config = { + parseable: true, + } + const { npm, hook, outputs, hookArgs, now } = await mockHook(t, { + config, }) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: undefined, @@ -383,7 +360,7 @@ t.test('npm hook ls - parseable output', async t => { 'received the correct arguments' ) t.strictSame( - output.map(line => line.split(/\t/)), + outputs.map(line => line[0].split(/\t/)), [ ['id', 'name', 'type', 'endpoint', 'last_delivery'], ['1', 'semver', 'package', 'https://google.com', ''], @@ -395,99 +372,92 @@ t.test('npm hook ls - parseable output', async t => { }) t.test('npm 
hook ls - silent output', async t => { - npm.config.set('loglevel', 'silent') - t.teardown(() => { - npm.config.set('loglevel', 'info') - hookArgs = null - output.length = 0 + const config = { loglevel: 'silent' } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['ls']) t.match( - hookArgs, + hookArgs(), { ...npm.flatOptions, package: undefined, }, 'received the correct arguments' ) - t.strictSame(output, [], 'printed no output') + t.strictSame(outputs, [], 'printed no output') }) t.test('npm hook rm', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 + const { npm, hook, outputs, hookArgs } = await mockHook(t, { }) - await hook.exec(['rm', '1']) t.match( - hookArgs, + hookArgs(), { id: '1', opts: npm.flatOptions, }, 'received the correct arguments' ) - t.strictSame(output, ['- semver X https://google.com'], 'printed the correct output') + t.strictSame(outputs[0], ['- semver X https://google.com'], 'printed the correct output') }) t.test('npm hook rm - unicode output', async t => { - npm.flatOptions.unicode = true - t.teardown(() => { - npm.flatOptions.unicode = false - hookArgs = null - output.length = 0 + const config = { + unicode: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['rm', '1']) t.match( - hookArgs, + hookArgs(), { id: '1', opts: npm.flatOptions, }, 'received the correct arguments' ) - t.strictSame(output, ['- semver ✘ https://google.com'], 'printed the correct output') + t.strictSame(outputs[0], ['- semver ✘ https://google.com'], 'printed the correct output') }) t.test('npm hook rm - silent output', async t => { - npm.config.set('loglevel', 'silent') - t.teardown(() => { - npm.config.set('loglevel', 'info') - hookArgs = null - output.length = 0 + const config = { loglevel: 'silent' } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['rm', '1']) t.match( - hookArgs, + hookArgs(), { id: '1', opts: npm.flatOptions, }, 'received the correct arguments' ) - t.strictSame(output, [], 'printed no output') + t.strictSame(outputs, [], 'printed no output') }) t.test('npm hook rm - json output', async t => { - npm.flatOptions.json = true - t.teardown(() => { - npm.flatOptions.json = false - hookArgs = null - output.length = 0 + const config = { + json: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['rm', '1']) t.match( - hookArgs, + hookArgs(), { id: '1', opts: npm.flatOptions, @@ -495,7 +465,7 @@ t.test('npm hook rm - json output', async t => { 'received the correct arguments' ) t.strictSame( - JSON.parse(output[0]), + JSON.parse(outputs[0]), { id: 1, name: 'semver', @@ -507,17 +477,17 @@ t.test('npm hook rm - json output', async t => { }) t.test('npm hook rm - parseable output', async t => { - npm.flatOptions.parseable = true - t.teardown(() => { - npm.flatOptions.parseable = false - hookArgs = null - output.length = 0 + const config = { + parseable: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['rm', '1']) t.match( - hookArgs, + hookArgs(), { id: '1', opts: npm.flatOptions, @@ -525,7 +495,7 @@ t.test('npm hook rm - parseable output', async t => { 'received the correct arguments' ) t.strictSame( - output.map(line => line.split(/\t/)), + outputs.map(line => line[0].split(/\t/)), [ ['id', 'name', 'type', 'endpoint'], ['1', 'semver', 'package', 'https://google.com'], @@ -535,15 +505,12 @@ t.test('npm hook rm - 
parseable output', async t => { }) t.test('npm hook update', async t => { - t.teardown(() => { - hookArgs = null - output.length = 0 + const { npm, hook, outputs, hookArgs } = await mockHook(t, { }) - await hook.exec(['update', '1', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { id: '1', uri: 'https://google.com', @@ -552,21 +519,21 @@ t.test('npm hook update', async t => { }, 'received the correct arguments' ) - t.strictSame(output, ['+ semver -> https://google.com'], 'printed the correct output') + t.strictSame(outputs[0], ['+ semver -> https://google.com'], 'printed the correct output') }) t.test('npm hook update - unicode', async t => { - npm.flatOptions.unicode = true - t.teardown(() => { - npm.flatOptions.unicode = false - hookArgs = null - output.length = 0 + const config = { + unicode: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['update', '1', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { id: '1', uri: 'https://google.com', @@ -575,21 +542,21 @@ t.test('npm hook update - unicode', async t => { }, 'received the correct arguments' ) - t.strictSame(output, ['+ semver ➜ https://google.com'], 'printed the correct output') + t.strictSame(outputs[0], ['+ semver ➜ https://google.com'], 'printed the correct output') }) t.test('npm hook update - json output', async t => { - npm.flatOptions.json = true - t.teardown(() => { - npm.flatOptions.json = false - hookArgs = null - output.length = 0 + const config = { + json: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['update', '1', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { id: '1', uri: 'https://google.com', @@ -599,7 +566,7 @@ t.test('npm hook update - json output', async t => { 'received the correct arguments' ) t.strictSame( - JSON.parse(output[0]), + JSON.parse(outputs[0]), { id: '1', name: 'semver', @@ -611,17 +578,17 @@ t.test('npm hook update - json output', async t => { }) t.test('npm hook update - parseable output', async t => { - npm.flatOptions.parseable = true - t.teardown(() => { - npm.flatOptions.parseable = false - hookArgs = null - output.length = 0 + const config = { + parseable: true, + } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['update', '1', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { id: '1', uri: 'https://google.com', @@ -631,7 +598,7 @@ t.test('npm hook update - parseable output', async t => { 'received the correct arguments' ) t.strictSame( - output.map(line => line.split(/\t/)), + outputs.map(line => line[0].split(/\t/)), [ ['id', 'name', 'type', 'endpoint'], ['1', 'semver', 'package', 'https://google.com'], @@ -641,17 +608,15 @@ t.test('npm hook update - parseable output', async t => { }) t.test('npm hook update - silent output', async t => { - npm.config.set('loglevel', 'silent') - t.teardown(() => { - npm.config.set('loglevel', 'info') - hookArgs = null - output.length = 0 + const config = { loglevel: 'silent' } + const { npm, hook, outputs, hookArgs } = await mockHook(t, { + config, }) await hook.exec(['update', '1', 'https://google.com', 'some-secret']) t.match( - hookArgs, + hookArgs(), { id: '1', uri: 'https://google.com', @@ -660,5 +625,5 @@ t.test('npm hook update - silent output', async t => { }, 'received the correct arguments' ) - t.strictSame(output, [], 'printed no output') + t.strictSame(outputs, [], 'printed no output') }) 
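The hunks above all follow one conversion pattern: module-level mutable fakes (a shared `mockNpm({ config, output })`, shared `output`/`hookArgs` arrays, manual `t.teardown`/`t.afterEach` resets) are replaced by a per-test fixture call that builds an isolated npm instance and returns accessors for its captured output. A minimal sketch of the consuming side, assuming only the `mock-npm` fixture interface visible in these hunks (`config`, `prefixDir`, `joinedOutput`); the `pkg get` command exercised here is purely illustrative, not part of the patch:

const t = require('tap')
const mockNpm = require('../../fixtures/mock-npm')

t.test('isolated per-test npm instance', async t => {
  // Each test gets its own prefix directory and config, so no state
  // leaks between tests and no afterEach/teardown reset code is needed.
  const { npm, joinedOutput } = await mockNpm(t, {
    config: { json: true },   // replaces mutating a shared config object
    prefixDir: {              // replaces `npm.prefix = t.testdir(...)`
      'package.json': JSON.stringify({ name: 'x', version: '1.0.0' }),
    },
  })

  await npm.exec('pkg', ['get', 'version'])  // illustrative command
  t.match(joinedOutput(), /1\.0\.0/, 'output is captured per instance')
})
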
diff --git a/deps/npm/test/lib/commands/init.js b/deps/npm/test/lib/commands/init.js
index d11e0091b7cff3..2d59f47d9842de 100644
--- a/deps/npm/test/lib/commands/init.js
+++ b/deps/npm/test/lib/commands/init.js
@@ -1,108 +1,88 @@
 const t = require('tap')
-const fs = require('fs')
-const { resolve } = require('path')
-const { fake: mockNpm } = require('../../fixtures/mock-npm')
-
-const config = {
-  cache: 'bad-cache-dir',
-  'init-module': '~/.npm-init.js',
-  yes: true,
-}
-const flatOptions = {
-  cache: 'test-config-dir/_cacache',
-  npxCache: 'test-config-dir/_npx',
-}
-const npm = mockNpm({
-  flatOptions,
-  config,
-})
-const mocks = {
-  npmlog: {
-    disableProgress: () => null,
-    enableProgress: () => null,
-  },
-  'proc-log': {
-    info: () => null,
-    pause: () => null,
-    resume: () => null,
-    silly: () => null,
-  },
+const fs = require('fs/promises')
+const { resolve, basename } = require('path')
+const _mockNpm = require('../../fixtures/mock-npm')
+const { cleanTime } = require('../../fixtures/clean-snapshot')
+
+t.cleanSnapshot = cleanTime
+
+const mockNpm = async (t, { noLog, libnpmexec, initPackageJson, packageJson, ...opts } = {}) => {
+  const res = await _mockNpm(t, {
+    ...opts,
+    mocks: {
+      ...(libnpmexec ? { libnpmexec } : {}),
+      ...(initPackageJson ? { 'init-package-json': initPackageJson } : {}),
+      ...(packageJson ? { '@npmcli/package-json': packageJson } : {}),
+    },
+    globals: {
+      // init-package-json prints directly to console.log
+      // this avoids polluting test output with those logs
+      ...(noLog ? { 'console.log': () => {} } : {}),
+    },
+  })
+
+  return res
 }
-const Init = t.mock('../../../lib/commands/init.js', mocks)
-const init = new Init(npm)
-const _cwd = process.cwd()
-const _consolelog = console.log
-const noop = () => {}
-
-t.afterEach(() => {
-  config.yes = true
-  config.package = undefined
-  process.chdir(_cwd)
-  console.log = _consolelog
+
+t.test('displays output', async t => {
+  const { npm, joinedOutput } = await mockNpm(t, {
+    initPackageJson: (...args) => args[3](),
+  })
+
+  await npm.exec('init', [])
+  t.matchSnapshot(joinedOutput(), 'displays helper info')
 })
 
 t.test('classic npm init -y', async t => {
-  npm.localPrefix = t.testdir({})
-
-  // init-package-json prints directly to console.log
-  // this avoids poluting test output with those logs
-  console.log = noop
+  const { npm, prefix } = await mockNpm(t, {
+    config: { yes: true },
+    noLog: true,
+  })
 
-  process.chdir(npm.localPrefix)
-  await init.exec([])
+  await npm.exec('init', [])
 
-  const pkg = require(resolve(npm.localPrefix, 'package.json'))
+  const pkg = require(resolve(prefix, 'package.json'))
   t.equal(pkg.version, '1.0.0')
   t.equal(pkg.license, 'ISC')
 })
 
 t.test('classic interactive npm init', async t => {
-  npm.localPrefix = t.testdir({})
-  config.yes = undefined
+  t.plan(1)
 
-  const Init = t.mock('../../../lib/commands/init.js', {
-    ...mocks,
-    'init-package-json': (path, initFile, config, cb) => {
+  const { npm } = await mockNpm(t, {
+    initPackageJson: (...args) => {
       t.equal(
-        path,
+        args[0],
        resolve(npm.localPrefix),
        'should start init package.json in expected path'
      )
-      cb()
+      args[3]()
    },
  })
-  const init = new Init(npm)
-
-  process.chdir(npm.localPrefix)
-  await init.exec([])
+  await npm.exec('init', [])
 })
 
 t.test('npm init ', async t => {
-  t.plan(3)
-  npm.localPrefix = t.testdir({})
+  t.plan(1)
 
-  const Init = t.mock('../../../lib/commands/init.js', {
-    libnpmexec: ({ args, cache, npxCache }) => {
+  const { npm } = await mockNpm(t, {
+    libnpmexec: ({ args }) => {
      t.same(
        args,
        ['create-react-app@*'],
'should npx with listed packages' ) - t.same(cache, flatOptions.cache) - t.same(npxCache, flatOptions.npxCache) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['react-app']) + await npm.exec('init', ['react-app']) }) t.test('npm init -- other-args', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -110,18 +90,16 @@ t.test('npm init -- other-args', async t => { 'should npm exec with expected args' ) }, + }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['react-app', 'my-path', '--some-option', 'some-value']) + await npm.exec('init', ['react-app', 'my-path', '--some-option', 'some-value']) }) t.test('npm init @scope/name', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -130,17 +108,14 @@ t.test('npm init @scope/name', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['@npmcli/something']) + await npm.exec('init', ['@npmcli/something']) }) t.test('npm init @scope@spec', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -149,17 +124,14 @@ t.test('npm init @scope@spec', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['@npmcli@foo']) + await npm.exec('init', ['@npmcli@foo']) }) t.test('npm init @scope/name@spec', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -168,17 +140,13 @@ t.test('npm init @scope/name@spec', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['@npmcli/something@foo']) + await npm.exec('init', ['@npmcli/something@foo']) }) t.test('npm init git spec', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -187,17 +155,14 @@ t.test('npm init git spec', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['npm/something']) + await npm.exec('init', ['npm/something']) }) t.test('npm init @scope', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -206,18 +171,15 @@ t.test('npm init @scope', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['@npmcli']) + await npm.exec('init', ['@npmcli']) }) t.test('npm init tgz', async t => { - npm.localPrefix = t.testdir({}) + const { npm } = await mockNpm(t) - process.chdir(npm.localPrefix) await t.rejects( - init.exec(['something.tgz']), + npm.exec('init', ['something.tgz']), /Unrecognized initializer: something.tgz/, 'should throw error when using an unsupported spec' ) @@ -225,9 +187,8 @@ t.test('npm init tgz', async t => { t.test('npm init @next', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = 
t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: ({ args }) => { t.same( args, @@ -236,25 +197,19 @@ t.test('npm init @next', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['something@next']) + await npm.exec('init', ['something@next']) }) t.test('npm init exec error', async t => { - npm.localPrefix = t.testdir({}) - - const Init = t.mock('../../../lib/commands/init.js', { - libnpmexec: async ({ args }) => { + const { npm } = await mockNpm(t, { + libnpmexec: async () => { throw new Error('ERROR') }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) await t.rejects( - init.exec(['something@next']), + npm.exec('init', ['something@next']), /ERROR/, 'should exit with exec error' ) @@ -262,9 +217,8 @@ t.test('npm init exec error', async t => { t.test('should not rewrite flatOptions', async t => { t.plan(1) - npm.localPrefix = t.testdir({}) - const Init = t.mock('../../../lib/commands/init.js', { + const { npm } = await mockNpm(t, { libnpmexec: async ({ args }) => { t.same( args, @@ -273,270 +227,217 @@ t.test('should not rewrite flatOptions', async t => { ) }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec(['react-app', 'my-app']) + await npm.exec('init', ['react-app', 'my-app']) }) t.test('npm init cancel', async t => { - t.plan(2) - npm.localPrefix = t.testdir({}) - - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - 'init-package-json': (dir, initFile, config, cb) => cb( + const { npm, logs } = await mockNpm(t, { + initPackageJson: (...args) => args[3]( new Error('canceled') ), - 'proc-log': { - ...mocks['proc-log'], - warn: (title, msg) => { - t.equal(title, 'init', 'should have init title') - t.equal(msg, 'canceled', 'should log canceled') - }, - }, }) - const init = new Init(npm) - process.chdir(npm.localPrefix) - await init.exec([]) + await npm.exec('init', []) + + t.equal(logs.warn[0][0], 'init', 'should have init title') + t.equal(logs.warn[0][1], 'canceled', 'should log canceled') }) t.test('npm init error', async t => { - npm.localPrefix = t.testdir({}) - - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - 'init-package-json': (dir, initFile, config, cb) => cb( + const { npm } = await mockNpm(t, { + initPackageJson: (...args) => args[3]( new Error('Unknown Error') ), }) - const init = new Init(npm) - process.chdir(npm.localPrefix) await t.rejects( - init.exec([]), + npm.exec('init', []), /Unknown Error/, 'should throw error' ) }) -t.test('workspaces', t => { - t.test('no args', async t => { - t.teardown(() => { - npm._mockOutputs.length = 0 - }) - npm._mockOutputs.length = 0 - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'top-level', - }), - }) - - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - 'init-package-json': (dir, initFile, config, cb) => { - t.equal(dir, resolve(npm.localPrefix, 'a'), 'should use the ws path') - cb() +t.test('workspaces', async t => { + await t.test('no args -- yes', async t => { + const { npm, prefix, joinedOutput } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'top-level', + }), }, + config: { workspace: 'a', yes: true }, + noLog: true, }) - const init = new Init(npm) - await init.execWorkspaces([], ['a']) - t.matchSnapshot(npm._mockOutputs, 'should print helper info') - }) - t.test('post workspace-init reify', async t => { - const _consolelog = console.log - console.log = () => null - 
t.teardown(() => { - console.log = _consolelog - npm._mockOutputs.length = 0 - delete npm.flatOptions.workspacesUpdate - }) - npm.started = Date.now() - npm._mockOutputs.length = 0 - npm.flatOptions.workspacesUpdate = true - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'top-level', - }), - }) + await npm.exec('init', []) - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - 'init-package-json': (dir, initFile, config, cb) => { - t.equal(dir, resolve(npm.localPrefix, 'a'), 'should use the ws path') - return require('init-package-json')(dir, initFile, config, cb) - }, - }) - const init = new Init(npm) - await init.execWorkspaces([], ['a']) - const output = npm._mockOutputs.map(arr => arr.map(i => i.replace(/[0-9]*m?s$/, '100ms'))) - t.matchSnapshot(output, 'should print helper info') - const lockFilePath = resolve(npm.localPrefix, 'package-lock.json') - const lockFile = fs.readFileSync(lockFilePath, { encoding: 'utf8' }) - t.matchSnapshot(lockFile, 'should reify tree on init ws complete') - }) - - t.test('no args, existing folder', async t => { - t.teardown(() => { - npm._mockOutputs.length = 0 - }) - // init-package-json prints directly to console.log - // this avoids poluting test output with those logs - console.log = noop + const pkg = require(resolve(prefix, 'a/package.json')) + t.equal(pkg.name, 'a') + t.equal(pkg.version, '1.0.0') + t.equal(pkg.license, 'ISC') - npm.localPrefix = t.testdir({ - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - }, - 'package.json': JSON.stringify({ - name: 'top-level', - workspaces: ['packages/a'], - }), - }) + t.matchSnapshot(joinedOutput(), 'should print helper info') - await init.execWorkspaces([], ['packages/a']) - - t.matchSnapshot(npm._mockOutputs, 'should print helper info') + const lock = require(resolve(prefix, 'package-lock.json')) + t.ok(lock.packages.a) }) - t.test('with arg but missing workspace folder', async t => { - t.teardown(() => { - npm._mockOutputs.length = 0 - }) - // init-package-json prints directly to console.log - // this avoids poluting test output with those logs - console.log = noop - - npm.localPrefix = t.testdir({ - node_modules: { - a: t.fixture('symlink', '../a'), - 'create-index': { - 'index.js': ``, + await t.test('no args, existing folder', async t => { + const { npm, prefix } = await mockNpm(t, { + prefixDir: { + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '2.0.0', + }), + }, }, - }, - a: { 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', + name: 'top-level', + workspaces: ['packages/a'], }), }, - 'package.json': JSON.stringify({ - name: 'top-level', - }), + config: { workspace: 'packages/a', yes: true }, + noLog: true, }) - await init.execWorkspaces([], ['packages/a']) + await npm.exec('init', []) - t.matchSnapshot(npm._mockOutputs, 'should print helper info') + const pkg = require(resolve(prefix, 'packages/a/package.json')) + t.equal(pkg.name, 'a') + t.equal(pkg.version, '2.0.0') + t.equal(pkg.license, 'ISC') }) - t.test('fail parsing top-level package.json to set workspace', async t => { - // init-package-json prints directly to console.log - // this avoids poluting test output with those logs - console.log = noop - - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'top-level', - }), - }) - - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - '@npmcli/package-json': { + await t.test('fail parsing top-level package.json to set 
workspace', async t => { + const { npm } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'top-level', + }), + }, + packageJson: { async load () { throw new Error('ERR') }, }, + config: { workspace: 'a', yes: true }, + noLog: true, }) - const init = new Init(npm) await t.rejects( - init.execWorkspaces([], ['a']), + npm.exec('init', []), /ERR/, 'should exit with error' ) }) - t.test('missing top-level package.json when settting workspace', async t => { - // init-package-json prints directly to console.log - // this avoids poluting test output with those logs - console.log = noop - - npm.localPrefix = t.testdir({}) - - const Init = require('../../../lib/commands/init.js') - const init = new Init(npm) + await t.test('missing top-level package.json when setting workspace', async t => { + const { npm, logs } = await mockNpm(t, { + config: { workspace: 'a' }, + }) await t.rejects( - init.execWorkspaces([], ['a']), + npm.exec('init', []), { code: 'ENOENT' }, 'should exit with missing package.json file error' ) + + t.equal(logs.warn[0][0], 'Missing package.json. Try with `--include-workspace-root`.') + }) + + await t.test('bad package.json when setting workspace', async t => { + const { npm, logs } = await mockNpm(t, { + prefixDir: { + 'package.json': '{{{{', + }, + config: { workspace: 'a' }, + }) + + await t.rejects( + npm.exec('init', []), + { code: 'EJSONPARSE' }, + 'should exit with parse file error' + ) + + t.strictSame(logs.warn, []) }) - t.test('using args', async t => { - npm.localPrefix = t.testdir({ - b: { + await t.test('using args - no package.json', async t => { + const { npm, prefix } = await mockNpm(t, { + prefixDir: { + b: { + 'package.json': JSON.stringify({ + name: 'b', + }), + }, 'package.json': JSON.stringify({ - name: 'b', + name: 'top-level', + workspaces: ['b'], }), }, - 'package.json': JSON.stringify({ - name: 'top-level', - workspaces: ['b'], - }), + // Important: exec did not write a package.json here + libnpmexec: async () => {}, + config: { workspace: 'a', yes: true }, }) - const Init = t.mock('../../../lib/commands/init.js', { - ...mocks, - libnpmexec: ({ args, path }) => { - t.same( - args, - ['create-react-app@*'], - 'should npx with listed packages' - ) - t.same( - path, - resolve(npm.localPrefix, 'a'), - 'should use workspace path' - ) - fs.writeFileSync( - resolve(npm.localPrefix, 'a/package.json'), - JSON.stringify({ name: 'a' }) - ) + await npm.exec('init', ['react-app']) + + const pkg = require(resolve(prefix, 'package.json')) + t.strictSame(pkg.workspaces, ['b'], 'pkg workspaces did not get updated') + }) + + await t.test('init template - bad package.json', async t => { + const { npm, prefix } = await mockNpm(t, { + prefixDir: { + b: { + 'package.json': JSON.stringify({ + name: 'b', + }), + }, + 'package.json': JSON.stringify({ + name: 'top-level', + workspaces: ['b'], + }), + }, + initPackageJson: async (...args) => { + const [dir] = args + if (dir.endsWith('c')) { + await fs.writeFile(resolve(dir, 'package.json'), JSON.stringify({ + name: basename(dir), + }), 'utf-8') + } + args[3]() }, + config: { yes: true, workspace: ['a', 'c'] }, }) - const init = new Init(npm) - await init.execWorkspaces(['react-app'], ['a']) - }) + await npm.exec('init', []) - t.end() -}) + const pkg = require(resolve(prefix, 'package.json')) + t.strictSame(pkg.workspaces, ['b', 'c']) -t.test('npm init workspces with root', async t => { - t.teardown(() => { - npm._mockOutputs.length = 0 + const lock = require(resolve(prefix, 'package-lock.json')) + 
t.notOk(lock.packages.a) }) - npm.localPrefix = t.testdir({}) - npm.flatOptions.includeWorkspaceRoot = true - // init-package-json prints directly to console.log - // this avoids poluting test output with those logs - console.log = noop + t.test('workspace root', async t => { + const { npm } = await mockNpm(t, { + config: { workspace: 'packages/a', 'include-workspace-root': true, yes: true }, + noLog: true, + }) + + await npm.exec('init', []) - process.chdir(npm.localPrefix) - await init.execWorkspaces([], ['packages/a']) - const pkg = require(resolve(npm.localPrefix, 'package.json')) - t.equal(pkg.version, '1.0.0') - t.equal(pkg.license, 'ISC') - t.matchSnapshot(npm._mockOutputs, 'does not print helper info') + const pkg = require(resolve(npm.localPrefix, 'package.json')) + t.equal(pkg.version, '1.0.0') + t.equal(pkg.license, 'ISC') + t.strictSame(pkg.workspaces, ['packages/a']) + + const ws = require(resolve(npm.localPrefix, 'packages/a/package.json')) + t.equal(ws.version, '1.0.0') + t.equal(ws.license, 'ISC') + }) }) diff --git a/deps/npm/test/lib/commands/install.js b/deps/npm/test/lib/commands/install.js index 4c3251f52fbc04..1be42d6e6125f0 100644 --- a/deps/npm/test/lib/commands/install.js +++ b/deps/npm/test/lib/commands/install.js @@ -1,9 +1,5 @@ const t = require('tap') - -const { load: _loadMockNpm } = require('../../fixtures/mock-npm') - -// Make less churn in the test to pass in mocks only signature -const loadMockNpm = (t, mocks) => _loadMockNpm(t, { mocks }) +const { load: loadMockNpm } = require('../../fixtures/mock-npm') t.test('exec commands', async t => { await t.test('with args, dev=true', async t => { @@ -13,29 +9,32 @@ t.test('exec commands', async t => { let ARB_OBJ = null const { npm } = await loadMockNpm(t, { - '@npmcli/run-script': ({ event }) => { - SCRIPTS.push(event) - }, - '@npmcli/arborist': function (args) { - ARB_ARGS = args - ARB_OBJ = this - this.reify = () => { - REIFY_CALLED = true - } + mocks: { + '@npmcli/run-script': ({ event }) => { + SCRIPTS.push(event) + }, + '@npmcli/arborist': function (args) { + ARB_ARGS = args + ARB_OBJ = this + this.reify = () => { + REIFY_CALLED = true + } + }, + '{LIB}/utils/reify-finish.js': (_, arb) => { + if (arb !== ARB_OBJ) { + throw new Error('got wrong object passed to reify-finish') + } + }, }, - '../../lib/utils/reify-finish.js': (npm, arb) => { - if (arb !== ARB_OBJ) { - throw new Error('got wrong object passed to reify-finish') - } + config: { + // This is here because CI calls tests with `--ignore-scripts`, which config + // picks up from argv + 'ignore-scripts': false, + 'audit-level': 'low', + dev: true, }, }) - // This is here because CI calls tests with `--ignore-scripts`, which config - // picks up from argv - npm.config.set('ignore-scripts', false) - npm.config.set('audit-level', 'low') - npm.config.set('dev', true) - await npm.exec('install', ['fizzbuzz']) t.match( @@ -54,24 +53,28 @@ t.test('exec commands', async t => { let ARB_OBJ = null const { npm } = await loadMockNpm(t, { - '@npmcli/run-script': ({ event }) => { - SCRIPTS.push(event) - }, - '@npmcli/arborist': function (args) { - ARB_ARGS = args - ARB_OBJ = this - this.reify = () => { - REIFY_CALLED = true - } + mocks: { + '@npmcli/run-script': ({ event }) => { + SCRIPTS.push(event) + }, + '@npmcli/arborist': function (args) { + ARB_ARGS = args + ARB_OBJ = this + this.reify = () => { + REIFY_CALLED = true + } + }, + '{LIB}/utils/reify-finish.js': (_, arb) => { + if (arb !== ARB_OBJ) { + throw new Error('got wrong object passed to reify-finish') + } 
+ }, }, - '../../lib/utils/reify-finish.js': (npm, arb) => { - if (arb !== ARB_OBJ) { - throw new Error('got wrong object passed to reify-finish') - } + config: { + }, }) - npm.config.set('ignore-scripts', false) await npm.exec('install', []) t.match(ARB_ARGS, { global: false, path: npm.prefix }) t.equal(REIFY_CALLED, true, 'called reify') @@ -90,17 +93,22 @@ t.test('exec commands', async t => { const SCRIPTS = [] let REIFY_CALLED = false const { npm } = await loadMockNpm(t, { - '../../lib/utils/reify-finish.js': async () => {}, - '@npmcli/run-script': ({ event }) => { - SCRIPTS.push(event) + mocks: { + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/run-script': ({ event }) => { + SCRIPTS.push(event) + }, + '@npmcli/arborist': function () { + this.reify = () => { + REIFY_CALLED = true + } + }, }, - '@npmcli/arborist': function () { - this.reify = () => { - REIFY_CALLED = true - } + config: { + 'ignore-scripts': true, }, }) - npm.config.set('ignore-scripts', true) + await npm.exec('install', []) t.equal(REIFY_CALLED, true, 'called reify') t.strictSame(SCRIPTS, [], 'no scripts when adding dep') @@ -111,18 +119,22 @@ t.test('exec commands', async t => { let ARB_ARGS = null let REIFY_CALLED const { npm } = await loadMockNpm(t, { - '@npmcli/run-script': ({ event }) => { - SCRIPTS.push(event) + mocks: { + '@npmcli/run-script': ({ event }) => { + SCRIPTS.push(event) + }, + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function (args) { + ARB_ARGS = args + this.reify = () => { + REIFY_CALLED = true + } + }, }, - '../../lib/utils/reify-finish.js': async () => {}, - '@npmcli/arborist': function (args) { - ARB_ARGS = args - this.reify = () => { - REIFY_CALLED = true - } + config: { + global: true, }, }) - npm.config.set('global', true) await npm.exec('install', []) t.match( ARB_ARGS, @@ -130,18 +142,22 @@ t.test('exec commands', async t => { ) t.equal(REIFY_CALLED, true, 'called reify') t.strictSame(SCRIPTS, [], 'no scripts when installing globally') - t.equal(npm.config.get('audit', 'cli'), false) + t.notOk(npm.config.get('audit', 'cli')) }) await t.test('should not install invalid global package name', async t => { const { npm } = await loadMockNpm(t, { - '@npmcli/run-script': () => {}, - '../../lib/utils/reify-finish.js': async () => {}, - '@npmcli/arborist': function (args) { - throw new Error('should not reify') + mocks: { + '@npmcli/run-script': () => {}, + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function (args) { + throw new Error('should not reify') + }, + }, + config: { + global: true, }, }) - npm.config.set('global', true) await t.rejects( npm.exec('install', ['']), /Usage:/, @@ -151,41 +167,49 @@ t.test('exec commands', async t => { await t.test('npm i -g npm engines check success', async t => { const { npm } = await loadMockNpm(t, { - '../../lib/utils/reify-finish.js': async () => {}, - '@npmcli/arborist': function () { - this.reify = () => {} - }, - pacote: { - manifest: () => { - return { - version: '100.100.100', - engines: { - node: '>1', - }, - } + mocks: { + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function () { + this.reify = () => {} + }, + pacote: { + manifest: () => { + return { + version: '100.100.100', + engines: { + node: '>1', + }, + } + }, }, }, + config: { + global: true, + }, }) - npm.config.set('global', true) await npm.exec('install', ['npm']) t.ok('No exceptions happen') }) await t.test('npm i -g npm engines check failure', async t => { const { npm } = await loadMockNpm(t, { - 
pacote: { - manifest: () => { - return { - _id: 'npm@1.2.3', - version: '100.100.100', - engines: { - node: '>1000', - }, - } + mocks: { + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, }, }, + config: { + global: true, + }, }) - npm.config.set('global', true) await t.rejects( npm.exec('install', ['npm']), { @@ -205,43 +229,55 @@ t.test('exec commands', async t => { await t.test('npm i -g npm engines check failure forced override', async t => { const { npm } = await loadMockNpm(t, { - '../../lib/utils/reify-finish.js': async () => {}, - '@npmcli/arborist': function () { - this.reify = () => {} - }, - pacote: { - manifest: () => { - return { - _id: 'npm@1.2.3', - version: '100.100.100', - engines: { - node: '>1000', - }, - } + mocks: { + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function () { + this.reify = () => {} + }, + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, }, }, + config: { + global: true, + force: true, + }, }) - npm.config.set('global', true) - npm.config.set('force', true) await npm.exec('install', ['npm']) t.ok('Does not throw') }) await t.test('npm i -g npm@version engines check failure', async t => { const { npm } = await loadMockNpm(t, { - pacote: { - manifest: () => { - return { - _id: 'npm@1.2.3', - version: '100.100.100', - engines: { - node: '>1000', - }, - } + mocks: { + '{LIB}/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function () { + this.reify = () => {} + }, + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, }, }, + config: { + global: true, + }, }) - npm.config.set('global', true) await t.rejects( npm.exec('install', ['npm@100']), { @@ -261,138 +297,129 @@ t.test('exec commands', async t => { }) t.test('completion', async t => { - const cwd = process.cwd() - const testdir = t.testdir({ - arborist: { - 'package.json': '{}', + const mockComp = async (t, { noChdir } = {}) => loadMockNpm(t, { + command: 'install', + prefixDir: { + arborist: { + 'package.json': '{}', + }, + 'arborist.txt': 'just a file', + 'other-dir': { a: 'a' }, }, - 'arborist.txt': 'just a file', - other: {}, - }) - t.afterEach(() => { - process.chdir(cwd) + ...(noChdir ? 
{ chdir: false } : {}), }) - t.test('completion to folder - has a match', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('completion to folder - has a match', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: './ar' }) t.strictSame(res, ['arborist'], 'package dir match') }) - t.test('completion to folder - invalid dir', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') + await t.test('completion to folder - invalid dir', async t => { + const { install } = await mockComp(t, { noChdir: true }) const res = await install.completion({ partialWord: '/does/not/exist' }) t.strictSame(res, [], 'invalid dir: no matching') }) - t.test('completion to folder - no matches', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('completion to folder - no matches', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: './pa' }) t.strictSame(res, [], 'no name match') }) - t.test('completion to folder - match is not a package', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('completion to folder - match is not a package', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: './othe' }) t.strictSame(res, [], 'no name match') }) - t.test('completion to url', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('completion to url', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: 'http://path/to/url' }) t.strictSame(res, []) }) - t.test('no /', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('no /', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: 'toto' }) t.notOk(res) }) - t.test('only /', async t => { - const { npm } = await _loadMockNpm(t, { load: false }) - const install = await npm.cmd('install') - process.chdir(testdir) + await t.test('only /', async t => { + const { install } = await mockComp(t) const res = await install.completion({ partialWord: '/' }) t.strictSame(res, []) }) }) -t.test('location detection and audit', async () => { - t.test('audit false without package.json', async t => { - const { npm } = await _loadMockNpm(t, { +t.test('location detection and audit', async (t) => { + await t.test('audit false without package.json', async t => { + const { npm } = await loadMockNpm(t, { prefixDir: { // no package.json 'readme.txt': 'just a file', - other: {}, + 'other-dir': { a: 'a' }, }, }) const install = await npm.cmd('install') t.equal(install.npm.config.get('location'), 'user') t.equal(install.npm.config.get('audit'), false) }) - t.test('audit true with package.json', async t => { - const { npm } = await _loadMockNpm(t, { + + await t.test('audit true with package.json', async t => { + const { npm } = await loadMockNpm(t, { prefixDir: { 'package.json': '{ "name": "testpkg", "version": "1.0.0" }', 'readme.txt': 'just a file', }, }) const install = await 
npm.cmd('install') - t.equal(install.npm.config.get('location'), 'project') + t.equal(install.npm.config.get('location'), 'user') t.equal(install.npm.config.get('audit'), true) }) - t.test('audit true without package.json when set', async t => { - const { npm } = await _loadMockNpm(t, { + + await t.test('audit true without package.json when set', async t => { + const { npm } = await loadMockNpm(t, { prefixDir: { // no package.json 'readme.txt': 'just a file', - other: {}, + 'other-dir': { a: 'a' }, }, config: { - audit: { value: true, where: 'cli' }, + audit: true, }, }) const install = await npm.cmd('install') t.equal(install.npm.config.get('location'), 'user') t.equal(install.npm.config.get('audit'), true) }) - t.test('audit true in root config without package.json', async t => { - const { npm } = await _loadMockNpm(t, { + + await t.test('audit true in root config without package.json', async t => { + const { npm } = await loadMockNpm(t, { prefixDir: { // no package.json 'readme.txt': 'just a file', - other: {}, - }, - config: { - audit: { value: true, where: 'builtin' }, + 'other-dir': { a: 'a' }, }, + // change npmRoot to get it to use a builtin rc file + otherDirs: { npmrc: 'audit=true' }, + npm: ({ other }) => ({ npmRoot: other }), }) const install = await npm.cmd('install') t.equal(install.npm.config.get('location'), 'user') t.equal(install.npm.config.get('audit'), true) }) - t.test('test for warning when --global & --audit', async t => { - const { npm, logs } = await _loadMockNpm(t, { + + await t.test('test for warning when --global & --audit', async t => { + const { npm, logs } = await loadMockNpm(t, { prefixDir: { // no package.json 'readme.txt': 'just a file', - other: {}, + 'other-dir': { a: 'a' }, }, config: { - audit: { value: true, where: 'cli' }, - global: { value: true, where: 'cli' }, + audit: true, + global: true, }, }) const install = await npm.cmd('install') diff --git a/deps/npm/test/lib/commands/link.js b/deps/npm/test/lib/commands/link.js index d908fa025fbde7..feae75a4b9096f 100644 --- a/deps/npm/test/lib/commands/link.js +++ b/deps/npm/test/lib/commands/link.js @@ -1,95 +1,86 @@ const t = require('tap') const { resolve, join } = require('path') const fs = require('fs') - const Arborist = require('@npmcli/arborist') -const { fake: mockNpm, load: fullMockNpm } = require('../../fixtures/mock-npm') - -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') -} - -t.cleanSnapshot = (str) => redactCwd(str) - -const config = {} -const npm = mockNpm({ - globalDir: null, - prefix: null, - config, -}) +const { cleanCwd } = require('../../fixtures/clean-snapshot.js') +const mockNpm = require('../../fixtures/mock-npm') + +t.cleanSnapshot = (str) => cleanCwd(str) + +const mockLink = async (t, { globalPrefixDir, ...opts } = {}) => { + const mock = await mockNpm(t, { + ...opts, + globalPrefixDir, + mocks: { + ...opts.mocks, + '{LIB}/utils/reify-output.js': async () => {}, + }, + }) -const printLinks = async (opts) => { - let res = '' - const arb = new Arborist(opts) - const tree = await arb.loadActual() - const linkedItems = [...tree.inventory.values()] - .sort((a, b) => a.pkgid.localeCompare(b.pkgid, 'en')) - for (const item of linkedItems) { - if (item.isLink) { - res += `${item.path} -> ${item.target.path}\n` + const printLinks = async ({ global = false } = {}) => { + let res = '' + const arb = new Arborist(global ? 
{ + path: resolve(mock.npm.globalDir, '..'), + global: true, + } : { path: mock.prefix }) + const tree = await arb.loadActual() + const linkedItems = [...tree.inventory.values()] + .sort((a, b) => a.pkgid.localeCompare(b.pkgid, 'en')) + for (const item of linkedItems) { + if (item.isLink) { + res += `${item.path} -> ${item.target.path}\n` + } } + return res } - return res -} -const mocks = { - '../../../lib/utils/reify-output.js': async () => {}, + return { + ...mock, + link: { + exec: (args = []) => mock.npm.exec('link', args), + completion: (o) => mock.npm.cmd('link').then(c => c.completion(o)), + }, + printLinks, + } } -const Link = t.mock('../../../lib/commands/link.js', mocks) -const link = new Link(npm) - t.test('link to globalDir when in current working dir of pkg and no args', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, + const { link, printLinks } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, }, }, - 'test-pkg-link': { + prefixDir: { 'package.json': JSON.stringify({ name: 'test-pkg-link', version: '1.0.0', }), }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'test-pkg-link') - await link.exec([]) - const links = await printLinks({ - path: resolve(npm.globalDir, '..'), - global: true, - }) - t.matchSnapshot(links, 'should create a global link to current pkg') + await link.exec() + t.matchSnapshot(await printLinks({ global: true }), 'should create a global link to current pkg') }) t.test('link ws to globalDir when workspace specified and no args', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, + const { link, printLinks } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, }, }, - 'test-pkg-link': { + prefixDir: { 'package.json': JSON.stringify({ name: 'test-pkg-link', version: '1.0.0', @@ -104,77 +95,68 @@ t.test('link ws to globalDir when workspace specified and no args', async t => { }, }, }, - }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'test-pkg-link') - npm.localPrefix = resolve(testdir, 'test-pkg-link') - - // link.workspaces = ['a'] - // link.workspacePaths = [resolve(testdir, 'test-pkg-link/packages/a')] - await link.execWorkspaces([], ['a']) - const links = await printLinks({ - path: resolve(npm.globalDir, '..'), - global: true, + config: { workspace: 'a' }, }) - t.matchSnapshot(links, 'should create a global link to current pkg') + await link.exec() + t.matchSnapshot(await printLinks({ global: true }), 'should create a global link to current pkg') }) t.test('link global linked pkg to local nm when using args', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - '@myscope': { - foo: { - 'package.json': JSON.stringify({ - name: '@myscope/foo', - version: '1.0.0', - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: '@myscope/bar', - version: '1.0.0', - }), - }, - linked: t.fixture('symlink', '../../../../scoped-linked'), - }, - a: { + const { link, printLinks } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + '@myscope': { + foo: { 
'package.json': JSON.stringify({ - name: 'a', + name: '@myscope/foo', version: '1.0.0', }), }, - b: { + bar: { 'package.json': JSON.stringify({ - name: 'b', + name: '@myscope/bar', version: '1.0.0', }), }, - 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link'), + linked: t.fixture('symlink', '../../../other/scoped-linked'), + }, + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), }, + 'test-pkg-link': t.fixture('symlink', '../../other/test-pkg-link'), }, }, - 'test-pkg-link': { - 'package.json': JSON.stringify({ - name: 'test-pkg-link', - version: '1.0.0', - }), - }, - 'link-me-too': { - 'package.json': JSON.stringify({ - name: 'link-me-too', - version: '1.0.0', - }), - }, - 'scoped-linked': { - 'package.json': JSON.stringify({ - name: '@myscope/linked', - version: '1.0.0', - }), + otherDirs: { + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + }), + }, + 'link-me-too': { + 'package.json': JSON.stringify({ + name: 'link-me-too', + version: '1.0.0', + }), + }, + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, }, - 'my-project': { + prefixDir: { 'package.json': JSON.stringify({ name: 'my-project', version: '1.0.0', @@ -192,11 +174,6 @@ t.test('link global linked pkg to local nm when using args', async t => { }, }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'my-project') - - const _cwd = process.cwd() - process.chdir(npm.prefix) // installs examples for: // - test-pkg-link: pkg linked to globalDir from local fs @@ -209,71 +186,67 @@ t.test('link global linked pkg to local nm when using args', async t => { '@myscope/linked', '@myscope/bar', 'a', - 'file:../link-me-too', + 'file:../other/link-me-too', ]) - process.chdir(_cwd) - const links = await printLinks({ - path: npm.prefix, - }) - t.matchSnapshot(links, 'should create a local symlink to global pkg') + t.matchSnapshot(await printLinks(), 'should create a local symlink to global pkg') }) t.test('link global linked pkg to local workspace using args', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - '@myscope': { - foo: { - 'package.json': JSON.stringify({ - name: '@myscope/foo', - version: '1.0.0', - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: '@myscope/bar', - version: '1.0.0', - }), - }, - linked: t.fixture('symlink', '../../../../scoped-linked'), - }, - a: { + const { link, printLinks } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + '@myscope': { + foo: { 'package.json': JSON.stringify({ - name: 'a', + name: '@myscope/foo', version: '1.0.0', }), }, - b: { + bar: { 'package.json': JSON.stringify({ - name: 'b', + name: '@myscope/bar', version: '1.0.0', }), }, - 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link'), + linked: t.fixture('symlink', '../../../other/scoped-linked'), }, + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, + 'test-pkg-link': t.fixture('symlink', '../../other/test-pkg-link'), }, }, - 'test-pkg-link': { - 'package.json': JSON.stringify({ - name: 'test-pkg-link', - version: '1.0.0', - }), - }, - 'link-me-too': { - 'package.json': JSON.stringify({ - name: 'link-me-too', - version: '1.0.0', - }), - }, - 
'scoped-linked': { - 'package.json': JSON.stringify({ - name: '@myscope/linked', - version: '1.0.0', - }), + otherDirs: { + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + }), + }, + 'link-me-too': { + 'package.json': JSON.stringify({ + name: 'link-me-too', + version: '1.0.0', + }), + }, + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, }, - 'my-project': { + prefixDir: { 'package.json': JSON.stringify({ name: 'my-project', version: '1.0.0', @@ -299,13 +272,8 @@ t.test('link global linked pkg to local workspace using args', async t => { }, }, }, + config: { workspace: 'x' }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'my-project') - npm.localPrefix = resolve(testdir, 'my-project') - - const _cwd = process.cwd() - process.chdir(npm.prefix) // installs examples for: // - test-pkg-link: pkg linked to globalDir from local fs @@ -313,143 +281,113 @@ t.test('link global linked pkg to local workspace using args', async t => { // - @myscope/bar: prev installed scoped package available in globalDir // - a: prev installed package available in globalDir // - file:./link-me-too: pkg that needs to be reified in globalDir first - await link.execWorkspaces([ + await link.exec([ 'test-pkg-link', '@myscope/linked', '@myscope/bar', 'a', - 'file:../link-me-too', - ], ['x']) - process.chdir(_cwd) - - const links = await printLinks({ - path: npm.prefix, - }) + 'file:../other/link-me-too', + ]) - t.matchSnapshot(links, 'should create a local symlink to global pkg') + t.matchSnapshot(await printLinks(), 'should create a local symlink to global pkg') }) t.test('link pkg already in global space', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - '@myscope': { - linked: t.fixture('symlink', '../../../../scoped-linked'), - }, + const { npm, link, printLinks, prefix } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + '@myscope': { + linked: t.fixture('symlink', '../../../other/scoped-linked'), }, }, }, - 'scoped-linked': { - 'package.json': JSON.stringify({ - name: '@myscope/linked', - version: '1.0.0', - }), + otherDirs: { + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, }, - 'my-project': { + prefixDir: { 'package.json': JSON.stringify({ name: 'my-project', version: '1.0.0', }), }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'my-project') + // XXX: how to convert this to a config that gets passed in? 
npm.config.find = () => 'default' - const _cwd = process.cwd() - process.chdir(npm.prefix) - - // installs examples for: - // - test-pkg-link: pkg linked to globalDir from local fs - // - @myscope/linked: scoped pkg linked to globalDir from local fs - // - @myscope/bar: prev installed scoped package available in globalDir - // - a: prev installed package available in globalDir - // - file:./link-me-too: pkg that needs to be reified in globalDir first await link.exec(['@myscope/linked']) - process.chdir(_cwd) - npm.config.find = () => null - - const links = await printLinks({ - path: npm.prefix, - }) t.equal( - require(resolve(testdir, 'my-project', 'package.json')).dependencies, + require(resolve(prefix, 'package.json')).dependencies, undefined, 'should not save to package.json upon linking' ) - t.matchSnapshot(links, 'should create a local symlink to global pkg') + t.matchSnapshot(await printLinks(), 'should create a local symlink to global pkg') }) t.test('link pkg already in global space when prefix is a symlink', async t => { - const testdir = t.testdir({ - 'global-prefix': t.fixture('symlink', './real-global-prefix'), - 'real-global-prefix': { - lib: { + const { npm, link, printLinks, prefix } = await mockLink(t, { + globalPrefixDir: t.fixture('symlink', './other/real-global-prefix'), + otherDirs: { + // mockNpm does this automatically but only for globalPrefixDir so here we + // need to do it manually since we are making a symlink somewhere else + 'real-global-prefix': mockNpm.setGlobalNodeModules({ node_modules: { '@myscope': { - linked: t.fixture('symlink', '../../../../scoped-linked'), + linked: t.fixture('symlink', '../../../scoped-linked'), }, }, - }, - }, - 'scoped-linked': { - 'package.json': JSON.stringify({ - name: '@myscope/linked', - version: '1.0.0', }), + 'scoped-linked': { + 'package.json': JSON.stringify({ + name: '@myscope/linked', + version: '1.0.0', + }), + }, }, - 'my-project': { + prefixDir: { 'package.json': JSON.stringify({ name: 'my-project', version: '1.0.0', }), }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'my-project') npm.config.find = () => 'default' - const _cwd = process.cwd() - process.chdir(npm.prefix) - await link.exec(['@myscope/linked']) - process.chdir(_cwd) - npm.config.find = () => null - - const links = await printLinks({ - path: npm.prefix, - }) t.equal( - require(resolve(testdir, 'my-project', 'package.json')).dependencies, + require(resolve(prefix, 'package.json')).dependencies, undefined, 'should not save to package.json upon linking' ) - t.matchSnapshot(links, 'should create a local symlink to global pkg') + t.matchSnapshot(await printLinks(), 'should create a local symlink to global pkg') }) t.test('should not prune dependencies when linking packages', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - linked: t.fixture('symlink', '../../../linked'), - }, + const { link, prefix } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + linked: t.fixture('symlink', '../../other/linked'), }, }, - linked: { - 'package.json': JSON.stringify({ - name: 'linked', - version: '1.0.0', - }), + otherDirs: { + linked: { + 'package.json': JSON.stringify({ + name: 'linked', + version: '1.0.0', + }), + }, }, - 'my-project': { + prefixDir: { node_modules: { foo: { 'package.json': JSON.stringify({ name: 'foo', version: '1.0.0' }), @@ -461,37 +399,29 @@ t.test('should not prune dependencies when linking packages', async t => { }), }, }) - 
npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'my-project') - - const _cwd = process.cwd() - process.chdir(npm.prefix) await link.exec(['linked']) t.ok( - fs.statSync(resolve(testdir, 'my-project/node_modules/foo')), + fs.statSync(resolve(prefix, 'node_modules/foo')), 'should not prune any extraneous dep when running npm link' ) - process.chdir(_cwd) }) t.test('completion', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - foo: {}, - bar: {}, - lorem: {}, - ipsum: {}, - }, + const { link } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + foo: {}, + bar: {}, + lorem: {}, + ipsum: {}, }, }, }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') const words = await link.completion({}) + t.same( words, ['bar', 'foo', 'ipsum', 'lorem'], @@ -500,13 +430,9 @@ t.test('completion', async t => { }) t.test('--global option', async t => { - t.teardown(() => { - npm.config = _config + const { link } = await mockLink(t, { + config: { global: true }, }) - const _config = npm.config - npm.config = { get () { - return true - } } await t.rejects( link.exec([]), /link should never be --global/, @@ -515,44 +441,37 @@ t.test('--global option', async t => { }) t.test('hash character in working directory path', async t => { - const testdir = t.testdir({ - 'global-prefix': { - lib: { - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, + const { link, printLinks } = await mockLink(t, { + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, }, }, - 'i_like_#_in_my_paths': { - 'test-pkg-link': { - 'package.json': JSON.stringify({ - name: 'test-pkg-link', - version: '1.0.0', - }), + otherDirs: { + 'i_like_#_in_my_paths': { + 'test-pkg-link': { + 'package.json': JSON.stringify({ + name: 'test-pkg-link', + version: '1.0.0', + }), + }, }, }, + chdir: ({ other }) => join(other, 'i_like_#_in_my_paths', 'test-pkg-link'), }) - npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules') - npm.prefix = resolve(testdir, 'i_like_#_in_my_paths', 'test-pkg-link') - - link.workspacePaths = null await link.exec([]) - const links = await printLinks({ - path: resolve(npm.globalDir, '..'), - global: true, - }) - t.matchSnapshot(links, 'should create a global link to current pkg, even within path with hash') + t.matchSnapshot(await printLinks({ global: true }), + 'should create a global link to current pkg, even within path with hash') }) t.test('test linked installed as symlinks', async t => { - // fakeMock is insufficient due to lack of flatOptions - const { npm } = await fullMockNpm(t, { + const { link, prefix, printLinks } = await mockLink(t, { otherDirs: { mylink: { 'package.json': JSON.stringify({ @@ -563,20 +482,13 @@ t.test('test linked installed as symlinks', async t => { }, }) - const _cwd = process.cwd() - process.chdir(npm.prefix) - - await npm.exec('link', [ + await link.exec([ join('file:../other/mylink'), ]) - process.chdir(_cwd) - const links = await printLinks({ - path: npm.prefix, - }) - t.ok(fs.lstatSync(join(npm.prefix, 'node_modules', 'mylink')).isSymbolicLink(), + t.ok(fs.lstatSync(join(prefix, 'node_modules', 'mylink')).isSymbolicLink(), 'linked path should by symbolic link' ) - t.matchSnapshot(links, 'linked package should not be installed') + t.matchSnapshot(await printLinks(), 'linked package should not be installed') }) diff --git 
a/deps/npm/test/lib/commands/ll.js b/deps/npm/test/lib/commands/ll.js index c39d4338120d46..0977ef4ac5eae9 100644 --- a/deps/npm/test/lib/commands/ll.js +++ b/deps/npm/test/lib/commands/ll.js @@ -1,4 +1,5 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') t.test('ll', t => { t.plan(3) @@ -13,8 +14,8 @@ t.test('ll', t => { } } - const LL = t.mock('../../../lib/commands/ll.js', { - '../../../lib/commands/ls.js': LS, + const LL = tmock(t, '{LIB}/commands/ll.js', { + '{LIB}/commands/ls.js': LS, }) const ll = new LL({ config: { diff --git a/deps/npm/test/lib/commands/logout.js b/deps/npm/test/lib/commands/logout.js index 73fe8028c7853b..0043bb4c57922a 100644 --- a/deps/npm/test/lib/commands/logout.js +++ b/deps/npm/test/lib/commands/logout.js @@ -1,69 +1,53 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const config = { - registry: 'https://registry.npmjs.org/', - scope: '', -} -const flatOptions = { - registry: 'https://registry.npmjs.org/', - scope: '', -} -const npm = mockNpm({ config, flatOptions }) -let result = null - -const mockLogout = (otherMocks) => { - const Logout = t.mock('../../../lib/commands/logout.js', { - 'npm-registry-fetch': (url, opts) => { - result = { url, opts } +const fs = require('fs/promises') +const npmFetch = require('npm-registry-fetch') +const mockNpm = require('../../fixtures/mock-npm') +const { join } = require('path') + +const mockLogout = async (t, { userRc = [], ...npmOpts } = {}) => { + let result = null + + const mock = await mockNpm(t, { + mocks: { + // XXX: refactor to use mock registry + 'npm-registry-fetch': Object.assign(async (url, opts) => { + result = { url, opts } + }, npmFetch), + }, + ...npmOpts, + homeDir: { + '.npmrc': userRc.join('\n'), }, - ...otherMocks, }) - return new Logout(npm) -} - -t.afterEach(() => { - delete flatOptions.token - result = null - config.clearCredentialsByURI = null - config.delete = null - config.save = null -}) - -t.test('token logout', async t => { - t.plan(5) - - flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' - - npm.config.clearCredentialsByURI = registry => { - t.equal( - registry, - 'https://registry.npmjs.org/', - 'should clear credentials from the expected registry' - ) - } - npm.config.save = type => { - t.equal(type, 'user', 'should save to user config') + return { + ...mock, + logout: { exec: (args) => mock.npm.exec('logout', args) }, + result: () => result, + // get only the message portion of the verbose log from the command + logMsg: () => mock.logs.verbose.find(l => l[0] === 'logout')[1], + userRc: () => fs.readFile(join(mock.home, '.npmrc'), 'utf-8').then(r => r.trim()), } +} - const logout = mockLogout({ - 'proc-log': { - verbose: (title, msg) => { - t.equal(title, 'logout', 'should have correcct log prefix') - t.equal( - msg, - 'clearing token for https://registry.npmjs.org/', - 'should log message with correct registry' - ) - }, - }, +t.test('token logout', async t => { + const { logout, logMsg, result, userRc } = await mockLogout(t, { + userRc: [ + '//registry.npmjs.org/:_authToken=@foo/', + 'other-config=true', + ], }) await logout.exec([]) + t.equal( + logMsg(), + 'clearing token for https://registry.npmjs.org/', + 'should log message with correct registry' + ) + t.match( - result, + result(), { url: '/-/user/token/%40foo%2F', opts: { @@ -76,64 +60,30 @@ t.test('token logout', async t => { }, 'should call npm-registry-fetch with expected values' ) + + t.equal(await userRc(), 'other-config=true') }) t.test('token scoped 
logout', async t => { - t.teardown(() => { - config.scope = '' - delete flatOptions['//diff-registry.npmjs.com/:_authToken'] - delete flatOptions['//registry.npmjs.org/:_authToken'] - delete config['@myscope:registry'] - delete flatOptions.scope - result = null - config.clearCredentialsByURI = null - config.delete = null - config.save = null - }) - - t.plan(7) - - flatOptions['//diff-registry.npmjs.com/:_authToken'] = '@bar/' - flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' - config.scope = '@myscope' - config['@myscope:registry'] = 'https://diff-registry.npmjs.com/' - flatOptions.scope = '@myscope' - flatOptions['@myscope:registry'] = 'https://diff-registry.npmjs.com/' - - npm.config.clearCredentialsByURI = registry => { - t.equal( - registry, - 'https://diff-registry.npmjs.com/', - 'should clear credentials from the expected registry' - ) - } - - npm.config.delete = (ref, type) => { - t.equal(ref, '@myscope:registry', 'should delete scoped registyr from config') - t.equal(type, 'user', 'should delete from user config') - } - - npm.config.save = type => { - t.equal(type, 'user', 'should save to user config') - } - - const logout = mockLogout({ - 'proc-log': { - verbose: (title, msg) => { - t.equal(title, 'logout', 'should have correcct log prefix') - t.equal( - msg, - 'clearing token for https://diff-registry.npmjs.com/', - 'should log message with correct registry' - ) - }, - }, + const { logout, logMsg, result, userRc } = await mockLogout(t, { + config: { scope: '@myscope' }, + userRc: [ + '//diff-registry.npmjs.com/:_authToken=@bar/', + '//registry.npmjs.org/:_authToken=@foo/', + '@myscope:registry=https://diff-registry.npmjs.com/', + ], }) await logout.exec([]) + t.equal( + logMsg(), + 'clearing token for https://diff-registry.npmjs.com/', + 'should log message with correct registry' + ) + t.match( - result, + result(), { url: '/-/user/token/%40bar%2F', opts: { @@ -148,41 +98,32 @@ t.test('token scoped logout', async t => { }, 'should call npm-registry-fetch with expected values' ) + + t.equal(await userRc(), '//registry.npmjs.org/:_authToken=@foo/') }) t.test('user/pass logout', async t => { - t.teardown(() => { - delete flatOptions['//registry.npmjs.org/:username'] - delete flatOptions['//registry.npmjs.org/:_password'] - npm.config.clearCredentialsByURI = null - npm.config.save = null - }) - t.plan(2) - - flatOptions['//registry.npmjs.org/:username'] = 'foo' - flatOptions['//registry.npmjs.org/:_password'] = 'bar' - - npm.config.clearCredentialsByURI = () => null - npm.config.save = () => null - - const logout = mockLogout({ - 'proc-log': { - verbose: (title, msg) => { - t.equal(title, 'logout', 'should have correct log prefix') - t.equal( - msg, - 'clearing user credentials for https://registry.npmjs.org/', - 'should log message with correct registry' - ) - }, - }, + const { logout, logMsg, userRc } = await mockLogout(t, { + userRc: [ + '//registry.npmjs.org/:username=foo', + '//registry.npmjs.org/:_password=bar', + 'other-config=true', + ], }) await logout.exec([]) + + t.equal( + logMsg(), + 'clearing user credentials for https://registry.npmjs.org/', + 'should log message with correct registry' + ) + + t.equal(await userRc(), 'other-config=true') }) t.test('missing credentials', async t => { - const logout = mockLogout() + const { logout } = await mockLogout(t) await t.rejects( logout.exec([]), @@ -195,57 +136,35 @@ t.test('missing credentials', async t => { }) t.test('ignore invalid scoped registry config', async t => { - t.teardown(() => { - delete flatOptions.token 
- result = null - config.clearCredentialsByURI = null - config.delete = null - config.save = null - }) - t.plan(4) - - flatOptions['//registry.npmjs.org/:_authToken'] = '@foo/' - config.scope = '@myscope' - flatOptions['@myscope:registry'] = '' - - npm.config.clearCredentialsByURI = registry => { - t.equal( - registry, - 'https://registry.npmjs.org/', - 'should clear credentials from the expected registry' - ) - } - - npm.config.delete = () => null - npm.config.save = () => null - - const logout = mockLogout({ - 'proc-log': { - verbose: (title, msg) => { - t.equal(title, 'logout', 'should have correcct log prefix') - t.equal( - msg, - 'clearing token for https://registry.npmjs.org/', - 'should log message with correct registry' - ) - }, - }, + const { logout, logMsg, result, userRc } = await mockLogout(t, { + config: { scope: '@myscope' }, + userRc: [ + '//registry.npmjs.org/:_authToken=@foo/', + 'other-config=true', + ], }) await logout.exec([]) + t.equal( + logMsg(), + 'clearing token for https://registry.npmjs.org/', + 'should log message with correct registry' + ) + t.match( - result, + result(), { url: '/-/user/token/%40foo%2F', opts: { '//registry.npmjs.org/:_authToken': '@foo/', registry: 'https://registry.npmjs.org/', - '@myscope:registry': '', method: 'DELETE', ignoreBody: true, }, }, 'should call npm-registry-fetch with expected values' ) + + t.equal(await userRc(), 'other-config=true') }) diff --git a/deps/npm/test/lib/commands/ls.js b/deps/npm/test/lib/commands/ls.js index b9278dd20688d9..9b773345525b5d 100644 --- a/deps/npm/test/lib/commands/ls.js +++ b/deps/npm/test/lib/commands/ls.js @@ -2,17 +2,18 @@ // Consider using t.matchSnapshot on these instead, especially since many // of them contain the tap testdir folders, which are auto-generated and // may change when node-tap is updated. 
-const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm.js') -const { resolve } = require('path') +const t = require('tap') const { utimesSync } = require('fs') +const mockNpm = require('../../fixtures/mock-npm.js') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + const touchHiddenPackageLock = prefix => { const later = new Date(Date.now() + 10000) utimesSync(`${prefix}/node_modules/.package-lock.json`, later, later) } -t.cleanSnapshot = str => str.split(/\r\n/).join('\n') +t.cleanSnapshot = str => cleanCwd(str) const simpleNmFixture = { node_modules: { @@ -89,775 +90,827 @@ const diffDepTypesNmFixture = { }, } -let result = '' -const LS = t.mock('../../../lib/commands/ls.js', { - path: { - ...require('path'), - sep: '/', - }, -}) -const config = { - all: true, - color: false, - depth: Infinity, - global: false, - json: false, - link: false, - location: 'project', - omit: [], - parseable: false, - 'package-lock-only': false, -} -const flatOptions = { - workspacesEnabled: true, -} -const npm = mockNpm({ - config, - flatOptions, - output: msg => { - result = msg - }, -}) -const ls = new LS(npm) +const mockLs = async (t, { mocks, config, ...opts } = {}) => { + const mock = await mockNpm(t, { + ...opts, + config: { + all: true, + ...config, + }, + command: 'ls', + mocks: { + path: { + ...require('path'), + sep: '/', + }, + ...mocks, + }, + }) -const redactCwd = res => - res && - res.replace(/\\+/g, '/').replace(new RegExp(__dirname.replace(/\\+/g, '/'), 'gi'), '{CWD}') + return { + ...mock, + result: () => mock.joinedOutput(), + } +} const redactCwdObj = obj => { if (Array.isArray(obj)) { return obj.map(o => redactCwdObj(o)) - } else if (typeof obj === 'string') { - return redactCwd(obj) - } else if (!obj) { - return obj - } else if (typeof obj === 'object') { + } + if (obj && typeof obj === 'object') { return Object.keys(obj).reduce((o, k) => { o[k] = redactCwdObj(obj[k]) return o }, {}) - } else { - return obj } + return typeof obj === 'string' ? 
cleanCwd(obj) : obj } const jsonParse = res => redactCwdObj(JSON.parse(res)) -const cleanUpResult = () => (result = '') - -t.test('ls', t => { - t.beforeEach(cleanUpResult) - config.json = false - config.unicode = false +t.test('ls', async t => { t.test('no args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + // config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree representation of dependencies structure' ) }) t.test('missing package.json', async t => { - npm.prefix = t.testdir({ - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree missing name/version of top-level package' ) }) t.test('workspace and missing optional dep', async t => { - npm.prefix = npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'root', - dependencies: { - foo: '^1.0.0', - }, - optionalDependencies: { - bar: '^1.0.0', - }, - workspaces: ['./baz'], - }), - baz: { + const config = { + 'include-workspace-root': true, + workspace: 'baz', + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { 'package.json': JSON.stringify({ - name: 'baz', - version: '1.0.0', + name: 'root', + dependencies: { + foo: '^1.0.0', + }, + optionalDependencies: { + bar: '^1.0.0', + }, + workspaces: ['./baz'], }), - }, - node_modules: { - baz: t.fixture('symlink', '../baz'), - foo: { + baz: { 'package.json': JSON.stringify({ - name: 'foo', + name: 'baz', version: '1.0.0', }), }, + node_modules: { + baz: t.fixture('symlink', '../baz'), + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }, + }, }, }) - npm.flatOptions.includeWorkspaceRoot = true - t.teardown(() => { - delete npm.flatOptions.includeWorkspaceRoot - }) - - await ls.execWorkspaces([], ['baz']) - t.matchSnapshot(redactCwd(result), 'should omit missing optional dep') + await ls.exec([]) + t.matchSnapshot(cleanCwd(result()), 'should omit missing optional dep') }) t.test('extraneous deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output containing problems info') + t.matchSnapshot(cleanCwd(result()), 'should output containing problems info') }) t.test('overridden dep', async t => { - config.all = true - t.teardown(() => { - config.all = false - }) + const config = { + } - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-overridden', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - overrides: { - bar: '1.0.0', - }, - }), - node_modules: { - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - dependencies: { - bar: '^2.0.0', 
- }, - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-overridden', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + overrides: { + bar: '1.0.0', + }, + }), + node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + bar: '^2.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ + name: 'bar', + version: '1.0.0', + }), + }, }, }, }) - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should contain overridden outout') + await + + ls.exec([]) + t.matchSnapshot(cleanCwd(result()), 'should contain overridden outout') }) t.test('overridden dep w/ color', async t => { - config.all = true - npm.color = true - t.teardown(() => { - config.all = false - npm.color = false - }) + const config = { + color: 'always', + } - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-overridden', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - overrides: { - bar: '1.0.0', - }, - }), - node_modules: { - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - dependencies: { - bar: '^2.0.0', - }, - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-overridden', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + overrides: { + bar: '1.0.0', + }, + }), + node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + bar: '^2.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ + name: 'bar', + version: '1.0.0', + }), + }, }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should contain overridden outout') + t.matchSnapshot(cleanCwd(result()), 'should contain overridden outout') }) t.test('with filter arg', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const config = { + color: 'always', + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['chai']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree contaning only occurrences of filtered by package and colored output' ) - npm.color = false }) t.test('with dot filter arg', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const config = { + all: false, + depth: 0, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['.']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree contaning only occurrences of filtered by package and colored output' ) - config.all = true - config.depth = Infinity 
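The shared-config resets being removed in this hunk are what the mock factory makes unnecessary: each test now receives its own config and fixture directory, so nothing leaks into the next test. A hedged distillation of the new shape, with the fixture contents elided:

  t.test('with dot filter arg', async t => {
    // config applies to this test only; tap tears the mock down with t
    const { result, ls } = await mockLs(t, {
      config: { all: false, depth: 0 },
      prefixDir: { /* package.json plus node_modules fixtures */ },
    })
    await ls.exec(['.'])
    t.matchSnapshot(cleanCwd(result()), 'output')
    // no config.all / config.depth restore, no process.exitCode reset
  })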
- process.exitCode = 0 }) t.test('with filter arg nested dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['dog']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree contaning only occurrences of filtered package and its ancestors' ) }) t.test('with multiple filter args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - node_modules: { - ...simpleNmFixture.node_modules, - ipsum: { - 'package.json': JSON.stringify({ - name: 'ipsum', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + node_modules: { + ...simpleNmFixture.node_modules, + ipsum: { + 'package.json': JSON.stringify({ + name: 'ipsum', + version: '1.0.0', + }), + }, }, }, }) await ls.exec(['dog@*', 'chai@1.0.0']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), /* eslint-disable-next-line max-len */ 'should output tree contaning only occurrences of multiple filtered packages and their ancestors' ) }) t.test('with missing filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['notadep']) - t.matchSnapshot(redactCwd(result), 'should output tree containing no dependencies info') + t.matchSnapshot(cleanCwd(result()), 'should output tree containing no dependencies info') t.equal(process.exitCode, 1, 'should exit with error code 1') - process.exitCode = 0 }) t.test('default --depth value should be 0', async t => { - config.all = false - config.depth = undefined - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const config = { + all: false, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - config.all = true - config.depth = Infinity + t.matchSnapshot(cleanCwd(result()), + 'should output tree containing only top-level dependencies') }) t.test('--depth=0', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), 
- ...simpleNmFixture, + const config = { + all: false, + depth: 0, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - config.all = true - config.depth = Infinity + t.matchSnapshot(cleanCwd(result()), + 'should output tree containing only top-level dependencies') }) t.test('--depth=1', async t => { - config.all = false - config.depth = 1 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - e: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - c: '^1.0.0', - d: '*', - }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - }), - }, - e: { - 'package.json': JSON.stringify({ - name: 'e', - version: '1.0.0', - }), + const config = { + all: false, + depth: 1, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + e: '^1.0.0', + }, + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + c: '^1.0.0', + d: '*', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + }), + }, + e: { + 'package.json': JSON.stringify({ + name: 'e', + version: '1.0.0', + }), + }, }, }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree containing top-level deps and their deps only' ) - config.all = true - config.depth = Infinity }) t.test('missing/invalid/extraneous', async t => { t.plan(3) - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]).catch(err => { t.equal(err.code, 'ELSPROBLEMS', 'should have error code') t.equal( - redactCwd(err.message).replace(/\r\n/g, '\n'), - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/chai\n' + - 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls-missing-invalid-extraneous/node_modules/foo\n' + + cleanCwd(err.message), + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai\n' + + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo\n' + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', 'should log missing/invalid/extraneous errors' ) }) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree 
containing missing, invalid, extraneous labels' ) }) t.test('colored output', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const config = { + color: 'always', + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should have error code') - t.matchSnapshot(redactCwd(result), 'should output tree containing color info') - npm.color = false + t.matchSnapshot(cleanCwd(result()), 'should output tree containing color info') }) t.test('--dev', async t => { - flatOptions.omit = ['peer', 'prod', 'optional'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + omit: ['peer', 'prod', 'optional'], + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') - flatOptions.omit = [] + t.matchSnapshot(cleanCwd(result()), 'should output tree containing dev deps') }) t.test('--link', async t => { - config.link = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - 'linked-dep': '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - 'linked-dep': { + const config = { + link: true, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { 'package.json': JSON.stringify({ - name: 'linked-dep', + name: 'test-npm-ls', version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + 'linked-dep': '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, }), - }, - node_modules: { - 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules, + 'linked-dep': { + 'package.json': JSON.stringify({ + name: 'linked-dep', + version: '1.0.0', + }), + }, + node_modules: { + 'linked-dep': t.fixture('symlink', '../linked-dep'), + ...diffDepTypesNmFixture.node_modules, + }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - config.link = false + t.matchSnapshot(cleanCwd(result()), 'should output tree containing linked deps') }) t.test('print deduped symlinks', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'print-deduped-symlinks', - version: 
'1.0.0', - dependencies: { - a: '^1.0.0', - b: '^1.0.0', - }, - }), - b: { + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { 'package.json': JSON.stringify({ - name: 'b', + name: 'print-deduped-symlinks', version: '1.0.0', + dependencies: { + a: '^1.0.0', + b: '^1.0.0', + }, }), - }, - node_modules: { - a: { + b: { 'package.json': JSON.stringify({ - name: 'a', + name: 'b', version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, }), }, - b: t.fixture('symlink', '../b'), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + }, + }), + }, + b: t.fixture('symlink', '../b'), + }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - config.link = false + t.matchSnapshot(cleanCwd(result()), 'should output tree containing linked deps') }) t.test('--production', async t => { - flatOptions.omit = ['dev', 'peer'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { omit: ['dev', 'peer'] }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') - flatOptions.omit = [] + t.matchSnapshot(cleanCwd(result()), 'should output tree containing production deps') }) t.test('--long', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const config = { + long: true, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') - config.long = true + t.matchSnapshot(cleanCwd(result()), 'should output tree info with descriptions') }) t.test('--long --depth=0', async t => { - config.all = false - config.depth = 0 - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const config = { + all: false, + 
depth: 0, + long: true, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree containing top-level deps with descriptions' ) - config.all = true - config.depth = Infinity - config.long = false }) t.test('json read problems', async t => { - npm.prefix = t.testdir({ - 'package.json': '{broken json', + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': '{broken json', + }, }) await t.rejects(ls.exec([]), { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') - t.matchSnapshot(redactCwd(result), 'should print empty result') + t.matchSnapshot(cleanCwd(result()), 'should print empty result') }) t.test('empty location', async t => { - npm.prefix = t.testdir({}) + const { ls, result } = await mockLs(t) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print empty result') + t.matchSnapshot(cleanCwd(result()), 'should print empty result') }) t.test('invalid peer dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^2.0.0', // mismatching version # - }, - }), - ...diffDepTypesNmFixture, - }) - await t.rejects(ls.exec([])) - t.matchSnapshot( - redactCwd(result), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^2.0.0', // mismatching version # + }, + }), + ...diffDepTypesNmFixture, + }, + }) + await t.rejects(ls.exec([])) + t.matchSnapshot( + cleanCwd(result()), 'should output tree signaling mismatching peer dep in problems' ) }) t.test('invalid deduped dep', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'invalid-deduped-dep', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - b: '^2.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^2.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), + const config = { + color: 'always', + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'invalid-deduped-dep', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + b: '^2.0.0', + }, + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^2.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, }, }, }) await t.rejects(ls.exec([])) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree signaling mismatching peer dep in problems' ) - 
npm.color = false }) t.test('deduped missing dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - b: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + b: '^1.0.0', + }, + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + }, + }), + }, }, }, }) @@ -867,51 +920,58 @@ t.test('ls', t => { 'should list missing dep problem' ) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable signaling missing peer dep in problems' ) }) t.test('unmet peer dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - peerDependencies: { - 'peer-dep': '*', - }, - }), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + peerDependencies: { + 'peer-dep': '*', + }, + }), + }, }) await t.rejects( ls.exec([]), { code: 'ELSPROBLEMS', message: 'missing: peer-dep@*, required by test-npm-ls@1.0.0' }, 'should have missing peer-dep error msg' ) - t.matchSnapshot(redactCwd(result), 'should output tree signaling missing peer dep in problems') + t.matchSnapshot(cleanCwd(result()), + 'should output tree signaling missing peer dep in problems') }) t.test('unmet optional dep', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0', // mismatching version # - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const config = { color: 'always' } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'missing-optional-dep': '^1.0.0', + 'optional-dep': '^2.0.0', // mismatching version # + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await t.rejects( ls.exec([]), @@ -919,116 +979,38 @@ t.test('ls', t => { 'should have invalid dep error msg' ) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree with empty entry for missing optional deps' ) - npm.color = false }) t.test('cycle deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - }, - }, - }) - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - }) - - t.test('cycle deps 
with filter args', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - }, - }, - }) - await ls.exec(['a']) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - npm.color = false - }) - - t.test('with no args dedupe entries', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'dedupe-entries', - version: '1.0.0', - dependencies: { - '@npmcli/a': '^1.0.0', - '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0', - }, - }), - node_modules: { - '@npmcli': { + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + node_modules: { a: { 'package.json': JSON.stringify({ - name: '@npmcli/a', + name: 'a', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0', + b: '^1.0.0', }, }), }, b: { 'package.json': JSON.stringify({ - name: '@npmcli/b', - version: '1.1.2', - }), - }, - c: { - 'package.json': JSON.stringify({ - name: '@npmcli/c', + name: 'b', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0', + a: '^1.0.0', }, }), }, @@ -1036,400 +1018,509 @@ t.test('ls', t => { }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') }) - t.test('with no args dedupe entries and not displaying all', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'dedupe-entries', - version: '1.0.0', - dependencies: { - '@npmcli/a': '^1.0.0', - '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0', - }, - }), - node_modules: { - '@npmcli': { + t.test('cycle deps with filter args', async t => { + const config = { color: 'always' } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + node_modules: { a: { 'package.json': JSON.stringify({ - name: '@npmcli/a', + name: 'a', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0', + b: '^1.0.0', }, }), }, b: { 'package.json': JSON.stringify({ - name: '@npmcli/b', - version: '1.1.2', - }), - }, - c: { - 'package.json': JSON.stringify({ - name: '@npmcli/c', + name: 'b', version: '1.0.0', dependencies: { - '@npmcli/b': '^1.0.0', + a: '^1.0.0', }, }), }, }, }, }) - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - config.all = true - config.depth = Infinity + await ls.exec(['a']) + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') }) - t.test('with args and dedupe entries', async t => { - npm.color = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'dedupe-entries', - version: '1.0.0', - dependencies: { - '@npmcli/a': '^1.0.0', - '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0', - }, - }), - node_modules: { - '@npmcli': { - a: { - 'package.json': JSON.stringify({ - name: '@npmcli/a', - version: '1.0.0', - dependencies: { - 
'@npmcli/b': '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: '@npmcli/b', - version: '1.1.2', - }), + t.test('with no args dedupe entries', async t => { + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'dedupe-entries', + version: '1.0.0', + dependencies: { + '@npmcli/a': '^1.0.0', + '@npmcli/b': '^1.0.0', + '@npmcli/c': '^1.0.0', }, - c: { - 'package.json': JSON.stringify({ - name: '@npmcli/c', - version: '1.0.0', - dependencies: { - '@npmcli/b': '^1.0.0', - }, - }), + }), + node_modules: { + '@npmcli': { + a: { + 'package.json': JSON.stringify({ + name: '@npmcli/a', + version: '1.0.0', + dependencies: { + '@npmcli/b': '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: '@npmcli/b', + version: '1.1.2', + }), + }, + c: { + 'package.json': JSON.stringify({ + name: '@npmcli/c', + version: '1.0.0', + dependencies: { + '@npmcli/b': '^1.0.0', + }, + }), + }, }, }, }, }) - await ls.exec(['@npmcli/b']) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') - npm.color = false + await ls.exec([]) + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') }) - t.test('with args and different order of items', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'dedupe-entries', - version: '1.0.0', - dependencies: { - '@npmcli/a': '^1.0.0', - '@npmcli/b': '^1.0.0', - '@npmcli/c': '^1.0.0', - }, - }), - node_modules: { - '@npmcli': { - a: { - 'package.json': JSON.stringify({ - name: '@npmcli/a', - version: '1.0.0', - dependencies: { - '@npmcli/c': '^1.0.0', - }, - }), + t.test('with no args dedupe entries and not displaying all', async t => { + const config = { + all: false, + depth: 0, + } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'dedupe-entries', + version: '1.0.0', + dependencies: { + '@npmcli/a': '^1.0.0', + '@npmcli/b': '^1.0.0', + '@npmcli/c': '^1.0.0', }, - b: { - 'package.json': JSON.stringify({ - name: '@npmcli/b', - version: '1.1.2', - dependencies: { - '@npmcli/c': '^1.0.0', - }, - }), + }), + node_modules: { + '@npmcli': { + a: { + 'package.json': JSON.stringify({ + name: '@npmcli/a', + version: '1.0.0', + dependencies: { + '@npmcli/b': '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: '@npmcli/b', + version: '1.1.2', + }), + }, + c: { + 'package.json': JSON.stringify({ + name: '@npmcli/c', + version: '1.0.0', + dependencies: { + '@npmcli/b': '^1.0.0', + }, + }), + }, }, - c: { - 'package.json': JSON.stringify({ - name: '@npmcli/c', - version: '1.0.0', - }), + }, + }, + }) + await ls.exec([]) + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') + }) + + t.test('with args and dedupe entries', async t => { + const config = { color: 'always' } + const { result, ls } = await mockLs(t, { + config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'dedupe-entries', + version: '1.0.0', + dependencies: { + '@npmcli/a': '^1.0.0', + '@npmcli/b': '^1.0.0', + '@npmcli/c': '^1.0.0', + }, + }), + node_modules: { + '@npmcli': { + a: { + 'package.json': JSON.stringify({ + name: '@npmcli/a', + version: '1.0.0', + dependencies: { + '@npmcli/b': '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: '@npmcli/b', + version: '1.1.2', + }), + }, + c: { + 'package.json': JSON.stringify({ + name: '@npmcli/c', + version: '1.0.0', + dependencies: { 
+ '@npmcli/b': '^1.0.0', + }, + }), + }, + }, + }, + }, + }) + await ls.exec(['@npmcli/b']) + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') + }) + + t.test('with args and different order of items', async t => { + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'dedupe-entries', + version: '1.0.0', + dependencies: { + '@npmcli/a': '^1.0.0', + '@npmcli/b': '^1.0.0', + '@npmcli/c': '^1.0.0', + }, + }), + node_modules: { + '@npmcli': { + a: { + 'package.json': JSON.stringify({ + name: '@npmcli/a', + version: '1.0.0', + dependencies: { + '@npmcli/c': '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: '@npmcli/b', + version: '1.1.2', + dependencies: { + '@npmcli/c': '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: '@npmcli/c', + version: '1.0.0', + }), + }, }, }, }, }) await ls.exec(['@npmcli/c']) - t.matchSnapshot(redactCwd(result), 'should print tree output containing deduped ref') + t.matchSnapshot(cleanCwd(result()), 'should print tree output containing deduped ref') }) t.test('using aliases', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: 'npm:b@1.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/a': { + const { npm, result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: 'npm:b@1.0.0', + }, + }), + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/a': { + name: 'b', + version: '1.0.0', + from: 'a@npm:b', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + requested: { + type: 'alias', + }, + }, + }, + }), + a: { + 'package.json': JSON.stringify({ name: 'b', version: '1.0.0', - from: 'a@npm:b', - resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', - requested: { + _from: 'a@npm:b', + _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + _requested: { type: 'alias', }, - }, + }), }, - }), - a: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - _from: 'a@npm:b', - _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', - _requested: { - type: 'alias', - }, - }), }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') + t.matchSnapshot(cleanCwd(result()), 'should output tree containing aliases') }) t.test('resolved points to git ref', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - abbrev: 'git+https://github.com/isaacs/abbrev-js.git', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/abbrev': { - name: 'abbrev', - version: '1.1.1', - from: 'git+https://github.com/isaacs/abbrev-js.git', - /* eslint-disable-next-line max-len */ - resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - }, + const { npm, result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', }, }), - abbrev: { - 'package.json': JSON.stringify({ - name: 'abbrev', - version: 
'1.1.1', - _id: 'abbrev@1.1.1', - _from: 'git+https://github.com/isaacs/abbrev-js.git', - /* eslint-disable-next-line max-len */ - _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - _requested: { - type: 'git', - raw: 'git+https:github.com/isaacs/abbrev-js.git', - rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', - saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', - fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null, + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/abbrev': { + name: 'abbrev', + version: '1.1.1', + from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ + resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + }, }, }), + abbrev: { + 'package.json': JSON.stringify({ + name: 'abbrev', + version: '1.1.1', + _id: 'abbrev@1.1.1', + _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ + _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + _requested: { + type: 'git', + raw: 'git+https:github.com/isaacs/abbrev-js.git', + rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', + saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', + fetchSpec: 'https://github.com/isaacs/abbrev-js.git', + gitCommittish: null, + }, + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing git refs') + t.matchSnapshot(cleanCwd(result()), 'should output tree containing git refs') }) t.test('broken resolved field', async t => { - npm.prefix = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.1', - }), - }, - }, - 'package-lock.json': JSON.stringify({ - name: 'npm-broken-resolved-field-test', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - packages: { - '': { - name: 'a', - version: '1.0.1', - }, - }, - dependencies: { + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + node_modules: { a: { - version: '1.0.1', - resolved: 'foo@dog://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.1', + }), }, }, - }), - 'package.json': JSON.stringify({ - name: 'npm-broken-resolved-field-test', - version: '1.0.0', - dependencies: { - a: '^1.0.1', - }, - }), + 'package-lock.json': JSON.stringify({ + name: 'npm-broken-resolved-field-test', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'a', + version: '1.0.1', + }, + }, + dependencies: { + a: { + version: '1.0.1', + resolved: 'foo@dog://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + }, + }, + }), + 'package.json': JSON.stringify({ + name: 'npm-broken-resolved-field-test', + version: '1.0.0', + dependencies: { + a: '^1.0.1', + }, + }), + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should NOT print git refs in output tree') + t.matchSnapshot(cleanCwd(result()), 'should NOT print git refs 
in output tree') }) t.test('from and resolved properties', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'simple-output': '^2.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/simple-output': { - name: 'simple-output', - version: '2.1.1', - resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', - shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - }, + const { npm, result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'simple-output': '^2.0.0', }, }), - 'simple-output': { - 'package.json': JSON.stringify({ - name: 'simple-output', - version: '2.1.1', - _from: 'simple-output', - _id: 'simple-output@2.1.1', - _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', - _requested: { - type: 'tag', - registry: true, - raw: 'simple-output', - name: 'simple-output', - escapedName: 'simple-output', - rawSpec: '', - saveSpec: null, - fetchSpec: 'latest', + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/simple-output': { + name: 'simple-output', + version: '2.1.1', + resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', + }, }, - _requiredBy: ['#USER', '/'], - _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output', }), + 'simple-output': { + 'package.json': JSON.stringify({ + name: 'simple-output', + version: '2.1.1', + _from: 'simple-output', + _id: 'simple-output@2.1.1', + _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + _requested: { + type: 'tag', + registry: true, + raw: 'simple-output', + name: 'simple-output', + escapedName: 'simple-output', + rawSpec: '', + saveSpec: null, + fetchSpec: 'latest', + }, + _requiredBy: ['#USER', '/'], + _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', + _spec: 'simple-output', + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') + t.matchSnapshot(cleanCwd(result()), 'should not be printed in tree output') }) t.test('global', async t => { - config.global = true - const fixtures = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - node_modules: { - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), + const config = { + global: true, + } + const { result, ls } = await mockLs(t, { + config, + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + node_modules: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, }, }, }, }, }) - // mimics lib/npm.js globalDir getter but pointing to fixtures - npm.globalDir = resolve(fixtures, 'node_modules') - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print tree and not mark top-level items extraneous') - npm.globalDir = 'MISSING_GLOBAL_DIR' - config.global = false + t.matchSnapshot(cleanCwd(result()), + 'should print tree and not mark top-level items extraneous') }) t.test('filtering 
by child of missing dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'filter-by-child-of-missing-dep', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - node_modules: { - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - c: '^1.0.0', - }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - dependencies: { - c: '^2.0.0', - }, - }), - node_modules: { - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '2.0.0', - }), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'filter-by-child-of-missing-dep', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + node_modules: { + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + c: '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + dependencies: { + c: '^2.0.0', + }, + }), + node_modules: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '2.0.0', + }), + }, }, }, }, @@ -1438,772 +1529,830 @@ t.test('ls', t => { await ls.exec(['c']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should print tree and not duplicate child of missing items' ) }) t.test('loading a tree containing workspaces', async t => { - npm.localPrefix = npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-tree', - version: '1.0.0', - workspaces: ['./a', './b', './d', './group/*'], - dependencies: { pacote: '1.0.0' }, - }), - node_modules: { - a: t.fixture('symlink', '../a'), - b: t.fixture('symlink', '../b'), - c: { + const mockWorkspaces = async (t, exec = [], config = {}) => { + const { result, ls } = await mockLs(t, { + config, + prefixDir: { 'package.json': JSON.stringify({ - name: 'c', + name: 'workspaces-tree', version: '1.0.0', + workspaces: ['./a', './b', './d', './group/*'], + dependencies: { pacote: '1.0.0' }, }), - }, - d: t.fixture('symlink', '../d'), - e: t.fixture('symlink', '../group/e'), - f: t.fixture('symlink', '../group/f'), - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - dependencies: { - bar: '^1.0.0', + node_modules: { + a: t.fixture('symlink', '../a'), + b: t.fixture('symlink', '../b'), + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, + d: t.fixture('symlink', '../d'), + e: t.fixture('symlink', '../group/e'), + f: t.fixture('symlink', '../group/f'), + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + dependencies: { + bar: '^1.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ name: 'bar', version: '1.0.0' }), + }, + baz: { + 'package.json': JSON.stringify({ name: 'baz', version: '1.0.0' }), + }, + pacote: { + 'package.json': JSON.stringify({ name: 'pacote', version: '1.0.0' }), }, - }), - }, - bar: { - 'package.json': JSON.stringify({ name: 'bar', version: '1.0.0' }), - }, - baz: { - 'package.json': JSON.stringify({ name: 'baz', version: '1.0.0' }), - }, - pacote: { - 'package.json': JSON.stringify({ name: 'pacote', version: '1.0.0' }), - }, - }, - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - c: '^1.0.0', - d: '^1.0.0', }, - devDependencies: { - baz: '^1.0.0', + 
a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + c: '^1.0.0', + d: '^1.0.0', + }, + devDependencies: { + baz: '^1.0.0', + }, + }), }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - dependencies: { - foo: '^1.1.1', + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + dependencies: { + foo: '^1.1.1', + }, + }), + }, + group: { + e: { + 'package.json': JSON.stringify({ + name: 'e', + version: '1.0.0', + }), + }, + f: { + 'package.json': JSON.stringify({ + name: 'f', + version: '1.0.0', + }), + }, }, - }), - }, - group: { - e: { - 'package.json': JSON.stringify({ - name: 'e', - version: '1.0.0', - }), - }, - f: { - 'package.json': JSON.stringify({ - name: 'f', - version: '1.0.0', - }), }, - }, - }) + }) - config.all = false - config.depth = 0 - npm.color = true - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should list workspaces properly with default configs') + await ls.exec(exec) - config.all = false - config.depth = 0 - npm.color = true - npm.flatOptions.workspacesEnabled = false - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should not list workspaces with --no-workspaces') + t.matchSnapshot(cleanCwd(result(), t), 'output') + } - config.all = true - config.depth = Infinity - npm.color = false - npm.flatOptions.workspacesEnabled = true + t.test('should list workspaces properly with default configs', t => mockWorkspaces(t, [], { + depth: 0, + color: 'always', + })) + + t.test('should not list workspaces with --no-workspaces', t => mockWorkspaces(t, [], { + depth: 0, + color: 'always', + workspaces: false, + })) // --all - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should list --all workspaces properly') + t.test('should list --all workspaces properly', t => mockWorkspaces(t)) // --production - flatOptions.omit = ['dev', 'peer', 'optional'] - await ls.exec([]) - - t.matchSnapshot(redactCwd(result), 'should list only prod deps of workspaces') - - flatOptions.omit = [] + t.test('should list only prod deps of workspaces', t => mockWorkspaces(t, [], { + omit: ['dev', 'peer', 'optional'], + })) // filter out a single workspace using args - await ls.exec(['d']) - t.matchSnapshot(redactCwd(result), 'should filter single workspace') + t.test('should filter single workspace', t => mockWorkspaces(t, ['d'])) // filter out a single workspace and its deps using workspaces filters - await ls.execWorkspaces([], ['a']) - - t.matchSnapshot(redactCwd(result), 'should filter using workspace config') + t.test('should filter using workspace config', t => mockWorkspaces(t, [], { + workspace: 'a', + })) // filter out a single workspace and include root - npm.flatOptions.includeWorkspaceRoot = true - await ls.execWorkspaces([], ['d']) - t.matchSnapshot(redactCwd(result), 'should inlude root and specified workspace') - npm.flatOptions.includeWorkspaceRoot = false + t.test('should inlude root and specified workspace', t => mockWorkspaces(t, [], { + 'include-workspace-root': true, + workspace: 'd', + })) // filter out a workspace by parent path - await ls.execWorkspaces([], ['./group']) - - t.matchSnapshot(redactCwd(result), 'should filter by parent folder workspace config') + t.test('should filter by parent folder workspace config', t => mockWorkspaces(t, [], { + workspace: './group', + })) // filter by a 
dep within a workspaces sub tree - await ls.execWorkspaces(['bar'], ['d']) - - t.matchSnapshot( - redactCwd(result), - 'should print all tree and filter by dep within only the ws subtree' - ) + t.test('should print all tree and filter by dep within only the ws subtree', t => + mockWorkspaces(t, ['bar'], { + workspace: 'd', + })) }) t.test('filter pkg arg using depth option', async t => { - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-pkg-arg-filter-with-depth-opt', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - b: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { + const mock = async (t, exec, depth = 0) => { + const { result, ls } = await mockLs(t, { + config: typeof depth === 'number' ? { depth } : {}, + prefixDir: { 'package.json': JSON.stringify({ - name: 'b', + name: 'test-pkg-arg-filter-with-depth-opt', version: '1.0.0', dependencies: { - c: '^1.0.0', + a: '^1.0.0', + b: '^1.0.0', }, }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - dependencies: { - d: '^1.0.0', + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - dependencies: { - a: '^1.0.0', + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + c: '^1.0.0', + }, + }), }, - }), + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + dependencies: { + d: '^1.0.0', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + }, + }, }, - }, - }) + }) - t.plan(3) - await ls.exec(['a']) - t.matchSnapshot(redactCwd(result), 'should list a in top-level only') + await ls.exec(exec) - await ls.exec(['d']) - t.matchSnapshot(redactCwd(result), 'should print empty results msg') + t.matchSnapshot(cleanCwd(result(), t), 'output') + } - // if no --depth config is defined, should print path to dep - config.depth = null // default config value - await ls.exec(['d']) - t.matchSnapshot(redactCwd(result), 'should print expected result') - process.exitCode = 0 - }) + t.test('should list a in top-level only', t => mock(t, ['a'])) - t.teardown(() => { - config.depth = Infinity - }) + t.test('should print empty results msg', t => mock(t, ['d'])) - t.end() + // if no --depth config is defined, should print path to dep + t.test('should print expected result', t => mock(t, ['d'], null)) + }) }) -t.test('ls --parseable', t => { - t.beforeEach(cleanUpResult) - config.json = false - config.unicode = false - config.parseable = true +t.test('ls --parseable', async t => { + const parseable = { parseable: true } t.test('no args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable representation of dependencies structure' ) }) t.test('missing package.json', async t => { - npm.prefix = t.testdir({ - ...simpleNmFixture, + const 
{ result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable missing name/version of top-level package' ) }) t.test('extraneous deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output containing problems info') + t.matchSnapshot(cleanCwd(result()), 'should output containing problems info') }) t.test('overridden dep', async t => { - config.all = true - config.long = true - t.teardown(() => { - config.all = false - config.long = false - }) - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-overridden', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - overrides: { - bar: '1.0.0', - }, - }), - node_modules: { - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - dependencies: { - bar: '^2.0.0', - }, - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { ...parseable, long: true }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-overridden', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + overrides: { + bar: '1.0.0', + }, + }), + node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + bar: '^2.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ + name: 'bar', + version: '1.0.0', + }), + }, }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should contain overridden outout') + t.matchSnapshot(cleanCwd(result()), 'should contain overridden outout') }) t.test('with filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['chai']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable contaning only occurrences of filtered by package' ) }) t.test('with filter arg nested dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['dog']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable contaning only occurrences of filtered package' ) }) t.test('with multiple filter args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 
'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - node_modules: { - ...simpleNmFixture.node_modules, - ipsum: { - 'package.json': JSON.stringify({ - name: 'ipsum', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + node_modules: { + ...simpleNmFixture.node_modules, + ipsum: { + 'package.json': JSON.stringify({ + name: 'ipsum', + version: '1.0.0', + }), + }, }, }, }) await ls.exec(['dog@*', 'chai@1.0.0']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), /* eslint-disable-next-line max-len */ 'should output parseable containing only occurrences of multiple filtered packages and their ancestors' ) }) t.test('with missing filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['notadep']) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable output containing no dependencies info' ) }) t.test('default --depth value should be 0', async t => { - config.all = false - config.depth = undefined - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { ...parseable, all: false }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable output containing only top-level dependencies' ) - config.all = true - config.depth = Infinity }) t.test('--depth=0', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { ...parseable, all: false, depth: 0 }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing only top-level dependencies') - config.all = true - config.depth = Infinity + t.matchSnapshot(cleanCwd(result()), + 'should output tree containing only top-level dependencies') }) t.test('--depth=1', async t => { - config.all = false - config.depth = 1 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { ...parseable, all: false, depth: 1 }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls',
+ version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable containing top-level deps and their deps only' ) - config.all = true - config.depth = Infinity }) t.test('missing/invalid/extraneous', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: parseable, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable containing top-level deps and their deps only' ) }) t.test('--dev', async t => { - flatOptions.omit = ['peer', 'prod', 'optional'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + omit: ['peer', 'prod', 'optional'], + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps') - flatOptions.omit = [] + t.matchSnapshot(cleanCwd(result()), 'should output tree containing dev deps') }) t.test('--link', async t => { - config.link = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - 'linked-dep': '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - 'linked-dep': { + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + link: true, + }, + prefixDir: { 'package.json': JSON.stringify({ - name: 'linked-dep', + name: 'test-npm-ls', version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + 'linked-dep': '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, }), - }, - node_modules: { - 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules, + 'linked-dep': { + 'package.json': JSON.stringify({ + name: 'linked-dep', + version: '1.0.0', + }), + }, + node_modules: { + 'linked-dep': t.fixture('symlink', '../linked-dep'), + ...diffDepTypesNmFixture.node_modules, + }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps') - config.link = false + t.matchSnapshot(cleanCwd(result()), 'should 
output tree containing linked deps') }) t.test('--production', async t => { - flatOptions.omit = ['dev', 'peer'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + omit: ['dev', 'peer'], + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing production deps') - flatOptions.omit = [] + t.matchSnapshot(cleanCwd(result()), 'should output tree containing production deps') }) t.test('--long', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions') - config.long = true + t.matchSnapshot(cleanCwd(result()), 'should output tree info with descriptions') }) t.test('--long with extraneous deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output long parseable output with extraneous info') + t.matchSnapshot(cleanCwd(result()), 'should output long parseable output with extraneous info') }) t.test('--long missing/invalid/extraneous', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') 
t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable result containing EXTRANEOUS/INVALID labels' ) - config.long = false }) t.test('--long print symlink target location', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - 'linked-dep': '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - 'linked-dep': { + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + long: true, + }, + prefixDir: { 'package.json': JSON.stringify({ - name: 'linked-dep', + name: 'test-npm-ls', version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + 'linked-dep': '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, }), - }, - node_modules: { - 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules, + 'linked-dep': { + 'package.json': JSON.stringify({ + name: 'linked-dep', + version: '1.0.0', + }), + }, + node_modules: { + 'linked-dep': t.fixture('symlink', '../linked-dep'), + ...diffDepTypesNmFixture.node_modules, + }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output parseable results with symlink targets') - config.long = false + t.matchSnapshot(cleanCwd(result()), 'should output parseable results with symlink targets') }) t.test('--long --depth=0', async t => { - config.all = false - config.depth = 0 - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + all: false, + depth: 0, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output tree containing top-level deps with descriptions' ) - config.all = true - config.depth = Infinity - config.long = false }) t.test('json read problems', async t => { - npm.prefix = t.testdir({ - 'package.json': '{broken json', + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': '{broken json', + }, }) await t.rejects(ls.exec([]), { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error') - t.matchSnapshot(redactCwd(result), 'should print empty result') + t.matchSnapshot(cleanCwd(result()), 'should print empty result') }) t.test('empty location', async t => { - npm.prefix = t.testdir({}) + const { ls, result } = await mockLs(t, { + config: { + ...parseable, + }, + }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print empty result') + t.matchSnapshot(cleanCwd(result()), 'should print empty 
result') }) t.test('unmet peer dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^2.0.0', // mismatching version # - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^2.0.0', // mismatching version # + }, + }), + ...diffDepTypesNmFixture, + }, }) await t.rejects(ls.exec([])) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable signaling missing peer dep in problems' ) }) t.test('unmet optional dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0', // mismatching version # - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'missing-optional-dep': '^1.0.0', + 'optional-dep': '^2.0.0', // mismatching version # + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await t.rejects( ls.exec([]), @@ -2211,326 +2360,340 @@ t.test('ls --parseable', t => { 'should have invalid dep error msg' ) t.matchSnapshot( - redactCwd(result), + cleanCwd(result()), 'should output parseable with empty entry for missing optional deps' ) }) t.test('cycle deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), + const { result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + }, }, }, }) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print tree output omitting deduped ref') + t.matchSnapshot(cleanCwd(result()), 'should print tree output omitting deduped ref') }) t.test('using aliases', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - 
dependencies: { - a: 'npm:b@1.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/a': { - name: 'b', - version: '1.0.0', - resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', - }, + const { npm, result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: 'npm:b@1.0.0', }, }), - a: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - _from: 'a@npm:b', - _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', - _requested: { - type: 'alias', + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/a': { + name: 'b', + version: '1.0.0', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + }, }, }), + a: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + _from: 'a@npm:b', + _resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + _requested: { + type: 'alias', + }, + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing aliases') + t.matchSnapshot(cleanCwd(result()), 'should output tree containing aliases') }) t.test('resolved points to git ref', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - abbrev: 'git+https://github.com/isaacs/abbrev-js.git', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/abbrev': { - name: 'abbrev', - version: '1.1.1', - /* eslint-disable-next-line max-len */ - resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - }, + const { npm, result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', }, }), - abbrev: { - 'package.json': JSON.stringify({ - name: 'abbrev', - version: '1.1.1', - _id: 'abbrev@1.1.1', - _from: 'git+https://github.com/isaacs/abbrev-js.git', - /* eslint-disable-next-line max-len */ - _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - _requested: { - type: 'git', - raw: 'git+https:github.com/isaacs/abbrev-js.git', - rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', - saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', - fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null, + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/abbrev': { + name: 'abbrev', + version: '1.1.1', + /* eslint-disable-next-line max-len */ + resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + }, }, }), + abbrev: { + 'package.json': JSON.stringify({ + name: 'abbrev', + version: '1.1.1', + _id: 'abbrev@1.1.1', + _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ + _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + _requested: { + type: 'git', + raw: 'git+https:github.com/isaacs/abbrev-js.git', + rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', + saveSpec: 
'git+https://github.com/isaacs/abbrev-js.git', + fetchSpec: 'https://github.com/isaacs/abbrev-js.git', + gitCommittish: null, + }, + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should output tree containing git refs') + t.matchSnapshot(cleanCwd(result()), 'should output tree containing git refs') }) t.test('from and resolved properties', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'simple-output': '^2.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/simple-output': { - name: 'simple-output', - version: '2.1.1', - resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', - shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - }, + const { npm, result, ls } = await mockLs(t, { + config: { + ...parseable, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'simple-output': '^2.0.0', }, }), - 'simple-output': { - 'package.json': JSON.stringify({ - name: 'simple-output', - version: '2.1.1', - _from: 'simple-output', - _id: 'simple-output@2.1.1', - _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', - _requested: { - type: 'tag', - registry: true, - raw: 'simple-output', - name: 'simple-output', - escapedName: 'simple-output', - rawSpec: '', - saveSpec: null, - fetchSpec: 'latest', + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/simple-output': { + name: 'simple-output', + version: '2.1.1', + resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', + }, }, - _requiredBy: ['#USER', '/'], - _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output', }), + 'simple-output': { + 'package.json': JSON.stringify({ + name: 'simple-output', + version: '2.1.1', + _from: 'simple-output', + _id: 'simple-output@2.1.1', + _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + _requested: { + type: 'tag', + registry: true, + raw: 'simple-output', + name: 'simple-output', + escapedName: 'simple-output', + rawSpec: '', + saveSpec: null, + fetchSpec: 'latest', + }, + _requiredBy: ['#USER', '/'], + _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', + _spec: 'simple-output', + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should not be printed in tree output') + t.matchSnapshot(cleanCwd(result()), 'should not be printed in tree output') }) t.test('global', async t => { - config.global = true - const fixtures = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - node_modules: { - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { ...parseable, global: true }, + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + node_modules: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, }, }, }, }, }) - // mimics lib/npm.js globalDir getter but 
pointing to fixtures - npm.globalDir = resolve(fixtures, 'node_modules') - await ls.exec([]) - t.matchSnapshot(redactCwd(result), 'should print parseable output for global deps') - npm.globalDir = 'MISSING_GLOBAL_DIR' - config.global = false + t.matchSnapshot(cleanCwd(result()), 'should print parseable output for global deps') }) - - t.end() }) t.test('ignore missing optional deps', async t => { - t.beforeEach(cleanUpResult) - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls-ignore-missing-optional', - version: '1.2.3', - peerDependencies: { - 'peer-ok': '1', - 'peer-missing': '1', - 'peer-wrong': '1', - 'peer-optional-ok': '1', - 'peer-optional-missing': '1', - 'peer-optional-wrong': '1', - }, - peerDependenciesMeta: { - 'peer-optional-ok': { - optional: true, - }, - 'peer-optional-missing': { - optional: true, - }, - 'peer-optional-wrong': { - optional: true, + const mock = async (t, config = {}) => { + const { result, ls } = await mockLs(t, { + config: config, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls-ignore-missing-optional', + version: '1.2.3', + peerDependencies: { + 'peer-ok': '1', + 'peer-missing': '1', + 'peer-wrong': '1', + 'peer-optional-ok': '1', + 'peer-optional-missing': '1', + 'peer-optional-wrong': '1', + }, + peerDependenciesMeta: { + 'peer-optional-ok': { + optional: true, + }, + 'peer-optional-missing': { + optional: true, + }, + 'peer-optional-wrong': { + optional: true, + }, + }, + optionalDependencies: { + 'optional-ok': '1', + 'optional-missing': '1', + 'optional-wrong': '1', + }, + dependencies: { + 'prod-ok': '1', + 'prod-missing': '1', + 'prod-wrong': '1', + }, + }), + node_modules: { + 'prod-ok': { + 'package.json': JSON.stringify({ name: 'prod-ok', version: '1.2.3' }), + }, + 'prod-wrong': { + 'package.json': JSON.stringify({ name: 'prod-wrong', version: '3.2.1' }), + }, + 'optional-ok': { + 'package.json': JSON.stringify({ name: 'optional-ok', version: '1.2.3' }), + }, + 'optional-wrong': { + 'package.json': JSON.stringify({ name: 'optional-wrong', version: '3.2.1' }), + }, + 'peer-optional-ok': { + 'package.json': JSON.stringify({ name: 'peer-optional-ok', version: '1.2.3' }), + }, + 'peer-optional-wrong': { + 'package.json': JSON.stringify({ name: 'peer-optional-wrong', version: '3.2.1' }), + }, + 'peer-ok': { + 'package.json': JSON.stringify({ name: 'peer-ok', version: '1.2.3' }), + }, + 'peer-wrong': { + 'package.json': JSON.stringify({ name: 'peer-wrong', version: '3.2.1' }), + }, }, }, - optionalDependencies: { - 'optional-ok': '1', - 'optional-missing': '1', - 'optional-wrong': '1', - }, - dependencies: { - 'prod-ok': '1', - 'prod-missing': '1', - 'prod-wrong': '1', - }, - }), - node_modules: { - 'prod-ok': { - 'package.json': JSON.stringify({ name: 'prod-ok', version: '1.2.3' }), - }, - 'prod-wrong': { - 'package.json': JSON.stringify({ name: 'prod-wrong', version: '3.2.1' }), - }, - 'optional-ok': { - 'package.json': JSON.stringify({ name: 'optional-ok', version: '1.2.3' }), - }, - 'optional-wrong': { - 'package.json': JSON.stringify({ name: 'optional-wrong', version: '3.2.1' }), - }, - 'peer-optional-ok': { - 'package.json': JSON.stringify({ name: 'peer-optional-ok', version: '1.2.3' }), - }, - 'peer-optional-wrong': { - 'package.json': JSON.stringify({ name: 'peer-optional-wrong', version: '3.2.1' }), - }, - 'peer-ok': { - 'package.json': JSON.stringify({ name: 'peer-ok', version: '1.2.3' }), - }, - 'peer-wrong': { - 'package.json': JSON.stringify({ name: 'peer-wrong', version: '3.2.1' }), 
- }, - }, - }) + }) + + await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) - config.all = true - const prefix = npm.prefix.toLowerCase().replace(/\\/g, '/') - const cleanupPaths = str => str.toLowerCase().replace(/\\/g, '/').split(prefix).join('{project}') + return config.json ? jsonParse(result()).problems : cleanCwd(result()) + } t.test('--json', async t => { - config.json = true - config.parseable = false - await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) - result = JSON.parse(result) - const problems = result.problems.map(cleanupPaths) - t.matchSnapshot(problems, 'ls --json problems') + const result = await mock(t, { json: true }) + t.matchSnapshot(result, 'ls --json problems') }) t.test('--parseable', async t => { - config.json = false - config.parseable = true - await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) - t.matchSnapshot(cleanupPaths(result), 'ls --parseable result') + const result = await mock(t, { parseable: true }) + t.matchSnapshot(result, 'ls --parseable result') }) t.test('human output', async t => { - config.json = false - config.parseable = false - await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }) - t.matchSnapshot(cleanupPaths(result), 'ls result') + const result = await mock(t) + t.matchSnapshot(result, 'ls result') }) }) -t.test('ls --json', t => { - t.beforeEach(cleanUpResult) - config.json = true - config.parseable = false +t.test('ls --json', async t => { + const json = { json: true } t.test('no args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2556,20 +2719,22 @@ t.test('ls --json', t => { }) t.test('missing package.json', async t => { - npm.prefix = t.testdir({ - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', - /* eslint-disable-next-line max-len */ - 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', - /* eslint-disable-next-line max-len */ - 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', + 'extraneous: dog@1.0.0 {CWD}/prefix/node_modules/dog', + 'extraneous: foo@1.0.0 {CWD}/prefix/node_modules/foo', ], dependencies: { dog: { @@ -2577,8 +2742,7 @@ t.test('ls --json', t => { extraneous: true, overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: dog@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/dog', + 'extraneous: dog@1.0.0 {CWD}/prefix/node_modules/dog', ], }, foo: { @@ -2586,8 +2750,7 @@ t.test('ls --json', t => { extraneous: true, overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/foo', + 
'extraneous: foo@1.0.0 {CWD}/prefix/node_modules/foo', ], dependencies: { dog: { @@ -2600,8 +2763,7 @@ t.test('ls --json', t => { extraneous: true, overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-package.json/node_modules/chai', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', ], }, }, @@ -2611,24 +2773,29 @@ t.test('ls --json', t => { }) t.test('extraneous deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', problems: [ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-extraneous-deps/node_modules/chai', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', ], dependencies: { foo: { @@ -2646,8 +2813,7 @@ t.test('ls --json', t => { extraneous: true, overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-extraneous-deps/node_modules/chai', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', ], }, }, @@ -2657,40 +2823,43 @@ t.test('ls --json', t => { }) t.test('overridden dep', async t => { - config.all = true - t.teardown(() => config.all = false) - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-overridden', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - overrides: { - bar: '1.0.0', - }, - }), - node_modules: { - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - dependencies: { - bar: '^2.0.0', - }, - }), - }, - bar: { - 'package.json': JSON.stringify({ - name: 'bar', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-overridden', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + }, + overrides: { + bar: '1.0.0', + }, + }), + node_modules: { + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + bar: '^2.0.0', + }, + }), + }, + bar: { + 'package.json': JSON.stringify({ + name: 'bar', + version: '1.0.0', + }), + }, }, }, }) await ls.exec([]) - t.same(JSON.parse(result), { + t.same(JSON.parse(result()), { name: 'test-overridden', version: '1.0.0', dependencies: { @@ -2710,31 +2879,36 @@ t.test('ls --json', t => { t.test('missing deps --long', async t => { t.plan(3) - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - dog: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + dog: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]).catch(err => { t.equal( - redactCwd(err.message), + cleanCwd(err.message), 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', 'should log missing dep as error' ) 
t.equal(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code') }) t.match( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2742,24 +2916,28 @@ t.test('ls --json', t => { }, 'should output json containing problems info' ) - config.long = false }) t.test('with filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['chai']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2770,26 +2948,31 @@ t.test('ls --json', t => { }, }, }, - 'should output json containing only occurrences of filtered by package' - ) - t.not(process.exitCode, 1, 'should not exit with error code 1') - }) - - t.test('with filter arg nested dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + 'should output json containing only occurrences of filtered by package' + ) + t.not(process.exitCode, 1, 'should not exit with error code 1') + }) + + t.test('with filter arg nested dep', async t => { + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['dog']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2808,33 +2991,38 @@ t.test('ls --json', t => { }, 'should output json containing only occurrences of filtered by package' ) - t.notOk(jsonParse(result).dependencies.chai) + t.notOk(jsonParse(result()).dependencies.chai) }) t.test('with multiple filter args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - node_modules: { - ...simpleNmFixture.node_modules, - ipsum: { - 'package.json': JSON.stringify({ - name: 'ipsum', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', + }, + }), + node_modules: { + ...simpleNmFixture.node_modules, + ipsum: { + 'package.json': JSON.stringify({ + name: 'ipsum', + version: '1.0.0', + }), + }, }, }, }) await ls.exec(['dog@*', 'chai@1.0.0']) t.same( - jsonParse(result), + jsonParse(result()), { version: '1.0.0', name: 'test-npm-ls', @@ -2861,20 +3049,25 @@ t.test('ls --json', t => { }) t.test('with missing filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo:
'^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec(['notadep']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2882,26 +3075,29 @@ t.test('ls --json', t => { 'should output json containing no dependencies info' ) t.equal(process.exitCode, 1, 'should exit with error code 1') - process.exitCode = 0 }) t.test('default --depth value should now be 0', async t => { - config.all = false - config.depth = undefined - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + all: false, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2918,27 +3114,30 @@ t.test('ls --json', t => { }, 'should output json containing only top-level dependencies' ) - config.all = true - config.depth = Infinity }) t.test('--depth=0', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + all: false, + depth: 0, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2955,27 +3154,30 @@ t.test('ls --json', t => { }, 'should output json containing only top-level dependencies' ) - config.all = true - config.depth = Infinity }) t.test('--depth=1', async t => { - config.all = false - config.depth = 1 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + all: false, + depth: 1, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -2998,33 +3200,34 @@ t.test('ls --json', t => { }, 'should output json containing top-level deps and their deps only' ) - config.all = true - config.depth = Infinity }) t.test('missing/invalid/extraneous', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - ...simpleNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', + }, + }), + ...simpleNmFixture, + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: 
'1.0.0', problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', - /* eslint-disable-next-line max-len */ - 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', ], dependencies: { @@ -3033,8 +3236,7 @@ t.test('ls --json', t => { invalid: '"^2.0.0" from the root project', overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/foo', + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', ], dependencies: { dog: { @@ -3048,8 +3250,7 @@ t.test('ls --json', t => { extraneous: true, overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'extraneous: chai@1.0.0 {CWD}/tap-testdir-ls-ls---json-missing-invalid-extraneous/node_modules/chai', + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', ], }, ipsum: { @@ -3058,36 +3259,50 @@ t.test('ls --json', t => { problems: ['missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'], }, }, + error: { + code: 'ELSPROBLEMS', + summary: [ + 'extraneous: chai@1.0.0 {CWD}/prefix/node_modules/chai', + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + ].join('\n'), + detail: '', + }, }, 'should output json containing top-level deps and their deps only' ) }) t.test('--dev', async t => { - flatOptions.omit = ['prod', 'optional', 'peer'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + omit: ['prod', 'optional', 'peer'], + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3112,44 +3327,48 @@ t.test('ls --json', t => { }, 'should output json containing dev deps' ) - flatOptions.omit = [] }) t.test('--link', async t => { - config.link = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - 'linked-dep': '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - 'linked-dep': { + const { result, ls } = await mockLs(t, { + config: { + ...json, + link: true, + }, + prefixDir: { 'package.json': JSON.stringify({ - name: 'linked-dep', + name: 'test-npm-ls', version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + 'linked-dep': '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 
'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, }), - }, - node_modules: { - 'linked-dep': t.fixture('symlink', '../linked-dep'), - ...diffDepTypesNmFixture.node_modules, + 'linked-dep': { + 'package.json': JSON.stringify({ + name: 'linked-dep', + version: '1.0.0', + }), + }, + node_modules: { + 'linked-dep': t.fixture('symlink', '../linked-dep'), + ...diffDepTypesNmFixture.node_modules, + }, }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3163,34 +3382,38 @@ t.test('ls --json', t => { }, 'should output json containing linked deps' ) - config.link = false }) t.test('--production', async t => { - flatOptions.omit = ['dev', 'peer'] - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + omit: ['dev', 'peer'], + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3217,118 +3440,122 @@ t.test('ls --json', t => { }, 'should output json containing production deps' ) - flatOptions.omit = [] }) t.test('from lockfile', async t => { - npm.prefix = t.testdir({ - node_modules: { - '@isaacs': { - 'dedupe-tests-a': { - 'package.json': JSON.stringify({ + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + node_modules: { + '@isaacs': { + 'dedupe-tests-a': { + 'package.json': JSON.stringify({ + name: '@isaacs/dedupe-tests-a', + version: '1.0.1', + }), + node_modules: { + '@isaacs': { + 'dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '1.0.0', + }, + }, + }, + }, + 'dedupe-tests-b': { + 'package.json': JSON.stringify({ + name: '@isaacs/dedupe-tests-b', + version: '2.0.0', + }), + }, + }, + }, + 'package-lock.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', + }, + }, + 'node_modules/@isaacs/dedupe-tests-a': { name: '@isaacs/dedupe-tests-a', version: '1.0.1', - }), - node_modules: { - '@isaacs': { - 'dedupe-tests-b': { - name: '@isaacs/dedupe-tests-b', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + dependencies: { + '@isaacs/dedupe-tests-b': '1', + }, + }, + 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '1.0.0', + /* eslint-disable-next-line max-len */ + resolved: 
'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + }, + 'node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '2.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, + }, + dependencies: { + '@isaacs/dedupe-tests-a': { + version: '1.0.1', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + requires: { + '@isaacs/dedupe-tests-b': '1', + }, + dependencies: { + '@isaacs/dedupe-tests-b': { version: '1.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, }, }, - }, - 'dedupe-tests-b': { - 'package.json': JSON.stringify({ - name: '@isaacs/dedupe-tests-b', + '@isaacs/dedupe-tests-b': { version: '2.0.0', - }), - }, - }, - }, - 'package-lock.json': JSON.stringify({ - name: 'dedupe-lockfile', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - packages: { - '': { - name: 'dedupe-lockfile', - version: '1.0.0', - dependencies: { - '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': '1||2', - }, - }, - 'node_modules/@isaacs/dedupe-tests-a': { - name: '@isaacs/dedupe-tests-a', - version: '1.0.1', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - dependencies: { - '@isaacs/dedupe-tests-b': '1', - }, - }, - 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { - name: '@isaacs/dedupe-tests-b', - version: '1.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', - }, - 'node_modules/@isaacs/dedupe-tests-b': { - name: '@isaacs/dedupe-tests-b', - version: '2.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', - }, - }, - dependencies: { - '@isaacs/dedupe-tests-a': { - version: '1.0.1', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - requires: { - '@isaacs/dedupe-tests-b': '1', - }, - dependencies: { - 
'@isaacs/dedupe-tests-b': { - version: '1.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', - }, + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', }, }, - '@isaacs/dedupe-tests-b': { - version: '2.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }), + 'package.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', }, - }, - }), - 'package.json': JSON.stringify({ - name: 'dedupe-lockfile', - version: '1.0.0', - dependencies: { - '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': '1||2', - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { version: '1.0.0', name: 'dedupe-lockfile', @@ -3346,7 +3573,7 @@ t.test('ls --json', t => { overridden: false, problems: [ /* eslint-disable-next-line max-len */ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/prefix/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, }, @@ -3360,7 +3587,7 @@ t.test('ls --json', t => { }, problems: [ /* eslint-disable-next-line max-len */ - 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/tap-testdir-ls-ls---json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', + 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/prefix/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b', ], }, 'should output json containing only prod deps' @@ -3368,30 +3595,35 @@ t.test('ls --json', t => { }) t.test('--long', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3405,7 +3637,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/peer-dep', + path: 
'{CWD}/prefix/node_modules/peer-dep', extraneous: false, }, 'dev-dep': { @@ -3427,7 +3659,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/dog', + path: '{CWD}/prefix/node_modules/dog', extraneous: false, }, }, @@ -3435,7 +3667,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: { dog: '^1.0.0' }, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/foo', + path: '{CWD}/prefix/node_modules/foo', extraneous: false, }, }, @@ -3443,7 +3675,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: { foo: '^1.0.0' }, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/dev-dep', + path: '{CWD}/prefix/node_modules/dev-dep', extraneous: false, }, chai: { @@ -3454,7 +3686,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/chai', + path: '{CWD}/prefix/node_modules/chai', extraneous: false, }, 'optional-dep': { @@ -3466,7 +3698,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/optional-dep', + path: '{CWD}/prefix/node_modules/optional-dep', extraneous: false, }, 'prod-dep': { @@ -3484,8 +3716,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - /* eslint-disable-next-line max-len */ - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/prod-dep/node_modules/dog', + path: '{CWD}/prefix/node_modules/prod-dep/node_modules/dog', extraneous: false, }, }, @@ -3493,7 +3724,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: { dog: '^2.0.0' }, - path: '{CWD}/tap-testdir-ls-ls---json---long/node_modules/prod-dep', + path: '{CWD}/prefix/node_modules/prod-dep', extraneous: false, }, }, @@ -3502,41 +3733,45 @@ t.test('ls --json', t => { peerDependencies: { 'peer-dep': '^1.0.0' }, _id: 'test-npm-ls@1.0.0', _dependencies: { 'prod-dep': '^1.0.0', chai: '^1.0.0', 'optional-dep': '^1.0.0' }, - path: '{CWD}/tap-testdir-ls-ls---json---long', + path: '{CWD}/prefix', extraneous: false, }, 'should output long json info' ) - config.long = true }) t.test('--long --depth=0', async t => { - config.all = false - config.depth = 0 - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'optional-dep': '^1.0.0', - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + all: false, + depth: 0, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3550,7 +3785,7 @@ t.test('ls --json', t => { devDependencies: {}, peerDependencies: {}, _dependencies: {}, - path: 
'{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/peer-dep',
+          path: '{CWD}/prefix/node_modules/peer-dep',
           extraneous: false,
         },
         'dev-dep': {
@@ -3562,7 +3797,7 @@ t.test('ls --json', t => {
           devDependencies: {},
           peerDependencies: {},
           _dependencies: { foo: '^1.0.0' },
-          path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/dev-dep',
+          path: '{CWD}/prefix/node_modules/dev-dep',
           extraneous: false,
         },
         chai: {
@@ -3573,7 +3808,7 @@ t.test('ls --json', t => {
           devDependencies: {},
           peerDependencies: {},
           _dependencies: {},
-          path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/chai',
+          path: '{CWD}/prefix/node_modules/chai',
           extraneous: false,
         },
         'optional-dep': {
@@ -3585,7 +3820,7 @@ t.test('ls --json', t => {
           devDependencies: {},
           peerDependencies: {},
           _dependencies: {},
-          path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/optional-dep',
+          path: '{CWD}/prefix/node_modules/optional-dep',
           extraneous: false,
         },
         'prod-dep': {
@@ -3597,7 +3832,7 @@ t.test('ls --json', t => {
           devDependencies: {},
           peerDependencies: {},
           _dependencies: { dog: '^2.0.0' },
-          path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0/node_modules/prod-dep',
+          path: '{CWD}/prefix/node_modules/prod-dep',
           extraneous: false,
         },
       },
@@ -3606,19 +3841,21 @@ t.test('ls --json', t => {
       peerDependencies: { 'peer-dep': '^1.0.0' },
       _id: 'test-npm-ls@1.0.0',
       _dependencies: { 'prod-dep': '^1.0.0', chai: '^1.0.0', 'optional-dep': '^1.0.0' },
-      path: '{CWD}/tap-testdir-ls-ls---json---long---depth-0',
+      path: '{CWD}/prefix',
       extraneous: false,
     },
     'should output json containing top-level deps in long format'
   )
-  config.all = true
-  config.depth = Infinity
-  config.long = false
 })

 t.test('json read problems', async t => {
-  npm.prefix = t.testdir({
-    'package.json': '{broken json',
+  const { result, ls } = await mockLs(t, {
+    config: {
+      ...json,
+    },
+    prefixDir: {
+      'package.json': '{broken json',
+    },
   })
   await t.rejects(
     ls.exec([]),
@@ -3626,54 +3863,65 @@ t.test('ls --json', t => {
     'should have missing root package.json msg'
   )
   t.same(
-    jsonParse(result),
+    jsonParse(result()),
     {
       invalid: true,
       problems: [
-        /* eslint-disable-next-line max-len */
-        'error in {CWD}/tap-testdir-ls-ls---json-json-read-problems: Failed to parse root package.json',
+        'error in {CWD}/prefix: Failed to parse root package.json',
       ],
+      error: {
+        code: 'EJSONPARSE',
+        summary: 'Failed to parse root package.json',
+        detail: [
+          'Failed to parse JSON data.',
+          'Note: package.json must be actual JSON, not just JavaScript.',
+        ].join('\n'),
+      },
     },
     'should print empty json result'
   )
 })

 t.test('empty location', async t => {
-  npm.prefix = t.testdir({})
+  const { ls, result } = await mockLs(t, { config: json })
   await ls.exec([])
-  t.same(jsonParse(result), {}, 'should print empty json result')
+  t.same(jsonParse(result()), {}, 'should print empty json result')
 })

 t.test('unmet peer dep', async t => {
-  npm.prefix = t.testdir({
-    'package.json': JSON.stringify({
-      name: 'test-npm-ls',
-      version: '1.0.0',
-      dependencies: {
-        'prod-dep': '^1.0.0',
-        chai: '^1.0.0',
-      },
-      devDependencies: {
-        'dev-dep': '^1.0.0',
-      },
-      optionalDependencies: {
-        'optional-dep': '^1.0.0',
-      },
-      peerDependencies: {
-        'peer-dep': '^2.0.0', // mismatching version #
-      },
-    }),
-    ...diffDepTypesNmFixture,
+  const { result, ls } = await mockLs(t, {
+    config: {
+      ...json,
+    },
+    prefixDir: {
+      'package.json': JSON.stringify({
+        name: 'test-npm-ls',
+        version: '1.0.0',
+        dependencies: {
+          'prod-dep': '^1.0.0',
+          chai: '^1.0.0',
+        },
+
devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'optional-dep': '^1.0.0', + }, + peerDependencies: { + 'peer-dep': '^2.0.0', // mismatching version # + }, + }), + ...diffDepTypesNmFixture, + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'Should have ELSPROBLEMS error code') t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', + 'invalid: peer-dep@1.0.0 {CWD}/prefix/node_modules/peer-dep', ], dependencies: { 'peer-dep': { @@ -3681,8 +3929,7 @@ t.test('ls --json', t => { invalid: '"^2.0.0" from the root project', overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: peer-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-peer-dep/node_modules/peer-dep', + 'invalid: peer-dep@1.0.0 {CWD}/prefix/node_modules/peer-dep', ], }, 'dev-dep': { @@ -3720,32 +3967,42 @@ t.test('ls --json', t => { }, }, }, + error: { + code: 'ELSPROBLEMS', + summary: 'invalid: peer-dep@1.0.0 {CWD}/prefix/node_modules/peer-dep', + detail: '', + }, }, 'should output json signaling missing peer dep in problems' ) }) t.test('unmet optional dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'prod-dep': '^1.0.0', - chai: '^1.0.0', - }, - devDependencies: { - 'dev-dep': '^1.0.0', - }, - optionalDependencies: { - 'missing-optional-dep': '^1.0.0', - 'optional-dep': '^2.0.0', // mismatching version # - }, - peerDependencies: { - 'peer-dep': '^1.0.0', - }, - }), - ...diffDepTypesNmFixture, + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'prod-dep': '^1.0.0', + chai: '^1.0.0', + }, + devDependencies: { + 'dev-dep': '^1.0.0', + }, + optionalDependencies: { + 'missing-optional-dep': '^1.0.0', + 'optional-dep': '^2.0.0', // mismatching version # + }, + peerDependencies: { + 'peer-dep': '^1.0.0', + }, + }), + ...diffDepTypesNmFixture, + }, }) await t.rejects( ls.exec([]), @@ -3753,13 +4010,13 @@ t.test('ls --json', t => { 'should have invalid dep error msg' ) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems + // mismatching optional deps get flagged in problems + 'invalid: optional-dep@1.0.0 {CWD}/prefix/node_modules/optional-dep', ], dependencies: { 'optional-dep': { @@ -3767,8 +4024,7 @@ t.test('ls --json', t => { invalid: '"^2.0.0" from the root project', overridden: false, problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: optional-dep@1.0.0 {CWD}/tap-testdir-ls-ls---json-unmet-optional-dep/node_modules/optional-dep', + 'invalid: optional-dep@1.0.0 {CWD}/prefix/node_modules/optional-dep', ], }, 'peer-dep': { @@ -3807,44 +4063,54 @@ t.test('ls --json', t => { }, 'missing-optional-dep': {}, // missing optional dep has an empty entry in json output }, + error: { + code: 'ELSPROBLEMS', + summary: 'invalid: optional-dep@1.0.0 {CWD}/prefix/node_modules/optional-dep', + detail: '', + }, }, 'should output json with empty entry for missing optional deps' ) }) t.test('cycle deps', async t => { - 
npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - a: '^1.0.0', - }, - }), + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + a: '^1.0.0', + }, + }), + }, }, }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3871,40 +4137,45 @@ t.test('ls --json', t => { }) t.test('using aliases', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: 'npm:b@1.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/a': { - name: 'b', - version: '1.0.0', - from: 'a@npm:b', - resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', - requested: { - type: 'alias', - }, - }, + const { npm, result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: 'npm:b@1.0.0', }, }), - a: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/a': { + name: 'b', + version: '1.0.0', + from: 'a@npm:b', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz', + requested: { + type: 'alias', + }, + }, + }, }), + a: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3921,51 +4192,56 @@ t.test('ls --json', t => { }) t.test('resolved points to git ref', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - abbrev: 'git+https://github.com/isaacs/abbrev-js.git', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/abbrev': { - name: 'abbrev', - version: '1.1.1', - id: 'abbrev@1.1.1', - from: 'git+https://github.com/isaacs/abbrev-js.git', - /* eslint-disable-next-line max-len */ - resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - }, + const { npm, result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', }, }), - abbrev: { - 'package.json': JSON.stringify({ - name: 'abbrev', - version: '1.1.1', - _id: 'abbrev@1.1.1', - _from: 'git+https://github.com/isaacs/abbrev-js.git', - /* eslint-disable-next-line max-len */ - _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - 
_requested: { - type: 'git', - raw: 'git+https:github.com/isaacs/abbrev-js.git', - rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', - saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', - fetchSpec: 'https://github.com/isaacs/abbrev-js.git', - gitCommittish: null, + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/abbrev': { + name: 'abbrev', + version: '1.1.1', + id: 'abbrev@1.1.1', + from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ + resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + }, }, }), + abbrev: { + 'package.json': JSON.stringify({ + name: 'abbrev', + version: '1.1.1', + _id: 'abbrev@1.1.1', + _from: 'git+https://github.com/isaacs/abbrev-js.git', + /* eslint-disable-next-line max-len */ + _resolved: 'git+https://github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + _requested: { + type: 'git', + raw: 'git+https:github.com/isaacs/abbrev-js.git', + rawSpec: 'git+https:github.com/isaacs/abbrev-js.git', + saveSpec: 'git+https://github.com/isaacs/abbrev-js.git', + fetchSpec: 'https://github.com/isaacs/abbrev-js.git', + gitCommittish: null, + }, + }), + }, }, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -3983,18 +4259,45 @@ t.test('ls --json', t => { }) t.test('from and resolved properties', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - 'simple-output': '^2.0.0', - }, - }), - node_modules: { - '.package-lock.json': JSON.stringify({ - packages: { - 'node_modules/simple-output': { + const { npm, result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + 'simple-output': '^2.0.0', + }, + }), + node_modules: { + '.package-lock.json': JSON.stringify({ + packages: { + 'node_modules/simple-output': { + name: 'simple-output', + version: '2.1.1', + _from: 'simple-output', + _id: 'simple-output@2.1.1', + _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', + _requested: { + type: 'tag', + registry: true, + raw: 'simple-output', + name: 'simple-output', + escapedName: 'simple-output', + rawSpec: '', + saveSpec: null, + fetchSpec: 'latest', + }, + _requiredBy: ['#USER', '/'], + _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', + _spec: 'simple-output', + }, + }, + }), + 'simple-output': { + 'package.json': JSON.stringify({ name: 'simple-output', version: '2.1.1', _from: 'simple-output', @@ -4013,37 +4316,15 @@ t.test('ls --json', t => { _requiredBy: ['#USER', '/'], _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', _spec: 'simple-output', - }, + }), }, - }), - 'simple-output': { - 'package.json': JSON.stringify({ - name: 'simple-output', - version: '2.1.1', - _from: 'simple-output', - _id: 'simple-output@2.1.1', - _resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz', - _requested: { - type: 'tag', - registry: true, - raw: 'simple-output', - name: 'simple-output', - escapedName: 'simple-output', - rawSpec: '', - saveSpec: null, - fetchSpec: 'latest', - }, - _requiredBy: ['#USER', '/'], - _shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc', - _spec: 'simple-output', - }), 
}, }, }) touchHiddenPackageLock(npm.prefix) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4060,57 +4341,64 @@ t.test('ls --json', t => { }) t.test('node.name fallback if missing root package name', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + version: '1.0.0', + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { version: '1.0.0', - name: 'tap-testdir-ls-ls---json-node.name-fallback-if-missing-root-package-name', + name: 'prefix', }, 'should use node.name as key in json result obj' ) }) t.test('global', async t => { - config.global = true - const fixtures = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - node_modules: { - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - }), + const { result, ls } = await mockLs(t, { + config: { + ...json, + global: true, + }, + globalPrefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), + node_modules: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + }), + }, }, }, }, }, }) - // mimics lib/npm.js globalDir getter but pointing to fixtures - npm.globalDir = resolve(fixtures, 'node_modules') - await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { - name: 'tap-testdir-ls-ls---json-global', + name: process.platform === 'win32' ? 
'global' : 'lib', dependencies: { a: { version: '1.0.0', @@ -4130,98 +4418,99 @@ t.test('ls --json', t => { }, 'should print json output for global deps' ) - npm.globalDir = 'MISSING_GLOBAL_DIR' - config.global = false }) - - t.end() }) t.test('show multiple invalid reasons', async t => { - config.json = false - config.all = true - config.depth = Infinity - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - cat: '^2.0.0', - dog: '^1.2.3', - }, - }), - node_modules: { - cat: { - 'package.json': JSON.stringify({ - name: 'cat', - version: '1.0.0', - dependencies: { - dog: '^2.0.0', - }, - }), - }, - dog: { - 'package.json': JSON.stringify({ - name: 'dog', - version: '1.0.0', - dependencies: { - cat: '', - }, - }), - }, - chai: { - 'package.json': JSON.stringify({ - name: 'chai', - version: '1.0.0', - dependencies: { - dog: '2.x', - }, - }), + const { result, ls } = await mockLs(t, { + config: {}, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + cat: '^2.0.0', + dog: '^1.2.3', + }, + }), + node_modules: { + cat: { + 'package.json': JSON.stringify({ + name: 'cat', + version: '1.0.0', + dependencies: { + dog: '^2.0.0', + }, + }), + }, + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '1.0.0', + dependencies: { + cat: '', + }, + }), + }, + chai: { + 'package.json': JSON.stringify({ + name: 'chai', + version: '1.0.0', + dependencies: { + dog: '2.x', + }, + }), + }, }, }, }) - const cleanupPaths = str => redactCwd(str).toLowerCase().replace(/\\/g, '/') await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') - t.matchSnapshot(cleanupPaths(result), 'ls result') + t.matchSnapshot(cleanCwd(result()), 'ls result') }) -t.test('ls --package-lock-only', t => { - config['package-lock-only'] = true - t.test('ls --package-lock-only --json', t => { - t.beforeEach(cleanUpResult) - config.json = true - config.parseable = false +t.test('ls --package-lock-only', async t => { + const lock = { 'package-lock-only': true } + + t.test('ls --package-lock-only --json', async t => { + const json = { json: true } + t.test('no args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4247,34 +4536,40 @@ t.test('ls --package-lock-only', t => { }) t.test('extraneous deps', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - 
version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4289,90 +4584,101 @@ t.test('ls --package-lock-only', t => { }, }, }, - }, - }, - 'should output json containing no problem info' - ) - }) - - t.test('missing deps --long', async t => { - config.long = true - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - dog: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', - }, - chai: { - version: '1.0.0', + }, + }, + 'should output json containing no problem info' + ) + }) + + t.test('missing deps --long', async t => { + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + long: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + dog: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', }, - ipsum: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.match( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', }, 'should output json containing no problems info' ) - config.long = false }) t.test('with filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', - }, - chai: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - ipsum: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec(['chai']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4385,42 +4691,48 @@ t.test('ls --package-lock-only', t => { }, 'should output json contaning only occurrences of filtered by package' ) - t.equal(process.exitCode, 0, 'should exit with error code 0') + t.notOk(process.exitCode, 'should not set exit code') }) t.test('with filter arg nested dep', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: 
'1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', - }, - chai: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - ipsum: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec(['dog']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4442,39 +4754,45 @@ t.test('ls --package-lock-only', t => { }) t.test('with multiple filter args', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - ipsum: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', - }, - chai: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + ipsum: '^1.0.0', }, - ipsum: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, + ipsum: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec(['dog@*', 'chai@1.0.0']) t.same( - jsonParse(result), + jsonParse(result()), { version: '1.0.0', name: 'test-npm-ls', @@ -4501,35 +4819,41 @@ t.test('ls --package-lock-only', t => { }) t.test('with missing filter arg', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec(['notadep']) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4537,41 +4861,45 @@ t.test('ls --package-lock-only', t => { 'should output json containing no dependencies info' ) t.equal(process.exitCode, 1, 'should exit with error code 1') - process.exitCode = 0 }) t.test('default --depth value should now be 0', async t => { - config.all = false - config.depth = undefined - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: 
'1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + all: false, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4588,42 +4916,46 @@ t.test('ls --package-lock-only', t => { }, 'should output json containing only top-level dependencies' ) - config.all = true - config.depth = Infinity }) t.test('--depth=0', async t => { - config.all = false - config.depth = 0 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + depth: 0, + all: false, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', + }, + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', }, }, - dog: { - version: '1.0.0', - }, - chai: { - version: '1.0.0', - }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4640,42 +4972,46 @@ t.test('ls --package-lock-only', t => { }, 'should output json containing only top-level dependencies' ) - config.all = true - config.depth = Infinity }) t.test('--depth=1', async t => { - config.all = false - config.depth = 1 - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^1.0.0', - chai: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + all: false, + depth: 1, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^1.0.0', + chai: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4698,46 +5034,49 @@ t.test('ls --package-lock-only', t => { }, 'should output json containing top-level deps and their deps only' ) - config.all = true - config.depth = Infinity }) t.test('missing/invalid/extraneous', async t => { - npm.prefix = t.testdir({ - 'package.json': 
JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - foo: '^2.0.0', - ipsum: '^1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - foo: { - version: '1.0.0', - requires: { - dog: '^1.0.0', - }, - }, - dog: { - version: '1.0.0', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + foo: '^2.0.0', + ipsum: '^1.0.0', }, - chai: { - version: '1.0.0', + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + foo: { + version: '1.0.0', + requires: { + dog: '^1.0.0', + }, + }, + dog: { + version: '1.0.0', + }, + chai: { + version: '1.0.0', + }, }, - }, - }), + }), + }, }) await t.rejects(ls.exec([]), { code: 'ELSPROBLEMS' }, 'should list dep problems') t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', ], dependencies: { @@ -4746,8 +5085,7 @@ t.test('ls --package-lock-only', t => { overridden: false, invalid: '"^2.0.0" from the root project', problems: [ - /* eslint-disable-next-line max-len */ - 'invalid: foo@1.0.0 {CWD}/tap-testdir-ls-ls---package-lock-only-ls---package-lock-only---json-missing-invalid-extraneous/node_modules/foo', + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', ], dependencies: { dog: { @@ -4762,96 +5100,110 @@ t.test('ls --package-lock-only', t => { problems: ['missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'], }, }, + error: { + code: 'ELSPROBLEMS', + summary: [ + 'invalid: foo@1.0.0 {CWD}/prefix/node_modules/foo', + 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0', + ].join('\n'), + detail: '', + }, }, 'should output json containing top-level deps and their deps only' ) }) t.test('from lockfile', async t => { - npm.prefix = t.testdir({ - 'package-lock.json': JSON.stringify({ - name: 'dedupe-lockfile', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - packages: { - '': { - name: 'dedupe-lockfile', - version: '1.0.0', - dependencies: { - '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': '1||2', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package-lock.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', + }, }, - }, - 'node_modules/@isaacs/dedupe-tests-a': { - name: '@isaacs/dedupe-tests-a', - version: '1.0.1', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - dependencies: { - '@isaacs/dedupe-tests-b': '1', + 'node_modules/@isaacs/dedupe-tests-a': { + name: '@isaacs/dedupe-tests-a', + version: '1.0.1', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ + integrity: 
'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + dependencies: { + '@isaacs/dedupe-tests-b': '1', + }, }, - }, - 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { - name: '@isaacs/dedupe-tests-b', - version: '1.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', - }, - 'node_modules/@isaacs/dedupe-tests-b': { - name: '@isaacs/dedupe-tests-b', - version: '2.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', - }, - }, - dependencies: { - '@isaacs/dedupe-tests-a': { - version: '1.0.1', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', - requires: { - '@isaacs/dedupe-tests-b': '1', + 'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '1.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', }, - dependencies: { - '@isaacs/dedupe-tests-b': { - version: '1.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + 'node_modules/@isaacs/dedupe-tests-b': { + name: '@isaacs/dedupe-tests-b', + version: '2.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, + }, + dependencies: { + '@isaacs/dedupe-tests-a': { + version: '1.0.1', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==', + requires: { + '@isaacs/dedupe-tests-b': '1', + }, + dependencies: { + '@isaacs/dedupe-tests-b': { + version: '1.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz', + /* eslint-disable-next-line max-len */ + integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==', + }, }, }, + '@isaacs/dedupe-tests-b': { + version: '2.0.0', + /* eslint-disable-next-line max-len */ + resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', + /* eslint-disable-next-line max-len */ + 
integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }, }, - '@isaacs/dedupe-tests-b': { - version: '2.0.0', - /* eslint-disable-next-line max-len */ - resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz', - /* eslint-disable-next-line max-len */ - integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==', + }), + 'package.json': JSON.stringify({ + name: 'dedupe-lockfile', + version: '1.0.0', + dependencies: { + '@isaacs/dedupe-tests-a': '1.0.1', + '@isaacs/dedupe-tests-b': '1||2', }, - }, - }), - 'package.json': JSON.stringify({ - name: 'dedupe-lockfile', - version: '1.0.0', - dependencies: { - '@isaacs/dedupe-tests-a': '1.0.1', - '@isaacs/dedupe-tests-b': '1||2', - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { version: '1.0.0', name: 'dedupe-lockfile', @@ -4883,26 +5235,32 @@ t.test('ls --package-lock-only', t => { }) t.test('using aliases', async t => { - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - a: 'npm:b@1.0.0', - }, - }), - 'package-lock.json': JSON.stringify({ - dependencies: { - a: { - version: 'npm:b@1.0.0', - resolved: 'https://localhost:8080/abbrev/-/abbrev-1.0.0.tgz', + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + a: 'npm:b@1.0.0', }, - }, - }), + }), + 'package-lock.json': JSON.stringify({ + dependencies: { + a: { + version: 'npm:b@1.0.0', + resolved: 'https://localhost:8080/abbrev/-/abbrev-1.0.0.tgz', + }, + }, + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4919,32 +5277,38 @@ t.test('ls --package-lock-only', t => { }) t.test('resolved points to git ref', async t => { - config.long = false - npm.prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - dependencies: { - abbrev: 'git+https://github.com/isaacs/abbrev-js.git', - }, - }), - 'package-lock.json': JSON.stringify({ - name: 'test-npm-ls', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - dependencies: { - abbrev: { + const { result, ls } = await mockLs(t, { + config: { + ...lock, + ...json, + long: false, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + dependencies: { + abbrev: 'git+https://github.com/isaacs/abbrev-js.git', + }, + }), + 'package-lock.json': JSON.stringify({ + name: 'test-npm-ls', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + dependencies: { + abbrev: { /* eslint-disable-next-line max-len */ - version: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', - from: 'abbrev@git+https://github.com/isaacs/abbrev-js.git', + version: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c', + from: 'abbrev@git+https://github.com/isaacs/abbrev-js.git', + }, }, - }, - }), + }), + }, }) await ls.exec([]) t.same( - jsonParse(result), + jsonParse(result()), { name: 'test-npm-ls', version: '1.0.0', @@ -4959,9 +5323,5 @@ t.test('ls --package-lock-only', t => { 'should output json containing git refs' ) }) - - t.end() }) - - t.end() }) diff --git a/deps/npm/test/lib/commands/org.js 
b/deps/npm/test/lib/commands/org.js index cd25fc23aa3344..d3700304328eea 100644 --- a/deps/npm/test/lib/commands/org.js +++ b/deps/npm/test/lib/commands/org.js @@ -1,53 +1,56 @@ const t = require('tap') const ansiTrim = require('../../../lib/utils/ansi-trim.js') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const output = [] -const npm = mockNpm({ - flatOptions: { - json: false, - parseable: false, - }, - config: { - loglevel: 'info', - }, - output: msg => { - output.push(msg) - }, -}) +const mockNpm = require('../../fixtures/mock-npm') + +const mockOrg = async (t, { orgSize = 1, orgList = {}, ...npmOpts } = {}) => { + let setArgs = null + let rmArgs = null + let lsArgs = null + + const libnpmorg = { + set: async (org, user, role, opts) => { + setArgs = { org, user, role, opts } + return { + org: { + name: org, + size: orgSize, + }, + user, + role, + } + }, + rm: async (org, user, opts) => { + rmArgs = { org, user, opts } + }, + ls: async (org, opts) => { + lsArgs = { org, opts } + return orgList + }, + } -let orgSize = 1 -let orgSetArgs = null -let orgRmArgs = null -let orgLsArgs = null -let orgList = {} -const libnpmorg = { - set: async (org, user, role, opts) => { - orgSetArgs = { org, user, role, opts } - return { - org: { - name: org, - size: orgSize, - }, - user, - role, - } - }, - rm: async (org, user, opts) => { - orgRmArgs = { org, user, opts } - }, - ls: async (org, opts) => { - orgLsArgs = { org, opts } - return orgList - }, -} + const mock = await mockNpm(t, { + ...npmOpts, + mocks: { + libnpmorg, + ...npmOpts.mocks, + }, + }) -const Org = t.mock('../../../lib/commands/org.js', { - libnpmorg, -}) -const org = new Org(npm) + return { + ...mock, + org: { + exec: (args) => mock.npm.exec('org', args), + completion: (arg) => mock.npm.cmd('org').then(c => c.completion(arg)), + usage: () => mock.npm.cmd('org').then(c => c.usage), + }, + setArgs: () => setArgs, + rmArgs: () => rmArgs, + lsArgs: () => lsArgs, + } +} t.test('completion', async t => { + const { org } = await mockOrg(t) const completion = argv => org.completion({ conf: { argv: { remain: argv } } }) const assertions = [ @@ -73,19 +76,17 @@ t.test('completion', async t => { }) t.test('npm org - invalid subcommand', async t => { - await t.rejects(org.exec(['foo']), org.usage) + const { org } = await mockOrg(t) + await t.rejects(org.exec(['foo']), org.usage()) }) t.test('npm org add', async t => { - t.teardown(() => { - orgSetArgs = null - output.length = 0 - }) + const { npm, org, setArgs, outputs } = await mockOrg(t) await org.exec(['add', 'orgname', 'username']) t.match( - orgSetArgs, + setArgs(), { org: 'orgname', user: 'username', @@ -95,17 +96,14 @@ t.test('npm org add', async t => { 'received the correct arguments' ) t.equal( - output[0], + outputs[0][0], 'Added username as developer to orgname. 
You now have 1 member in this org.', 'printed the correct output' ) }) t.test('npm org add - no org', async t => { - t.teardown(() => { - orgSetArgs = null - output.length = 0 - }) + const { org } = await mockOrg(t) await t.rejects( org.exec(['add', '', 'username']), @@ -115,11 +113,7 @@ t.test('npm org add - no org', async t => { }) t.test('npm org add - no user', async t => { - t.teardown(() => { - orgSetArgs = null - output.length = 0 - }) - + const { org } = await mockOrg(t) await t.rejects( org.exec(['add', 'orgname', '']), /`username` is required/, @@ -128,11 +122,7 @@ t.test('npm org add - no user', async t => { }) t.test('npm org add - invalid role', async t => { - t.teardown(() => { - orgSetArgs = null - output.length = 0 - }) - + const { org } = await mockOrg(t) await t.rejects( org.exec(['add', 'orgname', 'username', 'person']), /`role` must be one of/, @@ -141,16 +131,12 @@ t.test('npm org add - invalid role', async t => { }) t.test('npm org add - more users', async t => { - orgSize = 5 - t.teardown(() => { - orgSize = 1 - orgSetArgs = null - output.length = 0 - }) + const orgSize = 5 + const { npm, org, outputs, setArgs } = await mockOrg(t, { orgSize }) await org.exec(['add', 'orgname', 'username']) t.match( - orgSetArgs, + setArgs(), { org: 'orgname', user: 'username', @@ -160,24 +146,21 @@ t.test('npm org add - more users', async t => { 'received the correct arguments' ) t.equal( - output[0], + outputs[0][0], 'Added username as developer to orgname. You now have 5 members in this org.', 'printed the correct output' ) }) t.test('npm org add - json output', async t => { - npm.flatOptions.json = true - t.teardown(() => { - npm.flatOptions.json = false - orgSetArgs = null - output.length = 0 + const { npm, org, outputs, setArgs } = await mockOrg(t, { + config: { json: true }, }) await org.exec(['add', 'orgname', 'username']) t.match( - orgSetArgs, + setArgs(), { org: 'orgname', user: 'username', @@ -187,7 +170,7 @@ t.test('npm org add - json output', async t => { 'received the correct arguments' ) t.strictSame( - JSON.parse(output[0]), + JSON.parse(outputs[0]), { org: { name: 'orgname', @@ -201,17 +184,15 @@ t.test('npm org add - json output', async t => { }) t.test('npm org add - parseable output', async t => { - npm.flatOptions.parseable = true - t.teardown(() => { - npm.flatOptions.parseable = false - orgSetArgs = null - output.length = 0 + const config = { parseable: true } + const { npm, org, outputs, setArgs } = await mockOrg(t, { + config, }) await org.exec(['add', 'orgname', 'username']) t.match( - orgSetArgs, + setArgs(), { org: 'orgname', user: 'username', @@ -221,7 +202,7 @@ t.test('npm org add - parseable output', async t => { 'received the correct arguments' ) t.strictSame( - output.map(line => line.split(/\t/)), + outputs.map(line => line[0].split(/\t/)), [ ['org', 'orgsize', 'user', 'role'], ['orgname', '1', 'username', 'developer'], @@ -231,17 +212,15 @@ t.test('npm org add - parseable output', async t => { }) t.test('npm org add - silent output', async t => { - npm.config.set('loglevel', 'silent') - t.teardown(() => { - npm.config.set('loglevel', 'info') - orgSetArgs = null - output.length = 0 + const config = { loglevel: 'silent' } + const { npm, org, outputs, setArgs } = await mockOrg(t, { + config, }) await org.exec(['add', 'orgname', 'username']) t.match( - orgSetArgs, + setArgs(), { org: 'orgname', user: 'username', @@ -250,20 +229,16 @@ t.test('npm org add - silent output', async t => { }, 'received the correct arguments' ) - t.strictSame(output, [], 
'prints no output') + t.strictSame(outputs, [], 'prints no output') }) t.test('npm org rm', async t => { - t.teardown(() => { - orgRmArgs = null - orgLsArgs = null - output.length = 0 - }) + const { npm, org, outputs, lsArgs, rmArgs } = await mockOrg(t) await org.exec(['rm', 'orgname', 'username']) t.match( - orgRmArgs, + rmArgs(), { org: 'orgname', user: 'username', @@ -272,7 +247,7 @@ t.test('npm org rm', async t => { 'libnpmorg.rm received the correct args' ) t.match( - orgLsArgs, + lsArgs(), { org: 'orgname', opts: npm.flatOptions, @@ -280,18 +255,14 @@ t.test('npm org rm', async t => { 'libnpmorg.ls received the correct args' ) t.equal( - output[0], + outputs[0][0], 'Successfully removed username from orgname. You now have 0 members in this org.', 'printed the correct output' ) }) t.test('npm org rm - no org', async t => { - t.teardown(() => { - orgRmArgs = null - orgLsArgs = null - output.length = 0 - }) + const { org } = await mockOrg(t) await t.rejects( org.exec(['rm', '', 'username']), @@ -301,31 +272,23 @@ t.test('npm org rm - no org', async t => { }) t.test('npm org rm - no user', async t => { - t.teardown(() => { - orgRmArgs = null - orgLsArgs = null - output.length = 0 - }) + const { org } = await mockOrg(t) await t.rejects(org.exec(['rm', 'orgname']), /`username` is required/, 'threw the correct error') }) t.test('npm org rm - one user left', async t => { - orgList = { + const orgList = { one: 'developer', } - - t.teardown(() => { - orgList = {} - orgRmArgs = null - orgLsArgs = null - output.length = 0 + const { npm, org, outputs, lsArgs, rmArgs } = await mockOrg(t, { + orgList, }) await org.exec(['rm', 'orgname', 'username']) t.match( - orgRmArgs, + rmArgs(), { org: 'orgname', user: 'username', @@ -334,7 +297,7 @@ t.test('npm org rm - one user left', async t => { 'libnpmorg.rm received the correct args' ) t.match( - orgLsArgs, + lsArgs(), { org: 'orgname', opts: npm.flatOptions, @@ -342,25 +305,22 @@ t.test('npm org rm - one user left', async t => { 'libnpmorg.ls received the correct args' ) t.equal( - output[0], + outputs[0][0], 'Successfully removed username from orgname. 
You now have 1 member in this org.',
     'printed the correct output'
   )
 })

 t.test('npm org rm - json output', async t => {
-  npm.flatOptions.json = true
-  t.teardown(() => {
-    npm.flatOptions.json = false
-    orgRmArgs = null
-    orgLsArgs = null
-    output.length = 0
+  const config = { json: true }
+  const { npm, org, outputs, lsArgs, rmArgs } = await mockOrg(t, {
+    config,
   })

   await org.exec(['rm', 'orgname', 'username'])

   t.match(
-    orgRmArgs,
+    rmArgs(),
     {
       org: 'orgname',
       user: 'username',
@@ -369,7 +329,7 @@ t.test('npm org rm - json output', async t => {
     'libnpmorg.rm received the correct args'
   )
   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
@@ -377,7 +337,7 @@ t.test('npm org rm - json output', async t => {
     'libnpmorg.ls received the correct args'
   )
   t.strictSame(
-    JSON.parse(output[0]),
+    JSON.parse(outputs[0]),
     {
       user: 'username',
       org: 'orgname',
@@ -389,18 +349,15 @@ t.test('npm org rm - json output', async t => {
 })

 t.test('npm org rm - parseable output', async t => {
-  npm.flatOptions.parseable = true
-  t.teardown(() => {
-    npm.flatOptions.parseable = false
-    orgRmArgs = null
-    orgLsArgs = null
-    output.length = 0
+  const config = { parseable: true }
+  const { npm, org, outputs, lsArgs, rmArgs } = await mockOrg(t, {
+    config,
   })

   await org.exec(['rm', 'orgname', 'username'])

   t.match(
-    orgRmArgs,
+    rmArgs(),
     {
       org: 'orgname',
       user: 'username',
@@ -409,7 +366,7 @@ t.test('npm org rm - parseable output', async t => {
     'libnpmorg.rm received the correct args'
   )
   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
@@ -417,7 +374,7 @@ t.test('npm org rm - parseable output', async t => {
     'libnpmorg.ls received the correct args'
   )
   t.strictSame(
-    output.map(line => line.split(/\t/)),
+    outputs.map(line => line[0].split(/\t/)),
     [
       ['user', 'org', 'userCount', 'deleted'],
       ['username', 'orgname', '0', 'true'],
@@ -427,18 +384,15 @@ t.test('npm org rm - parseable output', async t => {
 })

 t.test('npm org rm - silent output', async t => {
-  npm.config.set('loglevel', 'silent')
-  t.teardown(() => {
-    npm.config.set('loglevel', 'info')
-    orgRmArgs = null
-    orgLsArgs = null
-    output.length = 0
+  const config = { loglevel: 'silent' }
+  const { npm, org, outputs, lsArgs, rmArgs } = await mockOrg(t, {
+    config,
   })

   await org.exec(['rm', 'orgname', 'username'])

   t.match(
-    orgRmArgs,
+    rmArgs(),
     {
       org: 'orgname',
       user: 'username',
@@ -447,149 +401,135 @@ t.test('npm org rm - silent output', async t => {
     'libnpmorg.rm received the correct args'
   )
   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'libnpmorg.ls received the correct args'
   )
-  t.strictSame(output, [], 'printed no output')
+  t.strictSame(outputs, [], 'printed no output')
 })

 t.test('npm org ls', async t => {
-  orgList = {
+  const orgList = {
     one: 'developer',
     two: 'admin',
     three: 'owner',
   }
-  t.teardown(() => {
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
   })

   await org.exec(['ls', 'orgname'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'received the correct args'
   )
-  const out = ansiTrim(output[0])
+  const out = ansiTrim(outputs[0][0])
   t.match(out, /one.*developer/, 'contains the developer member')
   t.match(out, /two.*admin/, 'contains the admin member')
   t.match(out, /three.*owner/, 'contains the owner member')
 })

 t.test('npm org ls - user filter', async t => {
-  orgList = {
+  const orgList = {
     username: 'admin',
     missing: 'admin',
   }
-  t.teardown(() => {
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
   })

   await org.exec(['ls', 'orgname', 'username'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'received the correct args'
   )
-  const out = ansiTrim(output[0])
+  const out = ansiTrim(outputs[0][0])
   t.match(out, /username.*admin/, 'contains the filtered member')
   t.notMatch(out, /missing.*admin/, 'does not contain other members')
 })

 t.test('npm org ls - user filter, missing user', async t => {
-  orgList = {
+  const orgList = {
     missing: 'admin',
   }
-  t.teardown(() => {
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
   })

   await org.exec(['ls', 'orgname', 'username'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'received the correct args'
   )
-  const out = ansiTrim(output[0])
+  const out = ansiTrim(outputs[0][0])
   t.notMatch(out, /username/, 'does not contain the requested member')
   t.notMatch(out, /missing.*admin/, 'does not contain other members')
 })

 t.test('npm org ls - no org', async t => {
-  t.teardown(() => {
-    orgLsArgs = null
-    output.length = 0
-  })
-
+  const { org } = await mockOrg(t)
   await t.rejects(org.exec(['ls']), /`orgname` is required/, 'throws the correct error')
 })

 t.test('npm org ls - json output', async t => {
-  npm.flatOptions.json = true
-  orgList = {
+  const config = { json: true }
+  const orgList = {
     one: 'developer',
     two: 'admin',
     three: 'owner',
   }
-  t.teardown(() => {
-    npm.flatOptions.json = false
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
+    config,
   })

   await org.exec(['ls', 'orgname'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'received the correct args'
   )
-  t.strictSame(JSON.parse(output[0]), orgList, 'prints the correct output')
+  t.strictSame(JSON.parse(outputs[0]), orgList, 'prints the correct output')
 })

 t.test('npm org ls - parseable output', async t => {
-  npm.flatOptions.parseable = true
-  orgList = {
+  const config = { parseable: true }
+  const orgList = {
     one: 'developer',
     two: 'admin',
     three: 'owner',
   }
-  t.teardown(() => {
-    npm.flatOptions.parseable = false
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
+    config,
   })

   await org.exec(['ls', 'orgname'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
@@ -597,7 +537,7 @@ t.test('npm org ls - parseable output', async t => {
     'received the correct args'
   )
   t.strictSame(
-    output.map(line => line.split(/\t/)),
+    outputs.map(line => line[0].split(/\t/)),
     [
       ['user', 'role'],
       ['one', 'developer'],
@@ -609,28 +549,26 @@ t.test('npm org ls - parseable output', async t => {
 })

 t.test('npm org ls - silent output', async t => {
-  npm.config.set('loglevel', 'silent')
-  orgList = {
+  const config = { loglevel: 'silent' }
+  const orgList = {
     one: 'developer',
     two: 'admin',
     three: 'owner',
   }
-  t.teardown(() => {
-    npm.config.set('loglevel', 'info')
-    orgList = {}
-    orgLsArgs = null
-    output.length = 0
+  const { npm, org, outputs, lsArgs } = await mockOrg(t, {
+    orgList,
+    config,
   })

   await org.exec(['ls', 'orgname'])

   t.match(
-    orgLsArgs,
+    lsArgs(),
     {
       org: 'orgname',
       opts: npm.flatOptions,
     },
     'received the correct args'
   )
-  t.strictSame(output, [], 'printed no output')
+  t.strictSame(outputs, [], 'printed no output')
 })
diff --git
a/deps/npm/test/lib/commands/outdated.js b/deps/npm/test/lib/commands/outdated.js index 4803c7e17188ca..02f2067c5480eb 100644 --- a/deps/npm/test/lib/commands/outdated.js +++ b/deps/npm/test/lib/commands/outdated.js @@ -1,5 +1,9 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const MockRegistry = require('@npmcli/mock-registry') +const _mockNpm = require('../../fixtures/mock-npm') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + +t.cleanSnapshot = (str) => cleanCwd(str) const packument = spec => { const mocks = { @@ -68,69 +72,18 @@ const packument = spec => { return mocks[spec.name] } -let logs -const output = (msg) => { - logs = `${logs}\n${msg}` -} - -const globalDir = t.testdir({ - node_modules: { - cat: { - 'package.json': JSON.stringify({ - name: 'cat', - version: '1.0.0', - }, null, 2), +const fixtures = { + global: { + node_modules: { + cat: { + 'package.json': JSON.stringify({ + name: 'cat', + version: '1.0.0', + }, null, 2), + }, }, }, -}) - -const outdated = (dir, opts) => { - logs = '' - const Outdated = t.mock('../../../lib/commands/outdated.js', { - pacote: { - packument, - }, - }) - if (opts.config && opts.config.omit) { - opts.flatOptions = { - omit: opts.config.omit, - ...opts.flatOptions, - } - delete opts.config.omit - } - const npm = mockNpm({ - ...opts, - localPrefix: dir, - prefix: dir, - flatOptions: { - workspacesEnabled: true, - omit: [], - ...opts.flatOptions, - }, - globalDir: `${globalDir}/node_modules`, - output, - }) - return new Outdated(npm) -} - -t.beforeEach(() => logs = '') - -const { exitCode } = process - -t.afterEach(() => process.exitCode = exitCode) - -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') -} - -t.cleanSnapshot = (str) => redactCwd(str) - -t.test('should display outdated deps', t => { - const testDir = t.testdir({ + local: { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -186,145 +139,274 @@ t.test('should display outdated deps', t => { }, null, 2), }, }, + }, + workspaces: { + 'package.json': JSON.stringify({ + name: 'workspaces-project', + version: '1.0.0', + workspaces: ['packages/*'], + dependencies: { + dog: '^1.0.0', + }, + }), + node_modules: { + a: t.fixture('symlink', '../packages/a'), + b: t.fixture('symlink', '../packages/b'), + c: t.fixture('symlink', '../packages/c'), + cat: { + 'package.json': JSON.stringify({ + name: 'cat', + version: '1.0.0', + dependencies: { + dog: '2.0.0', + }, + }), + node_modules: { + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '2.0.0', + }), + }, + }, + }, + chai: { + 'package.json': JSON.stringify({ + name: 'chai', + version: '1.0.0', + }), + }, + dog: { + 'package.json': JSON.stringify({ + name: 'dog', + version: '1.0.1', + }), + }, + foo: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + dependencies: { + chai: '^1.0.0', + }, + }), + }, + zeta: { + 'package.json': JSON.stringify({ + name: 'zeta', + version: '1.0.0', + }), + }, + }, + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + dependencies: { + b: '^1.0.0', + cat: '^1.0.0', + foo: '^1.0.0', + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + dependencies: { + zeta: '^1.0.0', + }, + }), + }, + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + dependencies: { 
+ theta: '^1.0.0', + }, + }), + }, + }, + }, +} + +const mockNpm = async (t, { prefixDir, ...opts } = {}) => { + const res = await _mockNpm(t, { + mocks: { + pacote: { + packument, + }, + }, + ...opts, + prefixDir, + }) + + // this is not currently used, but ensures that no requests are + // hitting the registry. + // XXX: the pacote mock should be replaced with mock registry calls + const registry = new MockRegistry({ + tap: t, + registry: res.npm.config.get('registry'), + strict: true, }) - t.test('outdated global', async t => { - await outdated(null, { + return { + ...res, + registry, + exec: (args) => res.npm.exec('outdated', args), + } +} + +t.test('should display outdated deps', async t => { + await t.test('outdated global', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + globalPrefixDir: fixtures.global, config: { global: true }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated', async t => { - await outdated(testDir, { + await t.test('outdated', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, + color: 'always', }, - color: true, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --omit=dev', async t => { - await outdated(testDir, { + await t.test('outdated --omit=dev', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, omit: ['dev'], + color: 'always', }, - color: true, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --omit=dev --omit=peer', async t => { - await outdated(testDir, { + await t.test('outdated --omit=dev --omit=peer', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, omit: ['dev', 'peer'], + color: 'always', }, - color: true, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --omit=prod', async t => { - await outdated(testDir, { + await t.test('outdated --omit=prod', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, omit: ['prod'], + color: 'always', }, - color: true, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --long', async t => { - await outdated(testDir, { + await t.test('outdated --long', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, long: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --json', async t => { - await outdated(testDir, { + await t.test('outdated --json', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, json: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --json --long', async t => { - await outdated(testDir, { + await t.test('outdated --json --long', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + 
prefixDir: fixtures.local, config: { - global: false, json: true, long: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --parseable', async t => { - await outdated(testDir, { + await t.test('outdated --parseable', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, parseable: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --parseable --long', async t => { - await outdated(testDir, { + await t.test('outdated --parseable --long', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { - global: false, parseable: true, long: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated --all', async t => { - await outdated(testDir, { + await t.test('outdated --all', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, config: { all: true, }, - }).exec([]) + }) + await exec([]) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - t.test('outdated specific dep', async t => { - await outdated(testDir, { - config: { - global: false, - }, - }).exec(['cat']) + await t.test('outdated specific dep', async t => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.local, + }) + await exec(['cat']) t.equal(process.exitCode, 1) - t.matchSnapshot(logs) + t.matchSnapshot(joinedOutput()) }) - - t.end() }) t.test('should return if no outdated deps', async t => { - const testDir = t.testdir({ + const testDir = { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -340,18 +422,18 @@ t.test('should return if no outdated deps', async t => { }, null, 2), }, }, - }) + } - await outdated(testDir, { - config: { - global: false, - }, - }).exec([]) - t.equal(logs.length, 0, 'no logs') + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: testDir, + + }) + await exec([]) + t.equal(joinedOutput(), '', 'no logs') }) t.test('throws if error with a dep', async t => { - const testDir = t.testdir({ + const testDir = { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -367,20 +449,17 @@ t.test('throws if error with a dep', async t => { }, null, 2), }, }, + } + + const { exec } = await mockNpm(t, { + prefixDir: testDir, }) - await t.rejects( - outdated(testDir, { - config: { - global: false, - }, - }).exec([]), - 'There is an error with this package.' 
- ) + await t.rejects(exec([]), 'There is an error with this package.') }) t.test('should skip missing non-prod deps', async t => { - const testDir = t.testdir({ + const testDir = { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -389,18 +468,19 @@ t.test('should skip missing non-prod deps', async t => { }, }, null, 2), node_modules: {}, + } + + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: testDir, }) - await outdated(testDir, { - config: { - global: false, - }, - }).exec([]) - t.equal(logs.length, 0, 'no logs') + await exec([]) + + t.equal(joinedOutput(), '', 'no logs') }) t.test('should skip invalid pkg ranges', async t => { - const testDir = t.testdir({ + const testDir = { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -416,14 +496,17 @@ t.test('should skip invalid pkg ranges', async t => { }, null, 2), }, }, - }) + } - await outdated(testDir, {}).exec([]) - t.equal(logs.length, 0, 'no logs') + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: testDir, + }) + await exec([]) + t.equal(joinedOutput(), '', 'no logs') }) t.test('should skip git specs', async t => { - const testDir = t.testdir({ + const testDir = { 'package.json': JSON.stringify({ name: 'delta', version: '1.0.0', @@ -439,194 +522,70 @@ t.test('should skip git specs', async t => { }, null, 2), }, }, - }) + } - await outdated(testDir, {}).exec([]) - t.equal(logs.length, 0, 'no logs') + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: testDir, + }) + await exec([]) + t.equal(joinedOutput(), '', 'no logs') }) t.test('workspaces', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-project', - version: '1.0.0', - workspaces: ['packages/*'], - dependencies: { - dog: '^1.0.0', - }, - }), - node_modules: { - a: t.fixture('symlink', '../packages/a'), - b: t.fixture('symlink', '../packages/b'), - c: t.fixture('symlink', '../packages/c'), - cat: { - 'package.json': JSON.stringify({ - name: 'cat', - version: '1.0.0', - dependencies: { - dog: '2.0.0', - }, - }), - node_modules: { - dog: { - 'package.json': JSON.stringify({ - name: 'dog', - version: '2.0.0', - }), - }, - }, - }, - chai: { - 'package.json': JSON.stringify({ - name: 'chai', - version: '1.0.0', - }), - }, - dog: { - 'package.json': JSON.stringify({ - name: 'dog', - version: '1.0.1', - }), - }, - foo: { - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - dependencies: { - chai: '^1.0.0', - }, - }), - }, - zeta: { - 'package.json': JSON.stringify({ - name: 'zeta', - version: '1.0.0', - }), - }, - }, - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - dependencies: { - b: '^1.0.0', - cat: '^1.0.0', - foo: '^1.0.0', - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - dependencies: { - zeta: '^1.0.0', - }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - dependencies: { - theta: '^1.0.0', - }, - }), - }, - }, - }) + const mockWorkspaces = async (t, { exitCode = 1, ...config } = {}) => { + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: fixtures.workspaces, + config, + }) - await outdated(testDir, {}).exec([]) + await exec([]) - t.matchSnapshot(logs, 'should display ws outdated deps human output') - t.equal(process.exitCode, 1) + t.matchSnapshot(joinedOutput(), 'output') + t.equal(process.exitCode, exitCode ?? 
undefined)
+ }

- await outdated(testDir, {
- flatOptions: {
- workspacesEnabled: false,
- },
- }).exec([])
+ await t.test('should display ws outdated deps human output', t =>
+ mockWorkspaces(t))

 // TODO: This should display dog, but doesn't because arborist filters
 // workspace deps even if they're also root deps
 // This will be fixed in a future arborist version
- t.matchSnapshot(logs, 'should display only root outdated when ws disabled')
-
- await outdated(testDir, {
- config: {
- json: true,
- },
- }).exec([])
- t.matchSnapshot(logs, 'should display ws outdated deps json output')
- t.equal(process.exitCode, 1)
-
- await outdated(testDir, {
- config: {
- parseable: true,
- },
- }).exec([])
+ await t.test('should display only root outdated when ws disabled', t =>
+ mockWorkspaces(t, { workspaces: false, exitCode: null }))

- t.matchSnapshot(logs, 'should display ws outdated deps parseable output')
- t.equal(process.exitCode, 1)
-
- await outdated(testDir, {
- config: {
- all: true,
- },
- }).exec([])
-
- t.matchSnapshot(logs, 'should display all dependencies')
- t.equal(process.exitCode, 1)
+ await t.test('should display ws outdated deps json output', t =>
+ mockWorkspaces(t, { json: true }))

- await outdated(testDir, {
- color: true,
- }).exec([])
+ await t.test('should display ws outdated deps parseable output', t =>
+ mockWorkspaces(t, { parseable: true }))

- t.matchSnapshot(logs, 'should highlight ws in dependend by section')
- t.equal(process.exitCode, 1)
+ await t.test('should display all dependencies', t =>
+ mockWorkspaces(t, { all: true }))

- await outdated(testDir, {}).execWorkspaces([], ['a'])
- t.matchSnapshot(logs, 'should display results filtered by ws')
- t.equal(process.exitCode, 1)
+ await t.test('should highlight ws in depended by section', t =>
+ mockWorkspaces(t, { color: 'always' }))

- await outdated(testDir, {
- config: {
- json: true,
- },
- }).execWorkspaces([], ['a'])
- t.matchSnapshot(logs, 'should display json results filtered by ws')
- t.equal(process.exitCode, 1)
+ await t.test('should display results filtered by ws', t =>
+ mockWorkspaces(t, { workspace: 'a' }))

- await outdated(testDir, {
- config: {
- parseable: true,
- },
- }).execWorkspaces([], ['a'])
- t.matchSnapshot(logs, 'should display parseable results filtered by ws')
- t.equal(process.exitCode, 1)
+ await t.test('should display json results filtered by ws', t =>
+ mockWorkspaces(t, { json: true, workspace: 'a' }))

- await outdated(testDir, {
- config: {
- all: true,
- },
- }).execWorkspaces([], ['a'])
+ await t.test('should display parseable results filtered by ws', t =>
+ mockWorkspaces(t, { parseable: true, workspace: 'a' }))

- t.matchSnapshot(logs,
- 'should display nested deps when filtering by ws and using --all')
- t.equal(process.exitCode, 1)
+ await t.test('should display nested deps when filtering by ws and using --all', t =>
+ mockWorkspaces(t, { all: true, workspace: 'a' }))

- await outdated(testDir, {}).execWorkspaces([], ['b'])
- t.matchSnapshot(logs,
- 'should display no results if ws has no deps to display')
+ await t.test('should display no results if ws has no deps to display', t =>
+ mockWorkspaces(t, { workspace: 'b', exitCode: null }))

- await outdated(testDir, {}).execWorkspaces([], ['c'])
- t.matchSnapshot(logs,
- 'should display missing deps when filtering by ws')
+ await t.test('should display missing deps when filtering by ws', t =>
+ mockWorkspaces(t, { workspace: 'c', exitCode: 1 }))
 })

 t.test('aliases', async t => {
- const testDir = t.testdir({
+ const testDir = {
'package.json': JSON.stringify({ name: 'display-aliases', version: '1.0.0', @@ -642,10 +601,13 @@ t.test('aliases', async t => { }), }, }, - }) + } - await outdated(testDir, {}).exec([]) + const { exec, joinedOutput } = await mockNpm(t, { + prefixDir: testDir, + }) + await exec([]) - t.matchSnapshot(logs, 'should display aliased outdated dep output') + t.matchSnapshot(joinedOutput(), 'should display aliased outdated dep output') t.equal(process.exitCode, 1) }) diff --git a/deps/npm/test/lib/commands/owner.js b/deps/npm/test/lib/commands/owner.js index 5b6bb443712f02..f9399a60cdf81b 100644 --- a/deps/npm/test/lib/commands/owner.js +++ b/deps/npm/test/lib/commands/owner.js @@ -470,8 +470,10 @@ t.test('workspaces', async t => { t.test('owner no args --workspace', async t => { const { npm } = await loadMockNpm(t, { prefixDir: workspaceFixture, + config: { + workspace: 'workspace-a', + }, }) - npm.config.set('workspace', ['workspace-a']) await t.rejects( npm.exec('owner', []), { code: 'EUSAGE' }, @@ -482,9 +484,7 @@ t.test('workspaces', async t => { t.test('owner ls implicit workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => path.join(prefix, 'workspace-a'), - }), + chdir: ({ prefix }) => path.join(prefix, 'workspace-a'), }) await registryPackage(t, npm.config.get('registry'), 'workspace-a') await npm.exec('owner', ['ls']) @@ -494,11 +494,10 @@ t.test('workspaces', async t => { t.test('owner ls explicit workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), + config: { + workspace: 'workspace-a', + }, }) - npm.config.set('workspace', ['workspace-a']) await registryPackage(t, npm.config.get('registry'), 'workspace-a') await npm.exec('owner', ['ls']) t.match(joinedOutput(), maintainers.map(m => `${m.name} <${m.email}>`).join('\n')) @@ -507,9 +506,7 @@ t.test('workspaces', async t => { t.test('owner ls implicit workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => path.join(prefix, 'workspace-a'), - }), + chdir: ({ prefix }) => path.join(prefix, 'workspace-a'), }) await registryPackage(t, npm.config.get('registry'), packageName) await npm.exec('owner', ['ls', packageName]) @@ -519,11 +516,10 @@ t.test('workspaces', async t => { t.test('owner ls explicit workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), + config: { + workspace: 'workspace-a', + }, }) - npm.config.set('workspace', ['workspace-a']) await registryPackage(t, npm.config.get('registry'), packageName) await npm.exec('owner', ['ls', packageName]) t.match(joinedOutput(), maintainers.map(m => `${m.name} <${m.email}>`).join('\n')) @@ -532,9 +528,7 @@ t.test('workspaces', async t => { t.test('owner add implicit workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => path.join(prefix, 'workspace-a'), - }), + chdir: ({ prefix }) => path.join(prefix, 'workspace-a'), }) const username = 'foo' const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') }) @@ -563,8 +557,10 @@ t.test('workspaces', async t => { t.test('owner add --workspace', async t => { const { npm, 
joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, + config: { + workspace: 'workspace-a', + }, }) - npm.config.set('workspace', ['workspace-a']) const username = 'foo' const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') }) @@ -592,9 +588,7 @@ t.test('workspaces', async t => { t.test('owner rm --workspace', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: workspaceFixture, - globals: ({ prefix }) => ({ - 'process.cwd': () => path.join(prefix, 'workspace-a'), - }), + chdir: ({ prefix }) => path.join(prefix, 'workspace-a'), }) const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') }) diff --git a/deps/npm/test/lib/commands/pack.js b/deps/npm/test/lib/commands/pack.js index 199afc640f0353..3e7c0225c3068c 100644 --- a/deps/npm/test/lib/commands/pack.js +++ b/deps/npm/test/lib/commands/pack.js @@ -3,11 +3,6 @@ const { load: loadMockNpm } = require('../../fixtures/mock-npm') const path = require('path') const fs = require('fs') -const cwd = process.cwd() -t.afterEach(t => { - process.chdir(cwd) -}) - t.test('should pack current directory with no arguments', async t => { const { npm, outputs, logs } = await loadMockNpm(t, { prefixDir: { @@ -17,7 +12,6 @@ t.test('should pack current directory with no arguments', async t => { }), }, }) - process.chdir(npm.prefix) await npm.exec('pack', []) const filename = 'test-package-1.0.0.tgz' t.strictSame(outputs, [[filename]]) @@ -35,7 +29,6 @@ t.test('follows pack-destination config', async t => { 'tar-destination': {}, }, }) - process.chdir(npm.prefix) npm.config.set('pack-destination', path.join(npm.prefix, 'tar-destination')) await npm.exec('pack', []) const filename = 'test-package-1.0.0.tgz' @@ -52,7 +45,6 @@ t.test('should pack given directory for scoped package', async t => { }), }, }) - process.chdir(npm.prefix) await npm.exec('pack', []) const filename = 'npm-test-package-1.0.0.tgz' t.strictSame(outputs, [[filename]]) @@ -68,7 +60,6 @@ t.test('should log output as valid json', async t => { }), }, }) - process.chdir(npm.prefix) npm.config.set('json', true) await npm.exec('pack', []) const filename = 'test-package-1.0.0.tgz' @@ -86,7 +77,6 @@ t.test('should log scoped package output as valid json', async t => { }), }, }) - process.chdir(npm.prefix) npm.config.set('json', true) await npm.exec('pack', []) const filename = 'myscope-test-package-1.0.0.tgz' @@ -105,7 +95,6 @@ t.test('dry run', async t => { }, }) npm.config.set('dry-run', true) - process.chdir(npm.prefix) await npm.exec('pack', []) const filename = 'test-package-1.0.0.tgz' t.strictSame(outputs, [[filename]]) @@ -119,7 +108,6 @@ t.test('invalid packument', async t => { 'package.json': '{}', }, }) - process.chdir(npm.prefix) await t.rejects( npm.exec('pack', []), /Invalid package, must have name and version/ @@ -162,28 +150,24 @@ t.test('workspaces', async t => { t.test('all workspaces', async t => { const { npm, outputs } = await loadWorkspaces(t) - process.chdir(npm.prefix) await npm.exec('pack', []) t.strictSame(outputs, [['workspace-a-1.0.0.tgz'], ['workspace-b-1.0.0.tgz']]) }) t.test('all workspaces, `.` first arg', async t => { const { npm, outputs } = await loadWorkspaces(t) - process.chdir(npm.prefix) await npm.exec('pack', ['.']) t.strictSame(outputs, [['workspace-a-1.0.0.tgz'], ['workspace-b-1.0.0.tgz']]) }) t.test('one workspace', async t => { const { npm, outputs } = await loadWorkspaces(t) - process.chdir(npm.prefix) await npm.exec('pack', ['workspace-a']) t.strictSame(outputs, 
[['workspace-a-1.0.0.tgz']]) }) t.test('specific package', async t => { const { npm, outputs } = await loadWorkspaces(t) - process.chdir(npm.prefix) await npm.exec('pack', [npm.prefix]) t.strictSame(outputs, [['workspaces-test-1.0.0.tgz']]) }) diff --git a/deps/npm/test/lib/commands/pkg.js b/deps/npm/test/lib/commands/pkg.js index 49234e4cce3230..ef38d537308a53 100644 --- a/deps/npm/test/lib/commands/pkg.js +++ b/deps/npm/test/lib/commands/pkg.js @@ -1,76 +1,61 @@ const { resolve } = require('path') const { readFileSync } = require('fs') const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') -} +const _mockNpm = require('../../fixtures/mock-npm') +const { cleanCwd } = require('../../fixtures/clean-snapshot') -t.cleanSnapshot = (str) => redactCwd(str) +t.cleanSnapshot = (str) => cleanCwd(str) -let OUTPUT = '' -const config = { - global: false, - force: false, - 'pkg-cast': 'string', -} -const npm = mockNpm({ - localPrefix: t.testdirName, - config, - output: (str) => { - OUTPUT += str - }, -}) +const mockNpm = async (t, { ...opts } = {}) => { + const res = await _mockNpm(t, opts) -const Pkg = require('../../../lib/commands/pkg.js') -const pkg = new Pkg(npm) + const readPackageJson = (dir = '') => + JSON.parse(readFileSync(resolve(res.prefix, dir, 'package.json'), 'utf8')) -const readPackageJson = (path) => { - path = path || npm.localPrefix - return JSON.parse(readFileSync(resolve(path, 'package.json'), 'utf8')) + return { + ...res, + pkg: (...args) => res.npm.exec('pkg', args), + readPackageJson, + OUTPUT: () => res.joinedOutput(), + } } -t.afterEach(() => { - config.global = false - config.json = false - npm.localPrefix = t.testdirName - OUTPUT = '' -}) - t.test('no args', async t => { + const { pkg } = await mockNpm(t) + await t.rejects( - pkg.exec([]), + pkg(), { code: 'EUSAGE' }, 'should throw usage error' ) }) t.test('no global mode', async t => { - config.global = true + const { pkg } = await mockNpm(t, { + config: { global: true }, + }) + await t.rejects( - pkg.exec(['get', 'foo']), + pkg('get', 'foo'), { code: 'EPKGGLOBAL' }, 'should throw no global mode error' ) }) t.test('get no args', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - }), + const { pkg, OUTPUT } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }, }) - await pkg.exec(['get']) + await pkg('get') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), { name: 'foo', version: '1.1.1', @@ -80,37 +65,41 @@ t.test('get no args', async t => { }) t.test('get single arg', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - }), + const { pkg, OUTPUT } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }, }) - await pkg.exec(['get', 'version']) + await pkg('get', 'version') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), '1.1.1', 'should print retrieved package.json field' ) }) t.test('get nested arg', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - scripts: { - test: 'node test.js', - }, - }), + const { pkg, OUTPUT } = await mockNpm(t, { + 
prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + scripts: { + test: 'node test.js', + }, + }), + }, }) - await pkg.exec(['get', 'scripts.test']) + await pkg('get', 'scripts.test') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), 'node test.js', 'should print retrieved nested field' ) @@ -121,18 +110,20 @@ t.test('get array field', async t => { 'index.js', 'cli.js', ] - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - files, - }), + const { pkg, OUTPUT } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }, }) - await pkg.exec(['get', 'files']) + await pkg('get', 'files') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), files, 'should print retrieved array field' ) @@ -143,18 +134,20 @@ t.test('get array item', async t => { 'index.js', 'cli.js', ] - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - files, - }), + const { pkg, OUTPUT } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }, }) - await pkg.exec(['get', 'files[0]']) + await pkg('get', 'files[0]') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), 'index.js', 'should print retrieved array field' ) @@ -171,17 +164,19 @@ t.test('get array nested items notation', async t => { url: 'http://example.com/gar', }, ] - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - contributors, - }), + const { pkg, OUTPUT } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + contributors, + }), + }, }) - await pkg.exec(['get', 'contributors.name']) + await pkg('get', 'contributors.name') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), { 'contributors[0].name': 'Ruy', 'contributors[1].name': 'Gar', @@ -191,33 +186,39 @@ t.test('get array nested items notation', async t => { }) t.test('set no args', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }), + const { pkg } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'foo' }), + }, }) await t.rejects( - pkg.exec(['set']), + pkg('set'), { code: 'EUSAGE' }, 'should throw an error if no args' ) }) t.test('set missing value', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }), + const { pkg } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'foo' }), + }, }) await t.rejects( - pkg.exec(['set', 'key=']), + pkg('set', 'key='), { code: 'EUSAGE' }, 'should throw an error if missing value' ) }) t.test('set missing key', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }), + const { pkg } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'foo' }), + }, }) await t.rejects( - pkg.exec(['set', '=value']), + pkg('set', '=value'), { code: 'EUSAGE' }, 'should throw an error if missing key' ) @@ -228,11 +229,13 @@ t.test('set single field', async t => { name: 'foo', version: '1.1.1', } - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify(json), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify(json), + }, }) - await pkg.exec(['set', 'description=Awesome stuff']) + await pkg('set', 'description=Awesome stuff') t.strictSame( 
readPackageJson(), { @@ -251,11 +254,13 @@ t.test('push to array syntax', async t => { 'foo', ], } - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify(json), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify(json), + }, }) - await pkg.exec(['set', 'keywords[]=bar', 'keywords[]=baz']) + await pkg('set', 'keywords[]=bar', 'keywords[]=baz') t.strictSame( readPackageJson(), { @@ -275,11 +280,13 @@ t.test('set multiple fields', async t => { name: 'foo', version: '1.1.1', } - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify(json), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify(json), + }, }) - await pkg.exec(['set', 'bin.foo=foo.js', 'scripts.test=node test.js']) + await pkg('set', 'bin.foo=foo.js', 'scripts.test=node test.js') t.strictSame( readPackageJson(), { @@ -300,11 +307,13 @@ t.test('set = separate value', async t => { name: 'foo', version: '1.1.1', } - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify(json), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify(json), + }, }) - await pkg.exec(['set', 'tap[test-env][0]=LC_ALL=sk']) + await pkg('set', 'tap[test-env][0]=LC_ALL=sk') t.strictSame( readPackageJson(), { @@ -320,15 +329,17 @@ t.test('set = separate value', async t => { }) t.test('set --json', async t => { - config.json = true - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.1.1', - }), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }, + config: { json: true }, }) - await pkg.exec(['set', 'private=true']) + await pkg('set', 'private=true') t.strictSame( readPackageJson(), { @@ -339,7 +350,7 @@ t.test('set --json', async t => { 'should add boolean field to package.json' ) - await pkg.exec(['set', 'tap.timeout=60']) + await pkg('set', 'tap.timeout=60') t.strictSame( readPackageJson(), { @@ -353,7 +364,7 @@ t.test('set --json', async t => { 'should add number field to package.json' ) - await pkg.exec(['set', 'foo={ "bar": { "baz": "BAZ" } }']) + await pkg('set', 'foo={ "bar": { "baz": "BAZ" } }') t.strictSame( readPackageJson(), { @@ -372,7 +383,7 @@ t.test('set --json', async t => { 'should add object field to package.json' ) - await pkg.exec(['set', 'workspaces=["packages/*"]']) + await pkg('set', 'workspaces=["packages/*"]') t.strictSame( readPackageJson(), { @@ -394,7 +405,7 @@ t.test('set --json', async t => { 'should add object field to package.json' ) - await pkg.exec(['set', 'description="awesome"']) + await pkg('set', 'description="awesome"') t.strictSame( readPackageJson(), { @@ -419,35 +430,41 @@ t.test('set --json', async t => { }) t.test('delete no args', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }), + const { pkg } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'foo' }), + }, }) await t.rejects( - pkg.exec(['delete']), + pkg('delete'), { code: 'EUSAGE' }, 'should throw an error if deleting no args' ) }) t.test('delete invalid key', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'foo' }), + const { pkg } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'foo' }), + }, }) await t.rejects( - pkg.exec(['delete', '']), + pkg('delete', ''), { code: 'EUSAGE' }, 'should throw an error if deleting 
invalid args' ) }) t.test('delete single field', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - }), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }, }) - await pkg.exec(['delete', 'version']) + await pkg('delete', 'version') t.strictSame( readPackageJson(), { @@ -458,14 +475,16 @@ t.test('delete single field', async t => { }) t.test('delete multiple field', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - description: 'awesome', - }), + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + description: 'awesome', + }), + }, }) - await pkg.exec(['delete', 'version', 'description']) + await pkg('delete', 'version', 'description') t.strictSame( readPackageJson(), { @@ -476,22 +495,24 @@ t.test('delete multiple field', async t => { }) t.test('delete nested field', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - info: { - foo: { - bar: [ - { - baz: 'deleteme', - }, - ], + const { pkg, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + info: { + foo: { + bar: [ + { + baz: 'deleteme', + }, + ], + }, }, - }, - }), + }), + }, }) - await pkg.exec(['delete', 'info.foo.bar[0].baz']) + await pkg('delete', 'info.foo.bar[0].baz') t.strictSame( readPackageJson(), { @@ -510,34 +531,37 @@ t.test('delete nested field', async t => { }) t.test('workspaces', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'root', - version: '1.0.0', - workspaces: [ - 'packages/*', - ], - }), - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.2.3', - }), + const { pkg, OUTPUT, readPackageJson } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: [ + 'packages/*', + ], + }), + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.2.3', + }), + }, }, }, + config: { workspaces: true }, }) - await pkg.execWorkspaces(['get', 'name', 'version'], []) + await pkg('get', 'name', 'version') t.strictSame( - JSON.parse(OUTPUT), + JSON.parse(OUTPUT()), { a: { name: 'a', @@ -551,10 +575,10 @@ t.test('workspaces', async t => { 'should return expected result for configured workspaces' ) - await pkg.execWorkspaces(['set', 'funding=http://example.com'], []) + await pkg('set', 'funding=http://example.com') t.strictSame( - readPackageJson(resolve(npm.localPrefix, 'packages/a')), + readPackageJson('packages/a'), { name: 'a', version: '1.0.0', @@ -564,7 +588,7 @@ t.test('workspaces', async t => { ) t.strictSame( - readPackageJson(resolve(npm.localPrefix, 'packages/b')), + readPackageJson('packages/b'), { name: 'b', version: '1.2.3', @@ -573,9 +597,10 @@ t.test('workspaces', async t => { 'should add field to workspace b' ) - await pkg.execWorkspaces(['delete', 'version'], []) + await pkg('delete', 'version') + t.strictSame( - readPackageJson(resolve(npm.localPrefix, 'packages/a')), + readPackageJson('packages/a'), { name: 'a', funding: 'http://example.com', 
@@ -584,7 +609,7 @@ t.test('workspaces', async t => { ) t.strictSame( - readPackageJson(resolve(npm.localPrefix, 'packages/b')), + readPackageJson('packages/b'), { name: 'b', funding: 'http://example.com', diff --git a/deps/npm/test/lib/commands/profile.js b/deps/npm/test/lib/commands/profile.js index 09fd08cfc5329e..00ccf2607524ad 100644 --- a/deps/npm/test/lib/commands/profile.js +++ b/deps/npm/test/lib/commands/profile.js @@ -1,50 +1,45 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -let result = '' -const config = { - otp: '', - json: false, - parseable: false, - registry: 'https://registry.npmjs.org/', -} -const flatOptions = { - registry: 'https://registry.npmjs.org/', -} -const npm = mockNpm({ - config, - flatOptions, - output: (...msg) => { - result = result ? `${result}\n${msg.join('\n')}` : msg.join('\n') - }, -}) -const mocks = { - npmlog: { - gauge: { show () {} }, - }, - 'proc-log': { - info () {}, - notice () {}, - warn () {}, - }, - 'npm-profile': { - async get () {}, - async set () {}, - async createToken () {}, - }, - 'qrcode-terminal': { generate: (url, cb) => cb() }, - 'cli-table3': class extends Array { - toString () { - return this.filter(Boolean) - .map(i => [...Object.entries(i)].map(i => i.join(': '))) - .join('\n') - } - }, - '../../../lib/utils/read-user-info.js': { - async password () {}, - async otp () {}, - }, +const mockNpm = require('../../fixtures/mock-npm') + +const mockProfile = async (t, { npmProfile, readUserInfo, qrcode, ...opts } = {}) => { + const mocks = { + 'npm-profile': npmProfile || { + async get () {}, + async set () {}, + async createToken () {}, + }, + 'qrcode-terminal': qrcode || { generate: (url, cb) => cb() }, + 'cli-table3': class extends Array { + toString () { + return this.filter(Boolean) + .map(i => [...Object.entries(i)].map(v => v.join(': '))) + .join('\n') + } + }, + '{LIB}/utils/read-user-info.js': readUserInfo || { + async password () {}, + async otp () {}, + }, + } + + const mock = await mockNpm(t, { + ...opts, + mocks: { + ...mocks, + ...opts.mocks, + }, + }) + + return { + ...mock, + result: () => mock.joinedOutput(), + profile: { + exec: (args) => mock.npm.exec('profile', args), + usage: () => mock.npm.cmd('profile').then(c => c.usage), + }, + } } + const userProfile = { tfa: { pending: false, mode: 'auth-and-writes' }, name: 'foo', @@ -60,53 +55,44 @@ const userProfile = { github: 'https://github.com/npm', } -t.afterEach(() => { - result = '' - flatOptions.otp = '' - config.json = false - config.parseable = false - config.registry = 'https://registry.npmjs.org/' -}) - -const Profile = t.mock('../../../lib/commands/profile.js', mocks) -const profile = new Profile(npm) - t.test('no args', async t => { - await t.rejects(profile.exec([]), profile.usage) + const { profile } = await mockProfile(t) + await t.rejects(profile.exec([]), await profile.usage()) }) -t.test('profile get no args', t => { - const npmProfile = { +t.test('profile get no args', async t => { + const defaultNpmProfile = { async get () { return userProfile }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) - t.test('default output', async t => { + const { profile, result } = await mockProfile(t, { npmProfile: defaultNpmProfile }) await profile.exec(['get']) - t.matchSnapshot(result, 'should output table with contents') + t.matchSnapshot(result(), 'should output table with contents') }) t.test('--json', async t => { 
- config.json = true + const { profile, result } = await mockProfile(t, { + npmProfile: defaultNpmProfile, + config: { json: true }, + }) await profile.exec(['get']) - t.same(JSON.parse(result), userProfile, 'should output json profile result') + t.same(JSON.parse(result()), userProfile, 'should output json profile result') }) t.test('--parseable', async t => { - config.parseable = true + const { profile, result } = await mockProfile(t, { + npmProfile: defaultNpmProfile, + config: { parseable: true }, + }) await profile.exec(['get']) - t.matchSnapshot(result, 'should output all profile info as parseable result') + t.matchSnapshot(result(), 'should output all profile info as parseable result') }) t.test('no tfa enabled', async t => { @@ -118,15 +104,10 @@ t.test('profile get no args', t => { } }, } - - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) + const { profile, result } = await mockProfile(t, { npmProfile }) await profile.exec(['get']) - t.matchSnapshot(result, 'should output expected profile values') + t.matchSnapshot(result(), 'should output expected profile values') }) t.test('unverified email', async t => { @@ -139,15 +120,11 @@ t.test('profile get no args', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) + const { profile, result } = await mockProfile(t, { npmProfile }) await profile.exec(['get']) - t.matchSnapshot(result, 'should output table with contents') + t.matchSnapshot(result(), 'should output table with contents') }) t.test('profile has cidr_whitelist item', async t => { @@ -160,127 +137,111 @@ t.test('profile get no args', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) + const { profile, result } = await mockProfile(t, { npmProfile }) await profile.exec(['get']) - t.matchSnapshot(result, 'should output table with contents') + t.matchSnapshot(result(), 'should output table with contents') }) - - t.end() }) -t.test('profile get ', t => { +t.test('profile get ', async t => { const npmProfile = { async get () { return userProfile }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) - t.test('default output', async t => { + const { profile, result } = await mockProfile(t, { npmProfile }) + await profile.exec(['get', 'name']) - t.equal(result, 'foo', 'should output value result') + t.equal(result(), 'foo', 'should output value result') }) t.test('--json', async t => { - config.json = true + const { profile, result } = await mockProfile(t, { + npmProfile, + config: { json: true }, + }) await profile.exec(['get', 'name']) t.same( - JSON.parse(result), + JSON.parse(result()), userProfile, 'should output json profile result ignoring args filter' ) }) t.test('--parseable', async t => { - config.parseable = true + const { profile, result } = await mockProfile(t, { + npmProfile, + config: { parseable: true }, + }) await profile.exec(['get', 'name']) - t.matchSnapshot(result, 'should output parseable result value') + t.matchSnapshot(result(), 'should output parseable result value') }) - - t.end() }) -t.test('profile get multiple args', t => { +t.test('profile get multiple args', async t => { const npmProfile = { async get () { return userProfile }, } - const Profile = 
t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - }) - const profile = new Profile(npm) - t.test('default output', async t => { + const { profile, result } = await mockProfile(t, { + npmProfile, + }) await profile.exec(['get', 'name', 'email', 'github']) - t.matchSnapshot(result, 'should output all keys') + t.matchSnapshot(result(), 'should output all keys') }) t.test('--json', async t => { - config.json = true + const config = { json: true } + const { profile, result } = await mockProfile(t, { + npmProfile, + config, + }) await profile.exec(['get', 'name', 'email', 'github']) - t.same(JSON.parse(result), userProfile, 'should output json profile result and ignore args') + t.same(JSON.parse(result()), userProfile, 'should output json profile result and ignore args') }) t.test('--parseable', async t => { - config.parseable = true + const config = { parseable: true } + const { profile, result } = await mockProfile(t, { + npmProfile, + config, + }) await profile.exec(['get', 'name', 'email', 'github']) - t.matchSnapshot(result, 'should output parseable profile value results') + t.matchSnapshot(result(), 'should output parseable profile value results') }) t.test('comma separated', async t => { + const { profile, result } = await mockProfile(t, { + npmProfile, + }) + await profile.exec(['get', 'name,email,github']) - t.matchSnapshot(result, 'should output all keys') + t.matchSnapshot(result(), 'should output all keys') }) - - t.end() }) -t.test('profile set ', t => { - const npmProfile = t => ({ - async get () { - return userProfile - }, - async set (newUser, conf) { - t.match( - newUser, - { - fullname: 'Lorem Ipsum', - }, - 'should set new value to key' - ) - return { - ...userProfile, - ...newUser, - } - }, - }) - +t.test('profile set ', async t => { t.test('no key', async t => { + const { profile } = await mockProfile(t) + await t.rejects( profile.exec(['set']), /npm profile set /, @@ -289,6 +250,7 @@ t.test('profile set ', t => { }) t.test('no value', async t => { + const { profile } = await mockProfile(t) await t.rejects( profile.exec(['set', 'email']), /npm profile set /, @@ -297,6 +259,7 @@ t.test('profile set ', t => { }) t.test('set password', async t => { + const { profile } = await mockProfile(t) await t.rejects( profile.exec(['set', 'password', '1234']), /Do not include your current or new passwords on the command line./, @@ -305,6 +268,7 @@ t.test('profile set ', t => { }) t.test('unwritable key', async t => { + const { profile } = await mockProfile(t) await await t.rejects( profile.exec(['set', 'name', 'foo']), /"name" is not a property we can set./, @@ -312,35 +276,51 @@ t.test('profile set ', t => { ) }) - t.test('writable key', t => { + const defaultNpmProfile = t => ({ + async get () { + return userProfile + }, + async set (newUser) { + t.match( + newUser, + { + fullname: 'Lorem Ipsum', + }, + 'should set new value to key' + ) + return { + ...userProfile, + ...newUser, + } + }, + }) + + t.test('writable key', async t => { t.test('default output', async t => { t.plan(2) - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), + const { profile, result } = await mockProfile(t, { + npmProfile: defaultNpmProfile(t), }) - const profile = new Profile(npm) await profile.exec(['set', 'fullname', 'Lorem Ipsum']) - t.equal(result, 'Set\nfullname\nto\nLorem Ipsum', 'should output set key success msg') + t.equal(result(), 'Set fullname to Lorem Ipsum', 'should output set key success msg') }) 
t.test('--json', async t => { t.plan(2) - config.json = true + const config = { json: true } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), + const { profile, result } = await mockProfile(t, { + npmProfile: defaultNpmProfile(t), + config, }) - const profile = new Profile(npm) await profile.exec(['set', 'fullname', 'Lorem Ipsum']) t.same( - JSON.parse(result), + JSON.parse(result()), { fullname: 'Lorem Ipsum', }, @@ -351,30 +331,26 @@ t.test('profile set ', t => { t.test('--parseable', async t => { t.plan(2) - config.parseable = true - - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), + const config = { parseable: true } + const { profile, result } = await mockProfile(t, { + npmProfile: defaultNpmProfile(t), + config, }) - const profile = new Profile(npm) await profile.exec(['set', 'fullname', 'Lorem Ipsum']) - t.matchSnapshot(result, 'should output parseable set key success msg') + t.matchSnapshot(result(), 'should output parseable set key success msg') }) - - t.end() }) t.test('write new email', async t => { - t.plan(3) + t.plan(2) const npmProfile = { async get () { return userProfile }, - async set (newUser, conf) { + async set (newUser) { t.match( newUser, { @@ -382,7 +358,6 @@ t.test('profile set ', t => { }, 'should set new value to email' ) - t.match(conf, npm.flatOptions, 'should forward flatOptions config') return { ...userProfile, ...newUser, @@ -390,24 +365,22 @@ t.test('profile set ', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, + const { profile, result } = await mockProfile(t, { + npmProfile, }) - const profile = new Profile(npm) await profile.exec(['set', 'email', 'foo@npmjs.com']) - t.equal(result, 'Set\nemail\nto\nfoo@npmjs.com', 'should output set key success msg') + t.equal(result(), 'Set email to foo@npmjs.com', 'should output set key success msg') }) t.test('change password', async t => { - t.plan(6) + t.plan(5) const npmProfile = { async get () { return userProfile }, - async set (newUser, conf) { + async set (newUser) { t.match( newUser, { @@ -418,7 +391,6 @@ t.test('profile set ', t => { }, 'should set new password' ) - t.match(conf, npm.flatOptions, 'should forward flatOptions config') return { ...userProfile, } @@ -441,30 +413,27 @@ t.test('profile set ', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, }) - const profile = new Profile(npm) await profile.exec(['set', 'password']) - t.equal(result, 'Set\npassword', 'should output set password success msg') + t.equal(result(), 'Set password', 'should output set password success msg') }) t.test('password confirmation mismatch', async t => { - t.plan(3) + t.plan(2) + let passwordPromptCount = 0 const npmProfile = { async get () { return userProfile }, - async set (newUser, conf) { - return { - ...userProfile, - } + async set () { + return { ...userProfile } }, } @@ -485,38 +454,26 @@ t.test('profile set ', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - npmlog: { - gauge: { - show () {}, - }, - }, - 'proc-log': { - warn (title, msg) { - t.equal(title, 'profile', 'should use expected profile') - t.equal( - msg, - 'Passwords do not match, please try again.', - 'should log password mismatch 
message' - ) - }, - }, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { profile, result, logs } = await mockProfile(t, { + npmProfile, + readUserInfo, }) - const profile = new Profile(npm) await profile.exec(['set', 'password']) - t.equal(result, 'Set\npassword', 'should output set password success msg') - }) + t.equal( + logs.warn[0][1], + 'Passwords do not match, please try again.', + 'should log password mismatch message' + ) - t.end() + t.equal(result(), 'Set password', 'should output set password success msg') + }) }) -t.test('enable-2fa', t => { +t.test('enable-2fa', async t => { t.test('invalid args', async t => { + const { profile } = await mockProfile(t) await t.rejects( profile.exec(['enable-2fa', 'foo', 'bar']), /npm profile enable-2fa \[auth-and-writes|auth-only\]/, @@ -525,6 +482,7 @@ t.test('enable-2fa', t => { }) t.test('invalid two factor auth mode', async t => { + const { profile } = await mockProfile(t) await t.rejects( profile.exec(['enable-2fa', 'foo']), /Invalid two-factor authentication mode "foo"/, @@ -533,7 +491,8 @@ t.test('enable-2fa', t => { }) t.test('no support for --json output', async t => { - config.json = true + const config = { json: true } + const { profile } = await mockProfile(t, { config }) await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -544,7 +503,8 @@ t.test('enable-2fa', t => { }) t.test('no support for --parseable output', async t => { - config.parseable = true + const config = { parseable: true } + const { profile } = await mockProfile(t, { config }) await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -557,12 +517,6 @@ t.test('enable-2fa', t => { t.test('no bearer tokens returned by registry', async t => { t.plan(3) - // mock legacy basic auth style - npm.config.getCredentialsByURI = reg => { - t.equal(reg, flatOptions.registry, 'should use expected registry') - return { auth: Buffer.from('foo:bar').toString('base64') } - } - const npmProfile = { async createToken (pass) { t.match(pass, 'bar', 'should use password for basic auth') @@ -570,11 +524,16 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, + const { npm, profile } = await mockProfile(t, { + npmProfile, }) - const profile = new Profile(npm) + + // mock legacy basic auth style + // XXX: use mock registry + npm.config.getCredentialsByURI = reg => { + t.equal(reg, npm.flatOptions.registry, 'should use expected registry') + return { auth: Buffer.from('foo:bar').toString('base64') } + } await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -586,22 +545,21 @@ t.test('enable-2fa', t => { }) t.test('from basic username/password auth', async t => { - // mock legacy basic auth style with user/pass - npm.config.getCredentialsByURI = () => { - return { username: 'foo', password: 'bar' } - } - const npmProfile = { async createToken (pass) { return {} }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, + const { npm, profile } = await mockProfile(t, { + npmProfile, }) - const profile = new Profile(npm) + + // mock legacy basic auth style with user/pass + // XXX: use mock registry + npm.config.getCredentialsByURI = () => { + return { username: 'foo', password: 'bar' } + } await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -613,12 +571,10 @@ t.test('enable-2fa', t => { }) t.test('no auth found', async t => { - npm.config.getCredentialsByURI = () => ({}) + const { npm, 
profile } = await mockProfile(t) - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - }) - const profile = new Profile(npm) + // XXX: use mock registry + npm.config.getCredentialsByURI = () => ({}) await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -627,20 +583,7 @@ t.test('enable-2fa', t => { }) t.test('from basic auth, asks for otp', async t => { - t.plan(10) - - // mock legacy basic auth style - npm.config.getCredentialsByURI = reg => { - t.equal(reg, flatOptions.registry, 'should use expected registry') - return { auth: Buffer.from('foo:bar').toString('base64') } - } - npm.config.setCredentialsByURI = (registry, { token }) => { - t.equal(registry, flatOptions.registry, 'should set expected registry') - t.equal(token, 'token', 'should set expected token') - } - npm.config.save = type => { - t.equal(type, 'user', 'should save to user config') - } + t.plan(9) const npmProfile = { async createToken (pass) { @@ -660,14 +603,6 @@ t.test('enable-2fa', t => { }, 'should set tfa mode' ) - t.match( - conf, - { - ...npm.flatOptions, - otp: '123456', - }, - 'should forward flatOptions config' - ) return { ...userProfile, tfa: null, @@ -690,16 +625,28 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, }) - const profile = new Profile(npm) + + // mock legacy basic auth style + // XXX: use mock registry + npm.config.getCredentialsByURI = reg => { + t.equal(reg, npm.flatOptions.registry, 'should use expected registry') + return { auth: Buffer.from('foo:bar').toString('base64') } + } + npm.config.setCredentialsByURI = (registry, { token }) => { + t.equal(registry, npm.flatOptions.registry, 'should set expected registry') + t.equal(token, 'token', 'should set expected token') + } + npm.config.save = type => { + t.equal(type, 'user', 'should save to user config') + } await profile.exec(['enable-2fa', 'auth-only']) t.equal( - result, + result(), 'Two factor authentication mode changed to: auth-only', 'should output success msg' ) @@ -708,12 +655,6 @@ t.test('enable-2fa', t => { t.test('from token and set otp, retries on pending and verifies with qrcode', async t => { t.plan(4) - flatOptions.otp = '1234' - - npm.config.getCredentialsByURI = () => { - return { token: 'token' } - } - let setCount = 0 const npmProfile = { async get () { @@ -775,7 +716,7 @@ t.test('enable-2fa', t => { async password () { return 'password1234' }, - async otp (label) { + async otp () { return '123456' }, } @@ -785,26 +726,24 @@ t.test('enable-2fa', t => { generate: (url, cb) => cb('qrcode'), } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - 'qrcode-terminal': qrcode, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile, result } = await mockProfile(t, { + npmProfile, + qrcode, + readUserInfo, + config: { otp: '1234' }, }) - const profile = new Profile(npm) + + // XXX: use mock registry + npm.config.getCredentialsByURI = () => { + return { token: 'token' } + } await profile.exec(['enable-2fa', 'auth-only']) - t.matchSnapshot(result, 'should output 2fa enablement success msgs') + t.matchSnapshot(result(), 'should output 2fa enablement success msgs') }) t.test('from token and set otp, retrieves invalid otp', async t => { - flatOptions.otp = '1234' - - npm.config.getCredentialsByURI = 
() => { - return { token: 'token' } - } - const npmProfile = { async get () { return { @@ -831,12 +770,15 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile } = await mockProfile(t, { + npmProfile, + readUserInfo, + config: { otp: '1234' }, }) - const profile = new Profile(npm) + + npm.config.getCredentialsByURI = () => { + return { token: 'token' } + } await t.rejects( profile.exec(['enable-2fa', 'auth-only']), @@ -846,13 +788,6 @@ t.test('enable-2fa', t => { }) t.test('from token auth provides --otp config arg', async t => { - flatOptions.otp = '123456' - flatOptions.otp = '123456' - - npm.config.getCredentialsByURI = reg => { - return { token: 'token' } - } - const npmProfile = { async get () { return userProfile @@ -874,27 +809,26 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, + config: { otp: '123456' }, }) - const profile = new Profile(npm) + + npm.config.getCredentialsByURI = reg => { + return { token: 'token' } + } await profile.exec(['enable-2fa', 'auth-and-writes']) t.equal( - result, + result(), 'Two factor authentication mode changed to: auth-and-writes', 'should output success msg' ) }) t.test('missing tfa from user profile', async t => { - npm.config.getCredentialsByURI = reg => { - return { token: 'token' } - } - const npmProfile = { async get () { return { @@ -919,27 +853,25 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, }) - const profile = new Profile(npm) + + npm.config.getCredentialsByURI = reg => { + return { token: 'token' } + } await profile.exec(['enable-2fa', 'auth-only']) t.equal( - result, + result(), 'Two factor authentication mode changed to: auth-only', 'should output success msg' ) }) t.test('defaults to auth-and-writes permission if no mode specified', async t => { - npm.config.getCredentialsByURI = reg => { - return { token: 'token' } - } - const npmProfile = { async get () { return { @@ -964,25 +896,25 @@ t.test('enable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { npm, profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, }) - const profile = new Profile(npm) + + npm.config.getCredentialsByURI = reg => { + return { token: 'token' } + } await profile.exec(['enable-2fa']) t.equal( - result, + result(), 'Two factor authentication mode changed to: auth-and-writes', 'should enable 2fa with auth-and-writes permission' ) }) - - t.end() }) -t.test('disable-2fa', t => { +t.test('disable-2fa', async t => { t.test('no tfa enabled', async t => { const npmProfile = { async get () { @@ -993,17 +925,16 @@ t.test('disable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, + const { profile, result } = await mockProfile(t, { + npmProfile, }) - const profile = new Profile(npm) await profile.exec(['disable-2fa']) - 
t.equal(result, 'Two factor authentication not enabled.', 'should output already disalbed msg') + t.equal(result(), 'Two factor authentication not enabled.', + 'should output already disalbed msg') }) - t.test('requests otp', t => { + t.test('requests otp', async t => { const npmProfile = t => ({ async get () { return userProfile @@ -1019,14 +950,6 @@ t.test('disable-2fa', t => { }, 'should send the new info for setting in profile' ) - t.match( - conf, - { - ...npm.flatOptions, - otp: '1234', - }, - 'should forward flatOptions config' - ) }, }) @@ -1046,54 +969,52 @@ t.test('disable-2fa', t => { }) t.test('default output', async t => { - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), - '../../../lib/utils/read-user-info.js': readUserInfo(t), + t.plan(4) + + const { profile, result } = await mockProfile(t, { + npmProfile: npmProfile(t), + readUserInfo: readUserInfo(t), }) - const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.equal(result, 'Two factor authentication disabled.', 'should output already disabled msg') + t.equal(result(), 'Two factor authentication disabled.', 'should output already disabled msg') }) t.test('--json', async t => { - config.json = true + t.plan(4) - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), - '../../../lib/utils/read-user-info.js': readUserInfo(t), + const config = { json: true } + + const { profile, result } = await mockProfile(t, { + npmProfile: npmProfile(t), + readUserInfo: readUserInfo(t), + config, }) - const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.same(JSON.parse(result), { tfa: false }, 'should output json already disabled msg') + t.same(JSON.parse(result()), { tfa: false }, 'should output json already disabled msg') }) t.test('--parseable', async t => { - config.parseable = true + t.plan(4) + + const config = { parseable: true } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile(t), - '../../../lib/utils/read-user-info.js': readUserInfo(t), + const { profile, result } = await mockProfile(t, { + npmProfile: npmProfile(t), + readUserInfo: readUserInfo(t), + config, }) - const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.equal(result, 'tfa\tfalse', 'should output parseable already disabled msg') + t.equal(result(), 'tfa\tfalse', 'should output parseable already disabled msg') }) - - t.end() }) t.test('--otp config already set', async t => { - t.plan(3) - - flatOptions.otp = '123456' + t.plan(2) const npmProfile = { async get () { @@ -1110,14 +1031,6 @@ t.test('disable-2fa', t => { }, 'should send the new info for setting in profile' ) - t.match( - conf, - { - ...npm.flatOptions, - otp: '123456', - }, - 'should forward flatOptions config' - ) }, } @@ -1130,22 +1043,21 @@ t.test('disable-2fa', t => { }, } - const Profile = t.mock('../../../lib/commands/profile.js', { - ...mocks, - 'npm-profile': npmProfile, - '../../../lib/utils/read-user-info.js': readUserInfo, + const { profile, result } = await mockProfile(t, { + npmProfile, + readUserInfo, + config: { otp: '123456' }, }) - const profile = new Profile(npm) await profile.exec(['disable-2fa']) - t.equal(result, 'Two factor authentication disabled.', 'should output already disalbed msg') + t.equal(result(), 'Two factor authentication disabled.', 'should output already disalbed msg') }) - - t.end() }) t.test('unknown subcommand', async t => { + const { profile } = await 
mockProfile(t) + await t.rejects( profile.exec(['asfd']), /Unknown profile command: asfd/, @@ -1153,55 +1065,47 @@ t.test('unknown subcommand', async t => { ) }) -t.test('completion', t => { - const testComp = async ({ t, argv, expect, title }) => { +t.test('completion', async t => { + const testComp = async (t, { argv, expect, title } = {}) => { + const { npm } = await mockProfile(t) + const profile = await npm.cmd('profile') t.resolveMatch(profile.completion({ conf: { argv: { remain: argv } } }), expect, title) } t.test('npm profile autocomplete', async t => { - await testComp({ - t, + await testComp(t, { argv: ['npm', 'profile'], expect: ['enable-2fa', 'disable-2fa', 'get', 'set'], title: 'should auto complete with subcommands', }) - - t.end() }) t.test('npm profile enable autocomplete', async t => { - await testComp({ - t, + await testComp(t, { argv: ['npm', 'profile', 'enable-2fa'], expect: ['auth-and-writes', 'auth-only'], title: 'should auto complete with auth types', }) - - t.end() }) t.test('npm profile no autocomplete', async t => { const noAutocompleteCmds = ['disable-2fa', 'disable-tfa', 'get', 'set'] for (const subcmd of noAutocompleteCmds) { - await testComp({ - t, + await t.test(subcmd, t => testComp(t, { argv: ['npm', 'profile', subcmd], expect: [], title: `${subcmd} should have no autocomplete`, - }) + })) } - - t.end() }) t.test('npm profile unknown subcommand autocomplete', async t => { + const { npm } = await mockProfile(t) + const profile = await npm.cmd('profile') t.rejects( profile.completion({ conf: { argv: { remain: ['npm', 'profile', 'asdf'] } } }), { message: 'asdf not recognized' }, 'should throw unknown cmd error' ) - t.end() }) - - t.end() }) diff --git a/deps/npm/test/lib/commands/prune.js b/deps/npm/test/lib/commands/prune.js index a7f56547b105db..81245bcfca1671 100644 --- a/deps/npm/test/lib/commands/prune.js +++ b/deps/npm/test/lib/commands/prune.js @@ -13,7 +13,7 @@ t.test('should prune using Arborist', async (t) => { t.ok(true, 'prune is called') } }, - '../../lib/utils/reify-finish.js': (arb) => { + '{LIB}/utils/reify-finish.js': (arb) => { t.ok(arb, 'gets arborist tree') }, }, diff --git a/deps/npm/test/lib/commands/publish.js b/deps/npm/test/lib/commands/publish.js index 496c02394eb171..39696066130f9b 100644 --- a/deps/npm/test/lib/commands/publish.js +++ b/deps/npm/test/lib/commands/publish.js @@ -44,9 +44,6 @@ t.test('respects publishConfig.registry, runs appropriate scripts', async t => { publishConfig: { registry: alternateRegistry }, }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -103,9 +100,6 @@ t.test('re-loads publishConfig.registry if added during script process', async t publishConfig: { registry: alternateRegistry }, }), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -150,9 +144,6 @@ t.test('json', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -174,9 +165,6 @@ t.test('dry-run', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await npm.exec('publish', []) t.equal(joinedOutput(), `+ ${pkg}@1.0.0`) @@ -184,10 +172,8 @@ t.test('dry-run', async t => { }) t.test('shows usage with wrong set of arguments', async t => { - t.plan(1) - const Publish 
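
The prune hunk above swaps a relative mock path for a '{LIB}/...' key. The token itself is real -- it recurs throughout these test diffs -- but how the fixture expands it is not shown, so the following one-liner is an assumption about that step, not the fixture's actual code:

    const { resolve } = require('path')

    // assumed expansion: '{LIB}/utils/reify-finish.js' -> '<repo>/lib/utils/reify-finish.js',
    // resolved against the repo's lib directory rather than the test file's location
    const expandMockKey = (key, libDir = resolve(__dirname, '../../lib')) =>
      key.replace('{LIB}', libDir)

The payoff is that a test file can move around the tree without its mock keys going stale.
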
= t.mock('../../../lib/commands/publish.js') - const publish = new Publish({ config: { validate: () => {} } }) - + const { npm } = await loadMockNpm(t) + const publish = await npm.cmd('publish') await t.rejects(publish.exec(['a', 'b', 'c']), publish.usage) }) @@ -199,9 +185,6 @@ t.test('throws when invalid tag', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await t.rejects( npm.exec('publish', []), @@ -247,9 +230,6 @@ t.test('no auth default registry', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await t.rejects( npm.exec('publish', []), @@ -268,9 +248,6 @@ t.test('no auth dry-run', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await npm.exec('publish', []) t.matchSnapshot(joinedOutput()) @@ -286,9 +263,6 @@ t.test('no auth for configured registry', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await t.rejects( npm.exec('publish', []), @@ -302,7 +276,8 @@ t.test('no auth for configured registry', async t => { t.test('no auth for scope configured registry', async t => { const { npm } = await loadMockNpm(t, { config: { - '@npm:registry': alternateRegistry, + scope: '@npm', + registry: alternateRegistry, ...auth, }, prefixDir: { @@ -311,9 +286,6 @@ t.test('no auth for scope configured registry', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await t.rejects( npm.exec('publish', []), @@ -328,7 +300,8 @@ t.test('has token auth for scope configured registry', async t => { const spec = npa('@npm/test-package') const { npm, joinedOutput } = await loadMockNpm(t, { config: { - '@npm:registry': alternateRegistry, + scope: '@npm', + registry: alternateRegistry, [`${alternateRegistry.slice(6)}/:_authToken`]: 'test-scope-token', }, prefixDir: { @@ -337,9 +310,6 @@ t.test('has token auth for scope configured registry', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -357,7 +327,8 @@ t.test('has mTLS auth for scope configured registry', async t => { const spec = npa('@npm/test-package') const { npm, joinedOutput } = await loadMockNpm(t, { config: { - '@npm:registry': alternateRegistry, + scope: '@npm', + registry: alternateRegistry, [`${alternateRegistry.slice(6)}/:certfile`]: '/some.cert', [`${alternateRegistry.slice(6)}/:keyfile`]: '/some.key', }, @@ -367,9 +338,6 @@ t.test('has mTLS auth for scope configured registry', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -425,9 +393,6 @@ t.test('workspaces', t => { ...auth, workspaces: true, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), prefixDir: dir, }) const registry = new MockRegistry({ @@ -457,9 +422,6 @@ t.test('workspaces', t => { color: 'always', workspaces: true, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), prefixDir: dir, }) const registry = new MockRegistry({ @@ -488,9 +450,6 @@ t.test('workspaces', t => { ...auth, workspace: ['workspace-a'], }, - globals: ({ prefix }) => ({ - 'process.cwd': () => 
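
Every one of these deleted globals blocks did the same thing: point process.cwd at the test prefix. Dropping them wholesale only works if the fixture now does that by default, which is presumably what motivated this sweep. For reference, a sketch of the kind of global patching the deleted option was requesting -- names and mechanism assumed, not taken from the fixture:

    // hypothetical helper: apply `globals` entries for the lifetime of a tap test
    const applyGlobals = (t, globals, prefix) => {
      for (const [dotPath, value] of Object.entries(globals({ prefix }))) {
        const [obj, key] = dotPath.split('.') // e.g. 'process.cwd'
        const original = global[obj][key]
        global[obj][key] = value
        t.teardown(() => {
          global[obj][key] = original // restore after the test
        })
      }
    }
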
prefix, - }), prefixDir: dir, }) const registry = new MockRegistry({ @@ -512,9 +471,6 @@ t.test('workspaces', t => { ...auth, workspace: ['workspace-a'], }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), prefixDir: dir, }) const registry = new MockRegistry({ @@ -535,9 +491,6 @@ t.test('workspaces', t => { ...auth, workspace: ['workspace-x'], }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), prefixDir: dir, }) await t.rejects( @@ -553,9 +506,6 @@ t.test('workspaces', t => { workspaces: true, json: true, }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), prefixDir: dir, }) const registry = new MockRegistry({ @@ -596,9 +546,6 @@ t.test('ignore-scripts', async t => { }, }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -638,9 +585,6 @@ t.test('_auth config default registry', async t => { prefixDir: { 'package.json': JSON.stringify(pkgJson), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -665,9 +609,6 @@ t.test('bare _auth and registry config', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -682,7 +623,8 @@ t.test('bare _auth and registry config', async t => { t.test('bare _auth config scoped registry', async t => { const { npm } = await loadMockNpm(t, { config: { - '@npm:registry': alternateRegistry, + scope: '@npm', + registry: alternateRegistry, _auth: basic, }, prefixDir: { @@ -691,9 +633,6 @@ t.test('bare _auth config scoped registry', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) await t.rejects( npm.exec('publish', []), @@ -705,7 +644,8 @@ t.test('scoped _auth config scoped registry', async t => { const spec = npa('@npm/test-package') const { npm, joinedOutput } = await loadMockNpm(t, { config: { - '@npm:registry': alternateRegistry, + scope: '@npm', + registry: alternateRegistry, [`${alternateRegistry.slice(6)}/:_auth`]: basic, }, prefixDir: { @@ -714,9 +654,6 @@ t.test('scoped _auth config scoped registry', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -741,9 +678,6 @@ t.test('restricted access', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, @@ -772,9 +706,6 @@ t.test('public access', async t => { version: '1.0.0', }, null, 2), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => prefix, - }), }) const registry = new MockRegistry({ tap: t, diff --git a/deps/npm/test/lib/commands/query.js b/deps/npm/test/lib/commands/query.js index fb5b4843c34ee7..2b9a5b49763235 100644 --- a/deps/npm/test/lib/commands/query.js +++ b/deps/npm/test/lib/commands/query.js @@ -1,15 +1,8 @@ const t = require('tap') const { load: loadMockNpm } = require('../../fixtures/mock-npm') +const { cleanCwd } = require('../../fixtures/clean-snapshot.js') -t.cleanSnapshot = (str) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(str) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') - // normalize between windows and posix - .replace(new RegExp('lib/node_modules', 'g'), 'node_modules') -} +t.cleanSnapshot = (str) => 
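
cleanCwd lives in the shared clean-snapshot fixture, whose body is not part of this patch. Judging from the inline normalizer it replaces just above, it should be doing roughly this -- a sketch of equivalent behavior, not the fixture's source:

    const cleanCwdSketch = (str) => {
      // collapse Windows path separators and CRLF first, as the old inline version did
      const normalize = (p) => p.replace(/\\+/g, '/').replace(/\r\n/g, '\n')
      // then swap the real cwd for a stable token so snapshots are machine-independent
      return normalize(str).replace(new RegExp(normalize(process.cwd()), 'g'), '{CWD}')
    }
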
cleanCwd(str) t.test('simple query', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { @@ -71,7 +64,7 @@ t.test('recursive tree', async t => { t.test('workspace query', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { config: { - workspaces: ['c'], + workspace: ['c'], }, prefixDir: { node_modules: { @@ -101,7 +94,7 @@ t.test('workspace query', async t => { }), }, }) - await npm.exec('query', [':scope'], ['c']) + await npm.exec('query', [':scope']) t.matchSnapshot(joinedOutput(), 'should return workspace object') }) @@ -109,7 +102,7 @@ t.test('include-workspace-root', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { config: { 'include-workspace-root': true, - workspaces: ['c'], + workspace: ['c'], }, prefixDir: { node_modules: { @@ -139,7 +132,7 @@ t.test('include-workspace-root', async t => { }), }, }) - await npm.exec('query', [':scope'], ['c']) + await npm.exec('query', [':scope']) t.matchSnapshot(joinedOutput(), 'should return workspace object and root object') }) t.test('linked node', async t => { @@ -171,8 +164,6 @@ t.test('global', async t => { config: { global: true, }, - // This is a global dir that works in both windows and non-windows, that's - // why it has two node_modules folders globalPrefixDir: { node_modules: { lorem: { @@ -182,16 +173,7 @@ t.test('global', async t => { }), }, }, - lib: { - node_modules: { - lorem: { - 'package.json': JSON.stringify({ - name: 'lorem', - version: '2.0.0', - }), - }, - }, - }, + }, }) await npm.exec('query', ['[name=lorem]']) diff --git a/deps/npm/test/lib/commands/rebuild.js b/deps/npm/test/lib/commands/rebuild.js index 3bfd3707f588c7..bda161772ddfc5 100644 --- a/deps/npm/test/lib/commands/rebuild.js +++ b/deps/npm/test/lib/commands/rebuild.js @@ -1,53 +1,32 @@ const t = require('tap') const fs = require('fs') const { resolve } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -let result = '' - -const config = { - global: false, -} -const npm = mockNpm({ - globalDir: '', - config, - prefix: '', - output: (...msg) => { - result += msg.join('\n') - }, -}) -const Rebuild = require('../../../lib/commands/rebuild.js') -const rebuild = new Rebuild(npm) - -t.afterEach(() => { - npm.prefix = '' - config.global = false - npm.globalDir = '' - result = '' -}) +const setupMockNpm = require('../../fixtures/mock-npm') t.test('no args', async t => { - const path = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - bin: 'cwd', - scripts: { - preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", - }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - bin: 'cwd', - scripts: { - preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", - }, - }), + const { npm, joinedOutput, prefix: path } = await setupMockNpm(t, { + prefixDir: { + node_modules: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + bin: 'cwd', + scripts: { + preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", + }, + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + bin: 'cwd', + scripts: { + preinstall: "node -e \"require('fs').writeFileSync('cwd', '')\"", + }, + }), + }, }, }, }) @@ -61,9 +40,7 @@ t.test('no args', async t => { t.throws(() => fs.statSync(aBinFile)) t.throws(() => fs.statSync(bBinFile)) - npm.prefix = path - - await rebuild.exec([]) + await npm.exec('rebuild', []) t.ok(() => fs.statSync(aBuildFile)) t.ok(() => 
fs.statSync(bBuildFile)) @@ -71,136 +48,141 @@ t.test('no args', async t => { t.ok(() => fs.statSync(bBinFile)) t.equal( - result, + joinedOutput(), 'rebuilt dependencies successfully', 'should output success msg' ) }) t.test('filter by pkg name', async t => { - const path = t.testdir({ - node_modules: { - a: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - bin: 'index.js', - }), - }, - b: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - bin: 'index.js', - }), + const { npm, prefix: path } = await setupMockNpm(t, { + prefixDir: { + node_modules: { + a: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + bin: 'index.js', + }), + }, + b: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + bin: 'index.js', + }), + }, }, }, }) - npm.prefix = path - const aBinFile = resolve(path, 'node_modules/.bin/a') const bBinFile = resolve(path, 'node_modules/.bin/b') t.throws(() => fs.statSync(aBinFile)) t.throws(() => fs.statSync(bBinFile)) - await rebuild.exec(['b']) + await npm.exec('rebuild', ['b']) t.throws(() => fs.statSync(aBinFile), 'should not link a bin') t.ok(() => fs.statSync(bBinFile), 'should link filtered pkg bin') }) t.test('filter by pkg@', async t => { - const path = t.testdir({ - node_modules: { - a: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - bin: 'index.js', - }), - node_modules: { - b: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'b', - version: '2.0.0', - bin: 'index.js', - }), + const { npm, prefix: path } = await setupMockNpm(t, { + prefixDir: { + node_modules: { + a: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + bin: 'index.js', + }), + node_modules: { + b: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'b', + version: '2.0.0', + bin: 'index.js', + }), + }, }, }, - }, - b: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - bin: 'index.js', - }), + b: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + bin: 'index.js', + }), + }, }, }, }) - npm.prefix = path - const bBinFile = resolve(path, 'node_modules/.bin/b') const nestedBinFile = resolve(path, 'node_modules/a/node_modules/.bin/b') - await rebuild.exec(['b@2']) + await npm.exec('rebuild', ['b@2']) t.throws(() => fs.statSync(bBinFile), 'should not link b bin') t.ok(() => fs.statSync(nestedBinFile), 'should link filtered pkg bin') }) t.test('filter by directory', async t => { - const path = t.testdir({ - node_modules: { - a: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - bin: 'index.js', - }), - }, - b: { - 'index.js': '', - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - bin: 'index.js', - }), + const { npm, prefix: path } = await setupMockNpm(t, { + prefixDir: { + node_modules: { + a: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + bin: 'index.js', + }), + }, + b: { + 'index.js': '', + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + bin: 'index.js', + }), + }, }, }, }) - npm.prefix = path - const aBinFile = resolve(path, 'node_modules/.bin/a') const bBinFile = resolve(path, 'node_modules/.bin/b') t.throws(() => fs.statSync(aBinFile)) t.throws(() => fs.statSync(bBinFile)) - await rebuild.exec(['file:node_modules/b']) + await npm.exec('rebuild', 
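
The rebuild rewrite is the clearest before/after for the fixture contract: instead of mutating a shared npm object between tests, everything comes out of one setupMockNpm call. A usage sketch of the contract these call sites assume, where the returned property names (npm, prefix, globalPrefix, joinedOutput) are taken from the destructuring in the diff and the rest is illustrative:

    const setupMockNpm = require('../../fixtures/mock-npm')

    const example = async (t) => {
      const { npm, prefix, joinedOutput } = await setupMockNpm(t, {
        config: { global: false },
        prefixDir: {
          // written out as a real temp directory before npm is loaded
          'package.json': JSON.stringify({ name: 'demo', version: '1.0.0' }),
        },
      })
      await npm.exec('rebuild', [])   // run the command against the temp prefix
      return { prefix, out: joinedOutput() }
    }
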
['file:node_modules/b']) t.throws(() => fs.statSync(aBinFile), 'should not link a bin') t.ok(() => fs.statSync(bBinFile), 'should link filtered pkg bin') }) t.test('filter must be a semver version/range, or directory', async t => { + const { npm } = await setupMockNpm(t) + await t.rejects( - rebuild.exec(['git+ssh://github.com/npm/arborist']), + npm.exec('rebuild', ['git+ssh://github.com/npm/arborist']), /`npm rebuild` only supports SemVer version\/range specifiers/, 'should throw type error' ) }) t.test('global prefix', async t => { - const globalPath = t.testdir({ - lib: { + const { npm, globalPrefix, joinedOutput } = await setupMockNpm(t, { + config: { + global: true, + }, + globalPrefixDir: { node_modules: { a: { 'index.js': '', @@ -214,14 +196,11 @@ t.test('global prefix', async t => { }, }) - config.global = true - npm.globalDir = resolve(globalPath, 'lib', 'node_modules') - - await rebuild.exec([]) - t.ok(() => fs.statSync(resolve(globalPath, 'lib/node_modules/.bin/a'))) + await npm.exec('rebuild', []) + t.ok(() => fs.statSync(resolve(globalPrefix, 'lib/node_modules/.bin/a'))) t.equal( - result, + joinedOutput(), 'rebuilt dependencies successfully', 'should output success msg' ) diff --git a/deps/npm/test/lib/commands/repo.js b/deps/npm/test/lib/commands/repo.js index 86f1b8e27411f8..114cdf919510af 100644 --- a/deps/npm/test/lib/commands/repo.js +++ b/deps/npm/test/lib/commands/repo.js @@ -1,5 +1,5 @@ const t = require('tap') -const { load: _loadMockNpm } = require('../../fixtures/mock-npm.js') +const mockNpm = require('../../fixtures/mock-npm.js') const { sep } = require('path') const fixture = { @@ -180,24 +180,30 @@ const workspaceFixture = { }), } -// keep a tally of which urls got opened -let opened = {} -const openUrl = async (npm, url, errMsg) => { - opened[url] = opened[url] || 0 - opened[url]++ -} -t.afterEach(() => opened = {}) +const loadMockNpm = async (t, prefixDir, config = {}) => { + // keep a tally of which urls got opened + const opened = {} -const loadMockNpm = async (t, prefixDir) => { - const res = await _loadMockNpm(t, { - mocks: { '../../lib/utils/open-url.js': openUrl }, + const mock = await mockNpm(t, { + command: 'repo', + mocks: { + '{LIB}/utils/open-url.js': async (_, url) => { + opened[url] = opened[url] || 0 + opened[url]++ + }, + }, + config, prefixDir, }) - return res + + return { + ...mock, + opened, + } } t.test('open repo urls', async t => { - const { npm } = await loadMockNpm(t, fixture) + const { repo, opened } = await loadMockNpm(t, fixture) const expect = { hostedgit: 'https://github.com/foo/hostedgit', hostedgitat: 'https://github.com/foo/hostedgitat', @@ -224,22 +230,14 @@ t.test('open repo urls', async t => { directory: 'https://github.com/foo/test-repo-with-directory/tree/HEAD/some/directory', '.': 'https://example.com/thispkg', } - const keys = Object.keys(expect) - t.plan(keys.length) - keys.forEach(pkg => { - t.test(pkg, async t => { - await npm.exec('repo', [['.', pkg].join(sep)]) - const url = expect[pkg] - t.match({ - [url]: 1, - }, opened, `opened ${url}`, { opened }) - t.end() - }) - }) + for (const [pkg, url] of Object.entries(expect)) { + await repo.exec([['.', pkg].join(sep)]) + t.equal(opened[url], 1, `opened ${url}`) + } }) t.test('fail if cannot figure out repo url', async t => { - const { npm } = await loadMockNpm(t, fixture) + const { repo } = await loadMockNpm(t, fixture) const cases = [ 'norepo', @@ -248,37 +246,29 @@ t.test('fail if cannot figure out repo url', async t => { 
'unhostedgitatobj', ] - t.plan(cases.length) - - cases.forEach(pkg => { - t.test(pkg, async t => { - t.rejects( - npm.exec('repo', [['.', pkg].join(sep)]), - { pkgid: pkg } - ) - }) - }) + for (const pkg of cases) { + await t.rejects( + repo.exec([['.', pkg].join(sep)]), + { pkgid: pkg } + ) + } }) t.test('open default package if none specified', async t => { - const { npm } = await loadMockNpm(t, fixture) - await npm.exec('repo', []) + const { repo, opened } = await loadMockNpm(t, fixture) + await repo.exec([]) t.equal(opened['https://example.com/thispkg'], 1, 'opened expected url', { opened }) }) t.test('workspaces', async t => { - const { npm } = await loadMockNpm(t, workspaceFixture) - - t.afterEach(() => { - npm.config.set('workspaces', null) - npm.config.set('workspace', []) - npm.config.set('include-workspace-root', false) - }) + const mockWorkspaces = (t, config) => loadMockNpm(t, workspaceFixture, config) t.test('include workspace root', async (t) => { - npm.config.set('workspaces', true) - npm.config.set('include-workspace-root', true) - await npm.exec('repo', []) + const { opened, repo } = await mockWorkspaces(t, { + workspaces: true, + 'include-workspace-root': true, + }) + await repo.exec([]) t.match({ 'https://github.com/npm/workspaces-test': 1, 'https://repo.workspace-a/': 1, // Gets translated to https! @@ -287,8 +277,10 @@ t.test('workspaces', async t => { }) t.test('all workspaces', async (t) => { - npm.config.set('workspaces', true) - await npm.exec('repo', []) + const { opened, repo } = await mockWorkspaces(t, { + workspaces: true, + }) + await repo.exec([]) t.match({ 'https://repo.workspace-a/': 1, // Gets translated to https! 'https://github.com/npm/workspace-b': 1, @@ -296,25 +288,31 @@ t.test('workspaces', async t => { }) t.test('one workspace', async (t) => { - npm.config.set('workspace', ['workspace-a']) - await npm.exec('repo', []) + const { opened, repo } = await mockWorkspaces(t, { + workspace: ['workspace-a'], + }) + await repo.exec([]) t.match({ 'https://repo.workspace-a/': 1, }, opened, 'opened one requested repo urls') }) t.test('invalid workspace', async (t) => { - npm.config.set('workspace', ['workspace-x']) + const { opened, repo } = await mockWorkspaces(t, { + workspace: ['workspace-x'], + }) await t.rejects( - npm.exec('repo', []), + repo.exec([]), /workspace-x/ ) t.match({}, opened, 'opened no repo urls') }) t.test('package arg and workspace', async (t) => { - npm.config.set('workspace', ['workspace-a']) - await npm.exec('repo', ['.']) + const { opened, repo } = await mockWorkspaces(t, { + workspace: ['workspace-x'], + }) + await repo.exec(['.']) t.match({ 'https://github.com/npm/workspaces-test': 1, }, opened, 'opened url for package arg, not workspace') diff --git a/deps/npm/test/lib/commands/restart.js b/deps/npm/test/lib/commands/restart.js index f9745acdd11b7d..b8b760675f7c5e 100644 --- a/deps/npm/test/lib/commands/restart.js +++ b/deps/npm/test/lib/commands/restart.js @@ -19,11 +19,11 @@ t.test('should run restart script from package.json', async t => { }, config: { loglevel: 'silent', - scriptShell: process.platform === 'win32' ? process.env.COMSPEC : 'sh', + 'script-shell': process.platform === 'win32' ? process.env.COMSPEC : 'sh', }, }) - const scriptShell = npm.config.get('scriptShell') + const scriptShell = npm.config.get('script-shell') const scriptArgs = isCmdRe.test(scriptShell) ? 
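
The small-looking change above is load-bearing: npm config keys are the kebab-case CLI names, so npm.config.get('script-shell') resolves, while camelCase scriptShell is only the flatOptions spelling of the same option (the spelling the old run-script test stubbed, per the hunks further down). Side by side:

    // two views of the same setting
    const shellFor = (npm) => ({
      fromConfig: npm.config.get('script-shell'),   // kebab-case: the config/CLI key
      fromFlatOptions: npm.flatOptions.scriptShell, // camelCase: derived flatOptions
    })
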
['/d', '/s', '/c', 'node ./test-restart.js foo'] : ['-c', 'node ./test-restart.js foo'] diff --git a/deps/npm/test/lib/commands/run-script.js b/deps/npm/test/lib/commands/run-script.js index 8aafebcaf8d110..a265db3cc040da 100644 --- a/deps/npm/test/lib/commands/run-script.js +++ b/deps/npm/test/lib/commands/run-script.js @@ -1,125 +1,84 @@ const t = require('tap') const { resolve } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const normalizePath = p => p.replace(/\\+/g, '/').replace(/\r\n/g, '\n') - -const cleanOutput = str => normalizePath(str).replace(normalizePath(process.cwd()), '{CWD}') - -const RUN_SCRIPTS = [] -const flatOptions = { - scriptShell: undefined, -} -const defaultLoglevel = 'info' -const config = { - json: false, - parseable: false, - 'if-present': false, - loglevel: defaultLoglevel, -} - -const npm = mockNpm({ - localPrefix: __dirname, - flatOptions, - config, - cmd: c => { - return { description: `test ${c} description` } - }, - output: (...msg) => output.push(msg), -}) - -const setLoglevel = (t, level) => { - npm.config.set('loglevel', level) - t.teardown(() => { - npm.config.set('loglevel', defaultLoglevel) +const realRunScript = require('@npmcli/run-script') +const mockNpm = require('../../fixtures/mock-npm') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + +const mockRs = async (t, { windows = false, runScript, ...opts } = {}) => { + let RUN_SCRIPTS = [] + + t.afterEach(() => RUN_SCRIPTS = []) + + const mock = await mockNpm(t, { + ...opts, + mocks: { + '@npmcli/run-script': Object.assign( + async rs => { + if (runScript) { + await runScript(rs) + } + RUN_SCRIPTS.push(rs) + }, + realRunScript + ), + '{LIB}/utils/is-windows.js': { isWindowsShell: windows }, + }, }) -} - -const output = [] - -const log = { - error: () => null, -} - -t.afterEach(() => { - npm.color = false - log.error = () => null - output.length = 0 - RUN_SCRIPTS.length = 0 - config['if-present'] = false - config.json = false - config.parseable = false -}) -const getRS = windows => { - const RunScript = t.mock('../../../lib/commands/run-script.js', { - '@npmcli/run-script': Object.assign( - async opts => { - RUN_SCRIPTS.push(opts) - }, - { - isServerPackage: require('@npmcli/run-script').isServerPackage, - } - ), - 'proc-log': log, - '../../../lib/utils/is-windows.js': { isWindowsShell: windows }, - }) - return new RunScript(npm) + return { + ...mock, + RUN_SCRIPTS: () => RUN_SCRIPTS, + runScript: { exec: (args) => mock.npm.exec('run-script', args) }, + cleanLogs: () => mock.logs.error.flat().map(v => v.toString()).map(cleanCwd), + } } -const runScript = getRS(false) -const runScriptWin = getRS(true) +t.test('completion', async t => { + const completion = async (t, remain, pkg) => { + const { npm } = await mockRs(t, + pkg ? 
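
mockRs above keeps @npmcli/run-script's module shape by copying the real export's properties onto a recording stub, so anything that reaches for statics such as isServerPackage still works while nothing actually spawns a shell. The trick in isolation:

    const realRunScript = require('@npmcli/run-script')

    const RUN_SCRIPTS = []
    const recordingRunScript = Object.assign(
      async (opts) => { RUN_SCRIPTS.push(opts) }, // record the call instead of running it
      realRunScript                               // retain statics like isServerPackage
    )
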
{ prefixDir: { 'package.json': JSON.stringify(pkg) } } : {} + ) + const cmd = await npm.cmd('run-script') + return cmd.completion({ conf: { argv: { remain } } }) + } -const { writeFileSync } = require('fs') -t.test('completion', t => { - const dir = t.testdir() - npm.localPrefix = dir t.test('already have a script name', async t => { - const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run', 'x'] } } }) + const res = await completion(t, ['npm', 'run', 'x']) t.equal(res, undefined) - t.end() }) t.test('no package.json', async t => { - const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) + const res = await completion(t, ['npm', 'run']) t.strictSame(res, []) - t.end() }) t.test('has package.json, no scripts', async t => { - writeFileSync(`${dir}/package.json`, JSON.stringify({})) - const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) + const res = await completion(t, ['npm', 'run'], {}) t.strictSame(res, []) - t.end() }) t.test('has package.json, with scripts', async t => { - writeFileSync( - `${dir}/package.json`, - JSON.stringify({ - scripts: { hello: 'echo hello', world: 'echo world' }, - }) - ) - const res = await runScript.completion({ conf: { argv: { remain: ['npm', 'run'] } } }) + const res = await completion(t, ['npm', 'run'], { + scripts: { hello: 'echo hello', world: 'echo world' }, + }) t.strictSame(res, ['hello', 'world']) - t.end() }) - t.end() }) t.test('fail if no package.json', async t => { - t.plan(2) - npm.localPrefix = t.testdir() + const { runScript } = await mockRs(t) await t.rejects(runScript.exec([]), { code: 'ENOENT' }) await t.rejects(runScript.exec(['test']), { code: 'ENOENT' }) }) -t.test('default env, start, and restart scripts', t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ name: 'x', version: '1.2.3' }), - 'server.js': 'console.log("hello, world")', +t.test('default env, start, and restart scripts', async t => { + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: 'x', version: '1.2.3' }), + 'server.js': 'console.log("hello, world")', + }, }) t.test('start', async t => { await runScript.exec(['start']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { path: npm.localPrefix, args: [], @@ -133,7 +92,7 @@ t.test('default env, start, and restart scripts', t => { t.test('env', async t => { await runScript.exec(['env']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { path: npm.localPrefix, args: [], @@ -152,31 +111,10 @@ t.test('default env, start, and restart scripts', t => { ]) }) - t.test('windows env', async t => { - await runScriptWin.exec(['env']) - t.match(RUN_SCRIPTS, [ - { - path: npm.localPrefix, - args: [], - scriptShell: undefined, - stdio: 'inherit', - pkg: { - name: 'x', - version: '1.2.3', - _id: 'x@1.2.3', - scripts: { - env: 'SET', - }, - }, - event: 'env', - }, - ]) - }) - t.test('restart', async t => { await runScript.exec(['restart']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { path: npm.localPrefix, args: [], @@ -194,23 +132,52 @@ t.test('default env, start, and restart scripts', t => { }, ]) }) - t.end() }) -t.test('non-default env script', t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { - env: 'hello', +t.test('default windows env', async t => { + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + windows: true, + prefixDir: { + 'package.json': JSON.stringify({ name: 
'x', version: '1.2.3' }), + 'server.js': 'console.log("hello, world")', + }, + }) + await runScript.exec(['env']) + t.match(RUN_SCRIPTS(), [ + { + path: npm.localPrefix, + args: [], + scriptShell: undefined, + stdio: 'inherit', + pkg: { + name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'SET', + }, }, - }), + event: 'env', + }, + ]) +}) + +t.test('non-default env script', async t => { + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { + env: 'hello', + }, + }), + }, }) t.test('env', async t => { await runScript.exec(['env']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { path: npm.localPrefix, args: [], @@ -228,71 +195,98 @@ t.test('non-default env script', t => { }, ]) }) +}) - t.test('env windows', async t => { - await runScriptWin.exec(['env']) - t.match(RUN_SCRIPTS, [ - { - path: npm.localPrefix, - args: [], - scriptShell: undefined, - stdio: 'inherit', - pkg: { - name: 'x', - version: '1.2.3', - _id: 'x@1.2.3', - scripts: { - env: 'hello', - }, +t.test('non-default env script windows', async t => { + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + windows: true, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { + env: 'hello', }, - event: 'env', - }, - ]) + }), + }, }) - t.end() + + await runScript.exec(['env']) + + t.match(RUN_SCRIPTS(), [ + { + path: npm.localPrefix, + args: [], + scriptShell: undefined, + stdio: 'inherit', + pkg: { + name: 'x', + version: '1.2.3', + _id: 'x@1.2.3', + scripts: { + env: 'hello', + }, + }, + event: 'env', + }, + ]) }) -t.test('try to run missing script', t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - scripts: { hello: 'world' }, - bin: { goodnight: 'moon' }, - }), - }) - t.test('no suggestions', async t => { - await t.rejects(runScript.exec(['notevenclose']), 'Missing script: "notevenclose"') - }) - t.test('script suggestions', async t => { - await t.rejects(runScript.exec(['helo']), /Missing script: "helo"/) - await t.rejects(runScript.exec(['helo']), /npm run hello/) - }) - t.test('bin suggestions', async t => { - await t.rejects(runScript.exec(['goodneght']), /Missing script: "goodneght"/) - await t.rejects(runScript.exec(['goodneght']), /npm exec goodnight/) +t.test('try to run missing script', async t => { + t.test('errors', async t => { + const { runScript } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + scripts: { hello: 'world' }, + bin: { goodnight: 'moon' }, + }), + }, + }) + t.test('no suggestions', async t => { + await t.rejects(runScript.exec(['notevenclose']), 'Missing script: "notevenclose"') + }) + t.test('script suggestions', async t => { + await t.rejects(runScript.exec(['helo']), /Missing script: "helo"/) + await t.rejects(runScript.exec(['helo']), /npm run hello/) + }) + t.test('bin suggestions', async t => { + await t.rejects(runScript.exec(['goodneght']), /Missing script: "goodneght"/) + await t.rejects(runScript.exec(['goodneght']), /npm exec goodnight/) + }) }) + t.test('with --if-present', async t => { - config['if-present'] = true + const { runScript, RUN_SCRIPTS } = await mockRs(t, { + config: { 'if-present': true }, + prefixDir: { + 'package.json': JSON.stringify({ + scripts: { hello: 'world' }, + bin: { goodnight: 'moon' }, + }), + }, + }) await runScript.exec(['goodbye']) - t.strictSame(RUN_SCRIPTS, [], 'did not try to run anything') + t.strictSame(RUN_SCRIPTS(), [], 'did not try to 
run anything') }) - t.end() }) t.test('run pre/post hooks', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { - preenv: 'echo before the env', - postenv: 'echo after the env', - }, - }), + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { + preenv: 'echo before the env', + postenv: 'echo after the env', + }, + }), + }, }) await runScript.exec(['env']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { event: 'preenv' }, { path: npm.localPrefix, @@ -314,22 +308,23 @@ t.test('run pre/post hooks', async t => { }) t.test('skip pre/post hooks when using ignoreScripts', async t => { - config['ignore-scripts'] = true - - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { - preenv: 'echo before the env', - postenv: 'echo after the env', - }, - }), + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { + preenv: 'echo before the env', + postenv: 'echo after the env', + }, + }), + }, + config: { 'ignore-scripts': true }, }) await runScript.exec(['env']) - t.same(RUN_SCRIPTS, [ + t.same(RUN_SCRIPTS(), [ { path: npm.localPrefix, args: [], @@ -349,25 +344,25 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => { event: 'env', }, ]) - delete config['ignore-scripts'] }) t.test('run silent', async t => { - setLoglevel(t, 'silent') - - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { - preenv: 'echo before the env', - postenv: 'echo after the env', - }, - }), + const { npm, runScript, RUN_SCRIPTS } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { + preenv: 'echo before the env', + postenv: 'echo after the env', + }, + }), + }, + config: { silent: true }, }) await runScript.exec(['env']) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { event: 'preenv', stdio: 'inherit', @@ -395,7 +390,7 @@ t.test('run silent', async t => { ]) }) -t.test('list scripts', t => { +t.test('list scripts', async t => { const scripts = { test: 'exit 2', start: 'node server.js', @@ -403,16 +398,26 @@ t.test('list scripts', t => { preenv: 'echo before the env', postenv: 'echo after the env', } - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts, - }), - }) + + const mockList = async (t, config = {}) => { + const mock = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts, + }), + }, + config, + }) + + await mock.runScript.exec([]) + + return mock.outputs + } t.test('no args', async t => { - await runScript.exec([]) + const output = await mockList(t) t.strictSame( output, [ @@ -430,20 +435,17 @@ t.test('list scripts', t => { }) t.test('silent', async t => { - setLoglevel(t, 'silent') - await runScript.exec([]) - t.strictSame(output, []) + const outputs = await mockList(t, { silent: true }) + t.strictSame(outputs, []) }) t.test('warn json', async t => { - config.json = true - await runScript.exec([]) - t.strictSame(output, [[JSON.stringify(scripts, 0, 2)]], 'json report') + const outputs = await mockList(t, { json: true }) + t.strictSame(outputs, [[JSON.stringify(scripts, 0, 2)]], 'json report') }) t.test('parseable', async t => { - 
config.parseable = true - await runScript.exec([]) - t.strictSame(output, [ + const outputs = await mockList(t, { parseable: true }) + t.strictSame(outputs, [ ['test:exit 2'], ['start:node server.js'], ['stop:node kill-server.js'], @@ -451,32 +453,35 @@ t.test('list scripts', t => { ['postenv:echo after the env'], ]) }) - t.end() }) t.test('list scripts when no scripts', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - }), + const { runScript, outputs } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + }), + }, }) await runScript.exec([]) - t.strictSame(output, [], 'nothing to report') + t.strictSame(outputs, [], 'nothing to report') }) t.test('list scripts, only commands', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { preversion: 'echo doing the version dance' }, - }), + const { runScript, outputs } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { preversion: 'echo doing the version dance' }, + }), + }, }) await runScript.exec([]) - t.strictSame(output, [ + t.strictSame(outputs, [ ['Lifecycle scripts included in x@1.2.3:'], [' preversion\n echo doing the version dance'], [''], @@ -484,83 +489,104 @@ t.test('list scripts, only commands', async t => { }) t.test('list scripts, only non-commands', async t => { - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - scripts: { glorp: 'echo doing the glerp glop' }, - }), + const { runScript, outputs } = await mockRs(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'x', + version: '1.2.3', + scripts: { glorp: 'echo doing the glerp glop' }, + }), + }, }) await runScript.exec([]) - t.strictSame(output, [ + t.strictSame(outputs, [ ['Scripts available in x@1.2.3 via `npm run-script`:'], [' glorp\n echo doing the glerp glop'], [''], ]) }) -t.test('workspaces', t => { - npm.localPrefix = t.testdir({ - packages: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - scripts: { glorp: 'echo a doing the glerp glop' }, - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '2.0.0', - scripts: { glorp: 'echo b doing the glerp glop' }, - }), - }, - c: { - 'package.json': JSON.stringify({ - name: 'c', - version: '1.0.0', - scripts: { - test: 'exit 0', - posttest: 'echo posttest', - lorem: 'echo c lorem', +t.test('workspaces', async t => { + const mockWorkspaces = async (t, { + runScript, + prefixDir, + workspaces = true, + exec = [], + ...config + } = {}) => { + const mock = await mockRs(t, { + prefixDir: prefixDir || { + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + scripts: { glorp: 'echo a doing the glerp glop' }, + }), }, - }), - }, - d: { - 'package.json': JSON.stringify({ - name: 'd', - version: '1.0.0', - scripts: { - test: 'exit 0', - posttest: 'echo posttest', + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '2.0.0', + scripts: { glorp: 'echo b doing the glerp glop' }, + }), }, - }), - }, - e: { + c: { + 'package.json': JSON.stringify({ + name: 'c', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + lorem: 'echo c lorem', + }, + }), + }, + d: { + 'package.json': JSON.stringify({ + name: 'd', + version: '1.0.0', + scripts: { + test: 'exit 0', + posttest: 'echo posttest', + }, + }), + }, + e: { 
+ 'package.json': JSON.stringify({ + name: 'e', + scripts: { test: 'exit 0', start: 'echo start something' }, + }), + }, + noscripts: { + 'package.json': JSON.stringify({ + name: 'noscripts', + version: '1.0.0', + }), + }, + }, 'package.json': JSON.stringify({ - name: 'e', - scripts: { test: 'exit 0', start: 'echo start something' }, + name: 'x', + version: '1.2.3', + workspaces: ['packages/*'], }), }, - noscripts: { - 'package.json': JSON.stringify({ - name: 'noscripts', - version: '1.0.0', - }), + config: { + ...Array.isArray(workspaces) ? { workspace: workspaces } : { workspaces }, + ...config, }, - }, - 'package.json': JSON.stringify({ - name: 'x', - version: '1.2.3', - workspaces: ['packages/*'], - }), - }) + runScript, + }) + if (exec) { + await mock.runScript.exec(exec) + } + return mock + } t.test('list all scripts', async t => { - await runScript.execWorkspaces([], []) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t) + t.strictSame(outputs, [ ['Scripts available in a@1.0.0 via `npm run-script`:'], [' glorp\n echo a doing the glerp glop'], [''], @@ -585,8 +611,8 @@ t.test('workspaces', t => { }) t.test('list regular scripts, filtered by name', async t => { - await runScript.execWorkspaces([], ['a', 'b']) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { workspaces: ['a', 'b'] }) + t.strictSame(outputs, [ ['Scripts available in a@1.0.0 via `npm run-script`:'], [' glorp\n echo a doing the glerp glop'], [''], @@ -597,8 +623,8 @@ t.test('workspaces', t => { }) t.test('list regular scripts, filtered by path', async t => { - await runScript.execWorkspaces([], ['./packages/a']) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { workspaces: ['./packages/a'] }) + t.strictSame(outputs, [ ['Scripts available in a@1.0.0 via `npm run-script`:'], [' glorp\n echo a doing the glerp glop'], [''], @@ -606,8 +632,8 @@ t.test('workspaces', t => { }) t.test('list regular scripts, filtered by parent folder', async t => { - await runScript.execWorkspaces([], ['./packages']) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { workspaces: ['./packages'] }) + t.strictSame(outputs, [ ['Scripts available in a@1.0.0 via `npm run-script`:'], [' glorp\n echo a doing the glerp glop'], [''], @@ -632,9 +658,8 @@ t.test('workspaces', t => { }) t.test('list all scripts with colors', async t => { - npm.color = true - await runScript.execWorkspaces([], []) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { color: 'always' }) + t.strictSame(outputs, [ [ /* eslint-disable-next-line max-len */ '\u001b[1mScripts\u001b[22m available in \x1B[32ma@1.0.0\x1B[39m via `\x1B[34mnpm run-script\x1B[39m`:', @@ -665,9 +690,8 @@ t.test('workspaces', t => { }) t.test('list all scripts --json', async t => { - config.json = true - await runScript.execWorkspaces([], []) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { json: true }) + t.strictSame(outputs, [ [ '{\n' + ' "a": {\n' + @@ -696,9 +720,8 @@ t.test('workspaces', t => { }) t.test('list all scripts --parseable', async t => { - config.parseable = true - await runScript.execWorkspaces([], []) - t.strictSame(output, [ + const { outputs } = await mockWorkspaces(t, { parseable: true }) + t.strictSame(outputs, [ ['a:glorp:echo a doing the glerp glop'], ['b:glorp:echo b doing the glerp glop'], ['c:test:exit 0'], @@ -712,15 +735,14 @@ t.test('workspaces', t => { }) t.test('list no scripts --loglevel=silent', async t => { - setLoglevel(t, 'silent') - await 
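
With execWorkspaces gone, workspace selection travels through config, the same path the real CLI takes. The mapping mockWorkspaces uses is right there in the diff; it is worth isolating because the array/boolean split is easy to miss:

    // array -> repeated --workspace=<name|path>; boolean -> --workspaces
    const workspaceConfig = (workspaces) =>
      Array.isArray(workspaces) ? { workspace: workspaces } : { workspaces }

    // so { workspaces: ['a', 'b'] } behaves like `npm run --workspace=a --workspace=b`,
    // and { workspaces: true } like `npm run --workspaces`
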
runScript.execWorkspaces([], []) - t.strictSame(output, []) + const { outputs } = await mockWorkspaces(t, { silent: true }) + t.strictSame(outputs, []) }) t.test('run scripts across all workspaces', async t => { - await runScript.execWorkspaces(['test'], []) + const { npm, RUN_SCRIPTS } = await mockWorkspaces(t, { exec: ['test'] }) - t.match(RUN_SCRIPTS, [ + t.match(RUN_SCRIPTS(), [ { path: resolve(npm.localPrefix, 'packages/c'), pkg: { name: 'c', version: '1.0.0' }, @@ -750,70 +772,65 @@ t.test('workspaces', t => { }) t.test('missing scripts in all workspaces', async t => { - const LOG = [] - log.error = err => { - LOG.push(String(err)) - } + const { runScript, RUN_SCRIPTS, cleanLogs } = await mockWorkspaces(t, { exec: null }) + await t.rejects( - runScript.execWorkspaces(['missing-script'], []), + runScript.exec(['missing-script']), /Missing script: missing-script/, 'should throw missing script error' ) - process.exitCode = 0 // clean exit code - - t.match(RUN_SCRIPTS, []) + t.match(RUN_SCRIPTS(), []) t.strictSame( - LOG.map(cleanOutput), + cleanLogs(), [ 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: a@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', + ' at location: {CWD}/prefix/packages/a', 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: b@2.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', + ' at location: {CWD}/prefix/packages/b', 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: c@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/c', + ' at location: {CWD}/prefix/packages/c', 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: d@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/d', + ' at location: {CWD}/prefix/packages/d', 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: e', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/e', + ' at location: {CWD}/prefix/packages/e', 'Lifecycle script `missing-script` failed with error:', 'Error: Missing script: "missing-script"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: noscripts@1.0.0', - /* eslint-disable-next-line max-len */ - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/noscripts', + ' at location: {CWD}/prefix/packages/noscripts', ], 'should log error msgs for each workspace script' ) }) t.test('missing scripts in some workspaces', async t => { - const LOG = [] - log.error = err => { - LOG.push(String(err)) - } - await runScript.execWorkspaces(['test'], ['a', 'b', 'c', 'd']) - t.match(RUN_SCRIPTS, []) + const { RUN_SCRIPTS, cleanLogs } = await mockWorkspaces(t, { + exec: ['test'], + workspaces: ['a', 'b', 'c', 'd'], + }) + + t.match(RUN_SCRIPTS(), []) t.strictSame( - LOG.map(cleanOutput), + cleanLogs(), [ 'Lifecycle script `test` failed with error:', 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', ' in 
workspace: a@1.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/a', + ' at location: {CWD}/prefix/packages/a', 'Lifecycle script `test` failed with error:', 'Error: Missing script: "test"\n\nTo see a list of scripts, run:\n npm run', ' in workspace: b@2.0.0', - ' at location: {CWD}/test/lib/commands/tap-testdir-run-script-workspaces/packages/b', + ' at location: {CWD}/prefix/packages/b', ], 'should log error msgs for each workspace script' ) @@ -821,68 +838,76 @@ t.test('workspaces', t => { t.test('no workspaces when filtering by user args', async t => { await t.rejects( - runScript.execWorkspaces([], ['foo', 'bar']), + mockWorkspaces(t, { workspaces: ['foo', 'bar'] }), 'No workspaces found:\n --workspace=foo --workspace=bar', 'should throw error msg' ) }) t.test('no workspaces', async t => { - const _prevPrefix = npm.localPrefix - npm.localPrefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'foo', - version: '1.0.0', - }), - }) - await t.rejects( - runScript.execWorkspaces([], []), + mockWorkspaces(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }, + }), /No workspaces found!/, 'should throw error msg' ) - npm.localPrefix = _prevPrefix }) t.test('single failed workspace run', async t => { - const RunScript = t.mock('../../../lib/commands/run-script.js', { - '@npmcli/run-script': () => { + const { cleanLogs } = await mockWorkspaces(t, { + runScript: () => { throw new Error('err') }, - 'proc-log': log, - '../../../lib/utils/is-windows.js': { isWindowsShell: false }, + exec: ['test'], + workspaces: ['c'], }) - const runScript = new RunScript(npm) - await runScript.execWorkspaces(['test'], ['c']) - process.exitCode = 0 // clean up exit code + t.strictSame( + cleanLogs(), + [ + 'Lifecycle script `test` failed with error:', + 'Error: err', + ' in workspace: c@1.0.0', + ' at location: {CWD}/prefix/packages/c', + ], + 'should log error msgs for each workspace script' + ) }) t.test('failed workspace run with succeeded runs', async t => { - const RunScript = t.mock('../../../lib/commands/run-script.js', { - '@npmcli/run-script': async opts => { + const { cleanLogs, RUN_SCRIPTS, prefix } = await mockWorkspaces(t, { + runScript: (opts) => { if (opts.pkg.name === 'a') { throw new Error('ERR') } - - RUN_SCRIPTS.push(opts) }, - 'proc-log': log, - '../../../lib/utils/is-windows.js': { isWindowsShell: false }, + exec: ['glorp'], + workspaces: ['a', 'b'], }) - const runScript = new RunScript(npm) - await runScript.execWorkspaces(['glorp'], ['a', 'b']) - t.match(RUN_SCRIPTS, [ + t.strictSame( + cleanLogs(), + [ + 'Lifecycle script `glorp` failed with error:', + 'Error: ERR', + ' in workspace: a@1.0.0', + ' at location: {CWD}/prefix/packages/a', + ], + 'should log error msgs for each workspace script' + ) + + t.match(RUN_SCRIPTS(), [ { - path: resolve(npm.localPrefix, 'packages/b'), + path: resolve(prefix, 'packages/b'), pkg: { name: 'b', version: '2.0.0' }, event: 'glorp', }, ]) - - process.exitCode = 0 // clean up exit code }) - - t.end() }) diff --git a/deps/npm/test/lib/commands/set.js b/deps/npm/test/lib/commands/set.js index ce59870e2fec4a..69c4dd87056af9 100644 --- a/deps/npm/test/lib/commands/set.js +++ b/deps/npm/test/lib/commands/set.js @@ -1,60 +1,49 @@ const t = require('tap') - -// can't run this until npm set can save to project level npmrc -t.skip('npm set', async t => { - // XXX: convert to loadMockNpm - const { real: mockNpm } = require('../../fixtures/mock-npm') - const { joinedOutput, Npm 
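
The rewritten set test that follows asserts persistence end to end: run npm set, then read the npmrc back off disk. cleanNewlines comes from the shared clean-snapshot fixture; given that it is applied to raw file contents before a string comparison, it presumably just normalizes line endings, roughly:

    // assumed implementation; only CRLF -> LF normalization is implied by the usage
    const cleanNewlines = (s) => s.replace(/\r\n/g, '\n')
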
} = mockNpm(t) - const npm = new Npm() - await npm.load() - - t.test('no args', async t => { - t.rejects(npm.exec('set', []), /Usage:/, 'prints usage') - }) - - t.test('test-config-item', async t => { - npm.localPrefix = t.testdir({}) - t.not( - npm.config.get('test-config-item', 'project'), - 'test config value', - 'config is not already new value' - ) - // This will write to ~/.npmrc! - // Don't unskip until we can write to project level - await npm.exec('set', ['test-config-item=test config value']) - t.equal(joinedOutput(), '', 'outputs nothing') - t.equal( - npm.config.get('test-config-item', 'project'), - 'test config value', - 'config is set to new value' - ) - }) +const fs = require('fs/promises') +const mockNpm = require('../../fixtures/mock-npm') +const { join } = require('path') +const { cleanNewlines } = require('../../fixtures/clean-snapshot') + +t.test('no args', async t => { + const { npm } = await mockNpm(t) + t.rejects(npm.exec('set', []), /Usage:/, 'prints usage') }) -// Everything after this can go away once the above test is unskipped - -let configArgs = null -const npm = { - exec: async (cmd, args) => { - if (cmd === 'config') { - configArgs = args - } - }, - config: { - validate: () => {}, - isDefault: () => {}, - }, -} - -const Set = t.mock('../../../lib/commands/set.js') -const set = new Set(npm) - -t.test('npm set - no args', async t => { - await t.rejects(set.exec([]), set.usage) -}) - -t.test('npm set', async t => { - await set.exec(['email', 'me@me.me']) +t.test('test-config-item', async t => { + const { npm, home, joinedOutput } = await mockNpm(t, { + homeDir: { + '.npmrc': 'original-config-test=original value', + }, + }) - t.strictSame(configArgs, ['set', 'email', 'me@me.me'], 'passed the correct arguments to config') + t.equal( + npm.config.get('original-config-test'), + 'original value', + 'original config is set from npmrc' + ) + + t.not( + npm.config.get('fund'), + false, + 'config is not already new value' + ) + + await npm.exec('set', ['fund=true']) + t.equal(joinedOutput(), '', 'outputs nothing') + + t.equal( + npm.config.get('fund'), + true, + 'config is set to new value' + ) + + t.equal( + cleanNewlines(await fs.readFile(join(home, '.npmrc'), 'utf-8')), + [ + 'original-config-test=original value', + 'fund=true', + '', + ].join('\n'), + 'npmrc is written with new value' + ) }) diff --git a/deps/npm/test/lib/commands/stars.js b/deps/npm/test/lib/commands/stars.js index 44de6ba1fb9607..124d2d344d8dae 100644 --- a/deps/npm/test/lib/commands/stars.js +++ b/deps/npm/test/lib/commands/stars.js @@ -1,34 +1,36 @@ const t = require('tap') +const realFetch = require('npm-registry-fetch') +const mockNpm = require('../../fixtures/mock-npm') -let result = '' +const noop = () => {} -const noop = () => null -const npm = { - config: { get () {}, validate: () => {} }, - flatOptions: {}, - output: (...msg) => { - result = [result, ...msg].join('\n') - }, -} -const npmFetch = { json: noop } -const log = { warn: noop } -const mocks = { - 'proc-log': log, - 'npm-registry-fetch': npmFetch, - '../../../lib/utils/get-identity.js': async () => 'foo', -} +const mockStars = async (t, { npmFetch = noop, exec = true, ...opts }) => { + const mock = await mockNpm(t, { + mocks: { + 'npm-registry-fetch': Object.assign(noop, realFetch, { json: npmFetch }), + '{LIB}/utils/get-identity.js': async () => 'foo', + }, + ...opts, + }) -const Stars = t.mock('../../../lib/commands/stars.js', mocks) -const stars = new Stars(npm) + const stars = { exec: (args) => mock.npm.exec('stars', args) } 
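
Same spy pattern as the run-script mock above: keep the real npm-registry-fetch surface, replace only the .json method the stars command actually calls. Isolated, with a hypothetical usage line:

    const realFetch = require('npm-registry-fetch')
    const noop = () => {}

    // callable stub + the real module's properties + a stubbed .json
    const makeFetchMock = (json) => Object.assign(noop, realFetch, { json })

    // e.g. mocks: { 'npm-registry-fetch': makeFetchMock(async () => ({ rows: [] })) }
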
-t.afterEach(() => { - npm.config = { get () {} } - log.warn = noop - result = '' -}) + if (exec) { + await stars.exec(Array.isArray(exec) ? exec : []) + mock.result = mock.joinedOutput() + } + + return { + ...mock, + stars, + logs: () => mock.logs.filter(l => l[1] === 'stars').map(l => l[2]), + } +} t.test('no args', async t => { - npmFetch.json = async (uri, opts) => { + t.plan(3) + + const npmFetch = async (uri, opts) => { t.equal(uri, '/-/_view/starredByUser', 'should fetch from expected uri') t.equal(opts.query.key, '"foo"', 'should match logged in username') @@ -43,7 +45,7 @@ t.test('no args', async t => { } } - await stars.exec([]) + const { result } = await mockStars(t, { npmFetch, exec: true }) t.matchSnapshot( result, @@ -53,7 +55,8 @@ t.test('no args', async t => { t.test('npm star ', async t => { t.plan(3) - npmFetch.json = async (uri, opts) => { + + const npmFetch = async (uri, opts) => { t.equal(uri, '/-/_view/starredByUser', 'should fetch from expected uri') t.equal(opts.query.key, '"ruyadorno"', 'should match username') @@ -62,7 +65,7 @@ t.test('npm star ', async t => { } } - await stars.exec(['ruyadorno']) + const { result } = await mockStars(t, { npmFetch, exec: ['ruyadorno'] }) t.match( result, @@ -72,22 +75,14 @@ t.test('npm star ', async t => { }) t.test('unauthorized request', async t => { - t.plan(4) - npmFetch.json = async () => { + const npmFetch = async () => { throw Object.assign( new Error('Not logged in'), { code: 'ENEEDAUTH' } ) } - log.warn = (title, msg) => { - t.equal(title, 'stars', 'should use expected title') - t.equal( - msg, - 'auth is required to look up your username', - 'should warn auth required msg' - ) - } + const { joinedOutput, stars, logs } = await mockStars(t, { npmFetch, exec: false }) await t.rejects( stars.exec([]), @@ -95,41 +90,43 @@ t.test('unauthorized request', async t => { 'should throw unauthorized request msg' ) + t.strictSame( + logs(), + ['auth is required to look up your username'], + 'should warn auth required msg' + ) + t.equal( - result, + joinedOutput(), '', 'should have empty output' ) }) t.test('unexpected error', async t => { - npmFetch.json = async () => { + const npmFetch = async () => { throw new Error('ERROR') } - log.warn = (title, msg) => { - throw new Error('Should not output extra warning msgs') - } + const { stars, logs } = await mockStars(t, { npmFetch, exec: false }) await t.rejects( stars.exec([]), /ERROR/, 'should throw unexpected error message' ) + + t.strictSame(logs(), [], 'no logs') }) t.test('no pkg starred', async t => { - t.plan(2) - npmFetch.json = async (uri, opts) => ({ rows: [] }) - - log.warn = (title, msg) => { - t.equal(title, 'stars', 'should use expected title') - t.equal( - msg, - 'user has not starred any packages', - 'should warn no starred packages msg' - ) - } + const npmFetch = async () => ({ rows: [] }) - await stars.exec([]) + const { logs } = await mockStars(t, { npmFetch, exec: true }) + + t.strictSame( + logs(), + ['user has not starred any packages'], + 'should warn no starred packages msg' + ) }) diff --git a/deps/npm/test/lib/commands/start.js b/deps/npm/test/lib/commands/start.js index 47f7f1a6e0f517..b0e908b6ae372b 100644 --- a/deps/npm/test/lib/commands/start.js +++ b/deps/npm/test/lib/commands/start.js @@ -19,11 +19,11 @@ t.test('should run start script from package.json', async t => { }, config: { loglevel: 'silent', - scriptShell: process.platform === 'win32' ? process.env.COMSPEC : 'sh', + 'script-shell': process.platform === 'win32' ? 
process.env.COMSPEC : 'sh',
     },
   })
 
-  const scriptShell = npm.config.get('scriptShell')
+  const scriptShell = npm.config.get('script-shell')
   const scriptArgs = isCmdRe.test(scriptShell)
     ? ['/d', '/s', '/c', 'node ./test-stop.js foo']
     : ['-c', 'node ./test-stop.js foo']
diff --git a/deps/npm/test/lib/commands/team.js b/deps/npm/test/lib/commands/team.js
index 792418788bcd10..a13a56d986e35e 100644
--- a/deps/npm/test/lib/commands/team.js
+++ b/deps/npm/test/lib/commands/team.js
@@ -1,39 +1,33 @@
 const t = require('tap')
-const { fake: mockNpm } = require('../../fixtures/mock-npm')
-
-let result = ''
-const libnpmteam = {
-  async add () {},
-  async create () {},
-  async destroy () {},
-  async lsTeams () {},
-  async lsUsers () {},
-  async rm () {},
-}
-const npm = mockNpm({
-  flatOptions: {},
-  config: {
-    loglevel: 'info',
-  },
-  output: (...msg) => {
-    result += msg.join('\n')
-  },
-})
-const mocks = {
-  libnpmteam,
-  'cli-columns': a => a.join(' '),
-}
-
-t.afterEach(() => {
-  result = ''
-  npm.flatOptions = {}
-  npm.config.set('loglevel', 'info')
-})
+const mockNpm = require('../../fixtures/mock-npm')
+
+t.cleanSnapshot = s => s.trim().replace(/\n+/g, '\n')
+
+const mockTeam = async (t, { libnpmteam, ...opts } = {}) => {
+  const mock = await mockNpm(t, {
+    ...opts,
+    mocks: {
+      // XXX: this should be refactored to use the mock registry
+      libnpmteam: libnpmteam || {
+        async add () {},
+        async create () {},
+        async destroy () {},
+        async lsTeams () {},
+        async lsUsers () {},
+        async rm () {},
+      },
+    },
+  })
 
-const Team = t.mock('../../../lib/commands/team.js', mocks)
-const team = new Team(npm)
+  return {
+    ...mock,
+    team: { exec: (args) => mock.npm.exec('team', args) },
+    result: () => mock.joinedOutput(),
+  }
+}
 
 t.test('no args', async t => {
+  const { team } = await mockTeam(t)
   await t.rejects(
     team.exec([]),
     'usage instructions',
@@ -43,29 +37,35 @@ t.test('team add <scope:team> <user>', async t => {
   t.test('default output', async t => {
+    const { team, result } = await mockTeam(t)
+
     await team.exec(['add', '@npmcli:developers', 'foo'])
 
-    t.matchSnapshot(result, 'should output success result for add user')
+    t.matchSnapshot(result(), 'should output success result for add user')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      config: { parseable: true },
+    })
 
     await team.exec(['add', '@npmcli:developers', 'foo'])
 
     t.matchSnapshot(
-      result,
+      result(),
       'should output success result for parseable add user'
     )
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      config: { json: true },
+    })
 
     await team.exec(['add', '@npmcli:developers', 'foo'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       {
         added: true,
         team: 'npmcli:developers',
@@ 
-76,39 +76,47 @@ t.test('team add <scope:team> <user>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      config: { silent: true },
+    })
 
     await team.exec(['add', '@npmcli:developers', 'foo'])
 
-    t.same(result, '', 'should not output success if silent')
+    t.same(result(), '', 'should not output success if silent')
   })
 })
 
 t.test('team create <scope:team>', async t => {
   t.test('default output', async t => {
+    const { team, result } = await mockTeam(t)
+
     await team.exec(['create', '@npmcli:newteam'])
 
-    t.matchSnapshot(result, 'should output success result for create team')
+    t.matchSnapshot(result(), 'should output success result for create team')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      config: { parseable: true },
+    })
 
     await team.exec(['create', '@npmcli:newteam'])
 
     t.matchSnapshot(
-      result,
+      result(),
       'should output parseable success result for create team'
     )
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      config: { json: true },
+    })
 
     await team.exec(['create', '@npmcli:newteam'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       {
         created: true,
         team: 'npmcli:newteam',
@@ -118,31 +126,38 @@ t.test('team create <scope:team>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      config: { silent: true },
+    })
 
     await team.exec(['create', '@npmcli:newteam'])
 
-    t.same(result, '', 'should not output create success if silent')
+    t.same(result(), '', 'should not output create success if silent')
   })
 })
 
 t.test('team destroy <scope:team>', async t => {
   t.test('default output', async t => {
+    const { team, result } = await mockTeam(t)
     await team.exec(['destroy', '@npmcli:newteam'])
-    t.matchSnapshot(result, 'should output success result for destroy team')
+    t.matchSnapshot(result(), 'should output success result for destroy team')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      config: { parseable: true },
+    })
 
     await team.exec(['destroy', '@npmcli:newteam'])
-    t.matchSnapshot(result, 'should output parseable result for destroy team')
+    t.matchSnapshot(result(), 'should output parseable result for destroy team')
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      config: { json: true },
+    })
 
     await team.exec(['destroy', '@npmcli:newteam'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       {
         deleted: true,
         team: 'npmcli:newteam',
@@ -152,14 +167,16 @@ t.test('team destroy <scope:team>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      config: { silent: true },
+    })
 
     await team.exec(['destroy', '@npmcli:newteam'])
 
-    t.same(result, '', 'should not output destroy if silent')
+    t.same(result(), '', 'should not output destroy if silent')
   })
 })
 
 t.test('team ls <scope>', async t => {
-  const libnpmteam = {
+  const teams = {
     async lsTeams () {
       return [
         'npmcli:developers',
@@ -169,28 +186,43 @@ t.test('team ls <scope>', async t => {
     },
   }
 
-  const Team = t.mock('../../../lib/commands/team.js', {
-    ...mocks,
-    libnpmteam,
-  })
-  const team = new Team(npm)
+  const noTeam = {
+    async lsTeams () {
+      return []
+    },
+  }
+
+  const singleTeam = {
+    async lsTeams () {
+      return ['npmcli:developers']
+    },
+  }
 
   t.test('default output', async t => {
+    const { team, result } = 
await mockTeam(t, {
+      libnpmteam: teams,
+    })
     await team.exec(['ls', '@npmcli'])
-    t.matchSnapshot(result, 'should list teams for a given scope')
+    t.matchSnapshot(result(), 'should list teams for a given scope')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: teams,
+      config: { parseable: true },
+    })
 
     await team.exec(['ls', '@npmcli'])
-    t.matchSnapshot(result, 'should list teams for a parseable scope')
+    t.matchSnapshot(result(), 'should list teams for a parseable scope')
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: teams,
+      config: { json: true },
+    })
 
     await team.exec(['ls', '@npmcli'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       [
         'npmcli:designers',
         'npmcli:developers',
@@ -201,75 +233,78 @@ t.test('team ls <scope>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: teams,
+      config: { silent: true },
+    })
 
     await team.exec(['ls', '@npmcli'])
-    t.same(result, '', 'should not list teams if silent')
+    t.same(result(), '', 'should not list teams if silent')
   })
 
   t.test('no teams', async t => {
-    const libnpmteam = {
-      async lsTeams () {
-        return []
-      },
-    }
-
-    const Team = t.mock('../../../lib/commands/team.js', {
-      ...mocks,
-      libnpmteam,
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: noTeam,
     })
-    const team = new Team(npm)
 
     await team.exec(['ls', '@npmcli'])
-    t.matchSnapshot(result, 'should list no teams for a given scope')
+    t.matchSnapshot(result(), 'should list no teams for a given scope')
   })
 
   t.test('single team', async t => {
-    const libnpmteam = {
-      async lsTeams () {
-        return ['npmcli:developers']
-      },
-    }
-
-    const Team = t.mock('../../../lib/commands/team.js', {
-      ...mocks,
-      libnpmteam,
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: singleTeam,
     })
-    const team = new Team(npm)
 
     await team.exec(['ls', '@npmcli'])
-    t.matchSnapshot(result, 'should list single team for a given scope')
+    t.matchSnapshot(result(), 'should list single team for a given scope')
  })
 })
 
 t.test('team ls <scope:team>', async t => {
-  const libnpmteam = {
+  const users = {
     async lsUsers () {
       return ['nlf', 'ruyadorno', 'darcyclarke', 'isaacs']
     },
   }
-  const Team = t.mock('../../../lib/commands/team.js', {
-    ...mocks,
-    libnpmteam,
-  })
-  const team = new Team(npm)
+
+  const singleUser = {
+    async lsUsers () {
+      return ['foo']
+    },
+  }
+
+  const noUsers = {
+    async lsUsers () {
+      return []
+    },
+  }
 
   t.test('default output', async t => {
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: users,
+    })
     await team.exec(['ls', '@npmcli:developers'])
-    t.matchSnapshot(result, 'should list users for a given scope:team')
+    t.matchSnapshot(result(), 'should list users for a given scope:team')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: users,
+      config: { parseable: true },
+    })
 
     await team.exec(['ls', '@npmcli:developers'])
-    t.matchSnapshot(result, 'should list users for a parseable scope:team')
+    t.matchSnapshot(result(), 'should list users for a parseable scope:team')
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: users,
+      config: { json: true },
    })
 
     await team.exec(['ls', '@npmcli:developers'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       [
         'darcyclarke',
'isaacs',
@@ -281,63 +316,55 @@ t.test('team ls <scope:team>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: users,
+      config: { silent: true },
+    })
 
     await team.exec(['ls', '@npmcli:developers'])
-    t.same(result, '', 'should not output users if silent')
+    t.same(result(), '', 'should not output users if silent')
   })
 
   t.test('no users', async t => {
-    const libnpmteam = {
-      async lsUsers () {
-        return []
-      },
-    }
-
-    const Team = t.mock('../../../lib/commands/team.js', {
-      ...mocks,
-      libnpmteam,
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: noUsers,
    })
-    const team = new Team(npm)
 
     await team.exec(['ls', '@npmcli:developers'])
-    t.matchSnapshot(result, 'should list no users for a given scope')
+    t.matchSnapshot(result(), 'should list no users for a given scope')
   })
 
   t.test('single user', async t => {
-    const libnpmteam = {
-      async lsUsers () {
-        return ['foo']
-      },
-    }
-
-    const Team = t.mock('../../../lib/commands/team.js', {
-      ...mocks,
-      libnpmteam,
+    const { team, result } = await mockTeam(t, {
+      libnpmteam: singleUser,
    })
-    const team = new Team(npm)
 
     await team.exec(['ls', '@npmcli:developers'])
-    t.matchSnapshot(result, 'should list single user for a given scope')
+    t.matchSnapshot(result(), 'should list single user for a given scope')
   })
 })
 
 t.test('team rm <scope:team> <user>', async t => {
   t.test('default output', async t => {
+    const { team, result } = await mockTeam(t)
     await team.exec(['rm', '@npmcli:newteam', 'foo'])
-    t.matchSnapshot(result, 'should output success result for remove user')
+    t.matchSnapshot(result(), 'should output success result for remove user')
   })
 
   t.test('--parseable', async t => {
-    npm.flatOptions.parseable = true
+    const { team, result } = await mockTeam(t, {
+      config: { parseable: true },
+    })
 
     await team.exec(['rm', '@npmcli:newteam', 'foo'])
-    t.matchSnapshot(result, 'should output parseable result for remove user')
+    t.matchSnapshot(result(), 'should output parseable result for remove user')
   })
 
   t.test('--json', async t => {
-    npm.flatOptions.json = true
+    const { team, result } = await mockTeam(t, {
+      config: { json: true },
+    })
 
     await team.exec(['rm', '@npmcli:newteam', 'foo'])
 
     t.same(
-      JSON.parse(result),
+      JSON.parse(result()),
       {
         removed: true,
         team: 'npmcli:newteam',
@@ -348,14 +375,17 @@ t.test('team rm <scope:team> <user>', async t => {
   })
 
   t.test('--silent', async t => {
-    npm.config.set('loglevel', 'silent')
+    const { team, result } = await mockTeam(t, {
+      config: { silent: true },
+    })
 
     await team.exec(['rm', '@npmcli:newteam', 'foo'])
 
-    t.same(result, '', 'should not output rm result if silent')
+    t.same(result(), '', 'should not output rm result if silent')
   })
 })
 
-t.test('completion', t => {
-  const { completion } = team
+t.test('completion', async t => {
+  const { npm } = await mockTeam(t)
+  const { completion } = await npm.cmd('team')
 
   t.test('npm team autocomplete', async t => {
     const res = await completion({
diff --git a/deps/npm/test/lib/commands/test.js b/deps/npm/test/lib/commands/test.js
index 3a62b6a2d31b89..4786d72de2725e 100644
--- a/deps/npm/test/lib/commands/test.js
+++ b/deps/npm/test/lib/commands/test.js
@@ -19,11 +19,11 @@ t.test('should run test script from package.json', async t => {
     },
     config: {
       loglevel: 'silent',
-      scriptShell: process.platform === 'win32' ? process.env.COMSPEC : 'sh',
+      'script-shell': process.platform === 'win32' ? 
process.env.COMSPEC : 'sh', }, }) - const scriptShell = npm.config.get('scriptShell') + const scriptShell = npm.config.get('script-shell') const scriptArgs = isCmdRe.test(scriptShell) ? ['/d', '/s', '/c', 'node ./test-test.js foo'] : ['-c', 'node ./test-test.js foo'] diff --git a/deps/npm/test/lib/commands/token.js b/deps/npm/test/lib/commands/token.js index af53f49a130f5a..1fd686a4427c9b 100644 --- a/deps/npm/test/lib/commands/token.js +++ b/deps/npm/test/lib/commands/token.js @@ -1,73 +1,43 @@ const t = require('tap') +const mockNpm = require('../../fixtures/mock-npm') -const mocks = { - profile: {}, - output: () => {}, - readUserInfo: {}, -} -const npm = { - output: (...args) => mocks.output(...args), - config: { validate: () => {} }, -} +const mockToken = async (t, { profile, getCredentialsByURI, readUserInfo, ...opts } = {}) => { + const mocks = {} -const mockToken = (otherMocks) => t.mock('../../../lib/commands/token.js', { - '../../../lib/utils/read-user-info.js': mocks.readUserInfo, - 'npm-profile': mocks.profile, - ...otherMocks, -}) + if (profile) { + mocks['npm-profile'] = profile + } -const tokenWithMocks = (options = {}) => { - const { log, ...mockRequests } = options - - for (const mod in mockRequests) { - if (mod === 'npm') { - mockRequests.npm = { ...npm, ...mockRequests.npm } - mockRequests.npm.config.validate = () => {} - } else { - if (typeof mockRequests[mod] === 'function') { - mocks[mod] = mockRequests[mod] - } else { - for (const key in mockRequests[mod]) { - mocks[mod][key] = mockRequests[mod][key] - } - } - } + if (readUserInfo) { + mocks['{LIB}/utils/read-user-info.js'] = readUserInfo } - const reset = () => { - for (const mod in mockRequests) { - if (mod !== 'npm') { - if (typeof mockRequests[mod] === 'function') { - mocks[mod] = () => {} - } else { - for (const key in mockRequests[mod]) { - delete mocks[mod][key] - } - } - } - } + const mock = await mockNpm(t, { + ...opts, + mocks, + }) + + // XXX: replace with mock registry + if (getCredentialsByURI) { + mock.npm.config.getCredentialsByURI = getCredentialsByURI } - const MockedToken = mockToken(log ? 
{ - 'proc-log': { - info: log.info, - }, - npmlog: { - gauge: log.gauge, - newItem: log.newItem, - }, - } : {}) - const token = new MockedToken(mockRequests.npm || npm) - return [token, reset] -} + const token = { + exec: (args) => mock.npm.exec('token', args), + } -t.test('completion', t => { - t.plan(5) + return { + ...mock, + token, + } +} - const [token] = tokenWithMocks() +t.test('completion', async t => { + const { npm } = await mockToken(t) + const { completion } = await npm.cmd('token') const testComp = (argv, expect) => { - t.resolveMatch(token.completion({ conf: { argv: { remain: argv } } }), expect, argv.join(' ')) + t.resolveMatch(completion({ conf: { argv: { remain: argv } } }), expect, argv.join(' ')) } testComp(['npm', 'token'], ['list', 'revoke', 'create']) @@ -75,32 +45,18 @@ t.test('completion', t => { testComp(['npm', 'token', 'revoke'], []) testComp(['npm', 'token', 'create'], []) - t.rejects(token.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), { + t.rejects(completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), { message: 'foobar not recognize', }) }) t.test('token foobar', async t => { - t.plan(2) - - const [token, reset] = tokenWithMocks({ - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'shows a gauge') - }, - }, - }, - }) - - t.teardown(reset) + const { token } = await mockToken(t) await t.rejects(token.exec(['foobar']), /foobar is not a recognized subcommand/) }) t.test('token list', async t => { - t.plan(14) - const now = new Date().toISOString() const tokens = [ { @@ -121,15 +77,11 @@ t.test('token list', async t => { }, ] - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', otp: '123456' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', otp: '123456' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, profile: { listTokens: conf => { @@ -137,39 +89,23 @@ t.test('token list', async t => { return tokens }, }, - log: { - gauge: { - show: name => { - t.equal(name, 'token') - }, - }, - info: (type, msg) => { - t.equal(type, 'token') - t.equal(msg, 'getting list') - }, - }, - output: spec => { - const lines = spec.split(/\r?\n/) - t.match(lines[3], ' abcd123 ', 'includes the trimmed key') - t.match(lines[3], ' efgh56… ', 'includes the trimmed token') - t.match(lines[3], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp') - t.match(lines[3], ' no ', 'includes the "no" string for readonly state') - t.match(lines[5], ' abcd125 ', 'includes the trimmed key') - t.match(lines[5], ' hgfe87… ', 'includes the trimmed token') - t.match(lines[5], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp') - t.match(lines[5], ' yes ', 'includes the "no" string for readonly state') - t.match(lines[5], ` ${tokens[1].cidr_whitelist.join(',')} `, 'includes the cidr whitelist') - }, }) - t.teardown(reset) - await token.exec([]) + + const lines = joinedOutput().split(/\r?\n/) + t.match(lines[3], ' abcd123 ', 'includes the trimmed key') + t.match(lines[3], ' efgh56… ', 'includes the trimmed token') + t.match(lines[3], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp') + t.match(lines[3], ' no ', 'includes the 
"no" string for readonly state') + t.match(lines[5], ' abcd125 ', 'includes the trimmed key') + t.match(lines[5], ' hgfe87… ', 'includes the trimmed token') + t.match(lines[5], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp') + t.match(lines[5], ' yes ', 'includes the "no" string for readonly state') + t.match(lines[5], ` ${tokens[1].cidr_whitelist.join(',')} `, 'includes the cidr whitelist') }) t.test('token list json output', async t => { - t.plan(7) - const now = new Date().toISOString() const tokens = [ { @@ -182,15 +118,11 @@ t.test('token list json output', async t => { }, ] - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', json: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { username: 'foo', password: 'bar' } - }, - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', json: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { username: 'foo', password: 'bar' } }, profile: { listTokens: conf => { @@ -202,32 +134,16 @@ t.test('token list json output', async t => { return tokens }, }, - log: { - gauge: { - show: name => { - t.equal(name, 'token') - }, - }, - info: (type, msg) => { - t.equal(type, 'token') - t.equal(msg, 'getting list') - }, - }, - output: spec => { - t.type(spec, 'string', 'is called with a string') - const parsed = JSON.parse(spec) - t.match(parsed, tokens, 'prints the json parsed tokens') - }, - }) - t.teardown(reset) + }) await token.exec(['list']) + + const parsed = JSON.parse(joinedOutput()) + t.match(parsed, tokens, 'prints the json parsed tokens') }) t.test('token list parseable output', async t => { - t.plan(11) - const now = new Date().toISOString() const tokens = [ { @@ -248,17 +164,11 @@ t.test('token list parseable output', async t => { }, ] - let callCount = 0 - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { auth: Buffer.from('foo:bar').toString('base64') } - }, - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', parseable: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { auth: Buffer.from('foo:bar').toString('base64') } }, profile: { listTokens: conf => { @@ -270,82 +180,43 @@ t.test('token list parseable output', async t => { return tokens }, }, - log: { - gauge: { - show: name => { - t.equal(name, 'token') - }, - }, - info: (type, msg) => { - t.equal(type, 'token') - t.equal(msg, 'getting list') - }, - }, - output: spec => { - ++callCount - t.type(spec, 'string', 'is called with a string') - if (callCount === 1) { - t.equal( - spec, - ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), - 'prints header' - ) - } else if (callCount === 2) { - t.equal( - spec, - [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), - 'prints token info' - ) - } else { - t.equal( - spec, - [ - tokens[1].key, - tokens[1].token, - tokens[1].created, - tokens[1].readonly, - tokens[1].cidr_whitelist.join(','), - ].join('\t'), - 'prints token info' - ) - } - }, }) - t.teardown(reset) - await 
token.exec(['list']) + + const lines = joinedOutput().split(/\r?\n/) + + t.equal( + lines[0], + ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), + 'prints header' + ) + + t.equal( + lines[1], + [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), + 'prints token info' + ) + + t.equal( + lines[2], + [ + tokens[1].key, + tokens[1].token, + tokens[1].created, + tokens[1].readonly, + tokens[1].cidr_whitelist.join(','), + ].join('\t'), + 'prints token info' + ) }) t.test('token revoke', async t => { - t.plan(9) - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return {} - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - newItem: (action, len) => { - t.equal(action, 'removing tokens') - t.equal(len, 0) - return { - info: (name, progress) => { - t.equal(name, 'token') - t.equal(progress, 'getting existing list') - }, - } - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return {} }, profile: { listTokens: conf => { @@ -356,45 +227,19 @@ t.test('token revoke', async t => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: spec => { - t.equal(spec, 'Removed 1 token') - }, }) - t.teardown(reset) - await token.exec(['rm', 'abcd']) + + t.equal(joinedOutput(), 'Removed 1 token') }) t.test('token revoke multiple tokens', async t => { - t.plan(9) - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - newItem: (action, len) => { - t.equal(action, 'removing tokens') - t.equal(len, 0) - return { - info: (name, progress) => { - t.equal(name, 'token') - t.equal(progress, 'getting existing list') - }, - } - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, profile: { listTokens: () => Promise.resolve([{ key: 'abcd1234' }, { key: 'efgh5678' }]), @@ -403,45 +248,19 @@ t.test('token revoke multiple tokens', async t => { t.ok(['abcd1234', 'efgh5678'].includes(key), 'deletes the correct token') }, }, - output: spec => { - t.equal(spec, 'Removed 2 tokens') - }, }) - t.teardown(reset) - await token.exec(['revoke', 'abcd', 'efgh']) + + t.equal(joinedOutput(), 'Removed 2 tokens') }) t.test('token revoke json output', async t => { - t.plan(9) - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', json: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - newItem: (action, len) => { - t.equal(action, 'removing tokens') - t.equal(len, 0) - 
return { - info: (name, progress) => { - t.equal(name, 'token') - t.equal(progress, 'getting existing list') - }, - } - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', json: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, profile: { listTokens: () => Promise.resolve([{ key: 'abcd1234' }]), @@ -449,47 +268,21 @@ t.test('token revoke json output', async t => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: spec => { - t.type(spec, 'string', 'is given a string') - const parsed = JSON.parse(spec) - t.same(parsed, ['abcd1234'], 'logs the token as json') - }, - }) - t.teardown(reset) + }) await token.exec(['delete', 'abcd']) + + const parsed = JSON.parse(joinedOutput()) + t.same(parsed, ['abcd1234'], 'logs the token as json') }) t.test('token revoke parseable output', async t => { - t.plan(8) - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - newItem: (action, len) => { - t.equal(action, 'removing tokens') - t.equal(len, 0) - return { - info: (name, progress) => { - t.equal(name, 'token') - t.equal(progress, 'getting existing list') - }, - } - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', parseable: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, profile: { listTokens: () => Promise.resolve([{ key: 'abcd1234' }]), @@ -497,45 +290,19 @@ t.test('token revoke parseable output', async t => { t.equal(key, 'abcd1234', 'deletes the correct token') }, }, - output: spec => { - t.equal(spec, 'abcd1234', 'logs the token as a string') - }, }) - t.teardown(reset) - await token.exec(['remove', 'abcd']) + + t.equal(joinedOutput(), 'abcd1234', 'logs the token as a string') }) t.test('token revoke by token', async t => { - t.plan(8) - - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - newItem: (action, len) => { - t.equal(action, 'removing tokens') - t.equal(len, 0) - return { - info: (name, progress) => { - t.equal(name, 'token') - t.equal(progress, 'getting existing list') - }, - } - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, profile: { listTokens: () => Promise.resolve([{ key: 'abcd1234', token: 'efgh5678' }]), @@ -543,143 +310,60 @@ t.test('token revoke by token', async t => { t.equal(key, 'efgh5678', 'passes through user input') }, }, - output: spec => { - t.equal(spec, 'Removed 1 token') - }, }) - t.teardown(reset) - await token.exec(['rm', 'efgh5678']) + 
t.equal(joinedOutput(), 'Removed 1 token')
 })
 
 t.test('token revoke requires an id', async t => {
-  t.plan(2)
-
-  const [token, reset] = tokenWithMocks({
-    log: {
-      gauge: {
-        show: name => {
-          t.equal(name, 'token')
-        },
-      },
-    },
-  })
-
-  t.teardown(reset)
+  const { token } = await mockToken(t)
 
   await t.rejects(token.exec(['rm']), /`<tokenKey>` argument is required/)
 })
 
 t.test('token revoke ambiguous id errors', async t => {
-  t.plan(7)
-
-  const [token, reset] = tokenWithMocks({
-    npm: {
-      flatOptions: { registry: 'https://registry.npmjs.org' },
-      config: {
-        getCredentialsByURI: uri => {
-          t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
-          return { token: 'thisisnotarealtoken' }
-        },
-      },
-    },
-    log: {
-      gauge: {
-        show: name => {
-          t.equal(name, 'token', 'starts a gauge')
-        },
-      },
-      newItem: (action, len) => {
-        t.equal(action, 'removing tokens')
-        t.equal(len, 0)
-        return {
-          info: (name, progress) => {
-            t.equal(name, 'token')
-            t.equal(progress, 'getting existing list')
-          },
-        }
-      },
+  const { token } = await mockToken(t, {
+    config: { registry: 'https://registry.npmjs.org' },
+    getCredentialsByURI: uri => {
+      t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry')
+      return { token: 'thisisnotarealtoken' }
     },
     profile: {
       listTokens: () => Promise.resolve([{ key: 'abcd1234' }, { key: 'abcd5678' }]),
     },
   })
 
-  t.teardown(reset)
-
   await t.rejects(token.exec(['rm', 'abcd']), /Token ID "abcd" was ambiguous/)
 })
 
 t.test('token revoke unknown id errors', async t => {
-  t.plan(7)
-
-  const [token, reset] = tokenWithMocks({
-    npm: {
-      flatOptions: { registry: 'https://registry.npmjs.org' },
-      config: {
-        getCredentialsByURI: uri => {
-          t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
-          return { token: 'thisisnotarealtoken' }
-        },
-      },
-    },
-    log: {
-      gauge: {
-        show: name => {
-          t.equal(name, 'token', 'starts a gauge')
-        },
-      },
-      newItem: (action, len) => {
-        t.equal(action, 'removing tokens')
-        t.equal(len, 0)
-        return {
-          info: (name, progress) => {
-            t.equal(name, 'token')
-            t.equal(progress, 'getting existing list')
-          },
-        }
-      },
+  const { token } = await mockToken(t, {
+    config: { registry: 'https://registry.npmjs.org' },
+    getCredentialsByURI: uri => {
+      t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry')
+      return { token: 'thisisnotarealtoken' }
     },
     profile: {
       listTokens: () => Promise.resolve([{ key: 'abcd1234' }]),
     },
   })
 
-  t.teardown(reset)
-
   await t.rejects(token.exec(['rm', 'efgh']), /Unknown token id or value "efgh"./)
 })
 
 t.test('token create', async t => {
-  t.plan(14)
-
   const now = new Date().toISOString()
   const password = 'thisisnotreallyapassword'
 
-  const [token, reset] = tokenWithMocks({
-    npm: {
-      flatOptions: {
-        registry: 'https://registry.npmjs.org',
-        cidr: ['10.0.0.0/8', '192.168.1.0/24'],
-      },
-      config: {
-        getCredentialsByURI: uri => {
-          t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
-          return { token: 'thisisnotarealtoken' }
-        },
-      },
+  const { token, joinedOutput } = await mockToken(t, {
+    config: {
+      registry: 'https://registry.npmjs.org',
+      cidr: ['10.0.0.0/8', '192.168.1.0/24'],
     },
-    log: {
-      gauge: {
-        show: name => {
-          t.equal(name, 'token', 'starts a gauge')
-        },
-      },
-      info: (name, message) => {
-        t.equal(name, 'token')
-        t.equal(message, 'creating')
-      },
+    getCredentialsByURI: uri => {
+      t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry')
+      return { token: 'thisisnotarealtoken' }
     },
     readUserInfo: {
       password: () => Promise.resolve(password),
    },
@@ 
-687,7 +371,7 @@ t.test('token create', async t => { profile: { createToken: (pw, readonly, cidr) => { t.equal(pw, password) - t.equal(readonly, undefined) + t.equal(readonly, false) t.same(cidr, ['10.0.0.0/8', '192.168.1.0/24'], 'defaults to empty array') return { key: 'abcd1234', @@ -699,49 +383,30 @@ t.test('token create', async t => { } }, }, - output: spec => { - const lines = spec.split(/\r?\n/) - t.match(lines[1], 'token') - t.match(lines[1], 'efgh5678', 'prints the whole token') - t.match(lines[3], 'created') - t.match(lines[3], now, 'prints the correct timestamp') - t.match(lines[5], 'readonly') - t.match(lines[5], 'false', 'prints the readonly flag') - t.match(lines[7], 'cidr_whitelist') - }, - }) - t.teardown(reset) + }) await token.exec(['create']) + + const lines = joinedOutput().split(/\r?\n/) + t.match(lines[1], 'token') + t.match(lines[1], 'efgh5678', 'prints the whole token') + t.match(lines[3], 'created') + t.match(lines[3], now, 'prints the correct timestamp') + t.match(lines[5], 'readonly') + t.match(lines[5], 'false', 'prints the readonly flag') + t.match(lines[7], 'cidr_whitelist') }) t.test('token create json output', async t => { - t.plan(9) - const now = new Date().toISOString() const password = 'thisisnotreallyapassword' - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', json: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - info: (name, message) => { - t.equal(name, 'token') - t.equal(message, 'creating') - }, + const { token } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', json: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, readUserInfo: { password: () => Promise.resolve(password), @@ -749,7 +414,7 @@ t.test('token create json output', async t => { profile: { createToken: (pw, readonly, cidr) => { t.equal(pw, password) - t.equal(readonly, undefined) + t.equal(readonly, false) t.same(cidr, [], 'defaults to empty array') return { key: 'abcd1234', @@ -772,38 +437,18 @@ t.test('token create json output', async t => { }, }) - t.teardown(reset) - await token.exec(['create']) }) t.test('token create parseable output', async t => { - t.plan(11) - const now = new Date().toISOString() const password = 'thisisnotreallyapassword' - let callCount = 0 - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', parseable: true }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, - info: (name, message) => { - t.equal(name, 'token') - t.equal(message, 'creating') - }, + const { token, joinedOutput } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', parseable: true }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, readUserInfo: { password: () => Promise.resolve(password), @@ -811,7 +456,7 @@ t.test('token create parseable output', async t => { profile: { createToken: (pw, 
readonly, cidr) => { t.equal(pw, password) - t.equal(readonly, undefined) + t.equal(readonly, false) t.same(cidr, [], 'defaults to empty array') return { key: 'abcd1234', @@ -823,54 +468,32 @@ t.test('token create parseable output', async t => { } }, }, - output: spec => { - ++callCount - if (callCount === 1) { - t.match(spec, 'token\tefgh5678', 'prints the token') - } else if (callCount === 2) { - t.match(spec, `created\t${now}`, 'prints the created timestamp') - } else if (callCount === 3) { - t.match(spec, 'readonly\tfalse', 'prints the readonly flag') - } else { - t.match(spec, 'cidr_whitelist\t', 'prints the cidr whitelist') - } - }, }) - t.teardown(reset) - await token.exec(['create']) + + const spec = joinedOutput().split(/\r?\n/) + + t.match(spec[0], 'token\tefgh5678', 'prints the token') + t.match(spec[1], `created\t${now}`, 'prints the created timestamp') + t.match(spec[2], 'readonly\tfalse', 'prints the readonly flag') + t.match(spec[3], 'cidr_whitelist\t', 'prints the cidr whitelist') }) t.test('token create ipv6 cidr', async t => { - t.plan(3) - const password = 'thisisnotreallyapassword' - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, + const { token } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, readUserInfo: { password: () => Promise.resolve(password), }, }) - t.teardown(reset) - await t.rejects( token.exec(['create']), { @@ -882,34 +505,19 @@ t.test('token create ipv6 cidr', async t => { }) t.test('token create invalid cidr', async t => { - t.plan(3) - const password = 'thisisnotreallyapassword' - const [token, reset] = tokenWithMocks({ - npm: { - flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, - config: { - getCredentialsByURI: uri => { - t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry') - return { token: 'thisisnotarealtoken' } - }, - }, - }, - log: { - gauge: { - show: name => { - t.equal(name, 'token', 'starts a gauge') - }, - }, + const { token } = await mockToken(t, { + config: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, + getCredentialsByURI: uri => { + t.equal(uri, 'https://registry.npmjs.org/', 'requests correct registry') + return { token: 'thisisnotarealtoken' } }, readUserInfo: { password: () => Promise.resolve(password), }, }) - t.teardown(reset) - await t.rejects( token.exec(['create']), { code: 'EINVALIDCIDR', message: /CIDR whitelist contains invalid CIDR entry: apple\/cider/ }, diff --git a/deps/npm/test/lib/commands/uninstall.js b/deps/npm/test/lib/commands/uninstall.js index ec7961f9c96c4e..59a517d144d38a 100644 --- a/deps/npm/test/lib/commands/uninstall.js +++ b/deps/npm/test/lib/commands/uninstall.js @@ -1,225 +1,205 @@ const t = require('tap') const fs = require('fs') const { resolve } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') - -const npm = mockNpm({ - globalDir: '', - config: { - global: false, - prefix: '', - }, - localPrefix: '', -}) -const mocks = { - '../../../lib/utils/reify-finish.js': () => 
Promise.resolve(), -} - -const Uninstall = t.mock('../../../lib/commands/uninstall.js', mocks) -const uninstall = new Uninstall(npm) +const _mockNpm = require('../../fixtures/mock-npm') + +const mockNpm = async (t, opts = {}) => { + const res = await _mockNpm(t, { + ...opts, + mocks: { + ...opts.mocks, + '{LIB}/utils/reify-finish.js': async () => {}, + }, + }) -t.afterEach(() => { - npm.globalDir = '' - npm.prefix = '' - npm.localPrefix = '' - npm.flatOptions.global = false - npm.flatOptions.prefix = '' -}) + return { + ...res, + uninstall: (args) => res.npm.exec('uninstall', args), + } +} t.test('remove single installed lib', async t => { - const path = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-rm-single-lib', - version: '1.0.0', - dependencies: { - a: '*', - b: '*', - }, - }), - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - }, - }, - 'package-lock.json': JSON.stringify({ - name: 'test-rm-single-lib', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - packages: { - '': { - name: 'test-rm-single-lib', - version: '1.0.0', - dependencies: { - a: '*', - }, - }, - 'node_modules/a': { - version: '1.0.0', - }, - 'node_modules/b': { - version: '1.0.0', + const { uninstall, prefix } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-rm-single-lib', + version: '1.0.0', + dependencies: { + a: '*', + b: '*', }, - }, - dependencies: { + }), + node_modules: { a: { - version: '1.0.0', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, b: { - version: '1.0.0', + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), }, }, - }), + 'package-lock.json': JSON.stringify({ + name: 'test-rm-single-lib', + version: '1.0.0', + lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'test-rm-single-lib', + version: '1.0.0', + dependencies: { + a: '*', + }, + }, + 'node_modules/a': { + version: '1.0.0', + }, + 'node_modules/b': { + version: '1.0.0', + }, + }, + dependencies: { + a: { + version: '1.0.0', + }, + b: { + version: '1.0.0', + }, + }, + }), + }, }) - const b = resolve(path, 'node_modules/b') - t.ok(() => fs.statSync(b)) + const b = resolve(prefix, 'node_modules/b') + t.ok(fs.statSync(b)) - npm.localPrefix = path - - await uninstall.exec(['b']) + await uninstall(['b']) t.throws(() => fs.statSync(b), 'should have removed package from npm') }) t.test('remove multiple installed libs', async t => { - const path = t.testdir({ - node_modules: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), - }, - b: { - 'package.json': JSON.stringify({ - name: 'b', - version: '1.0.0', - }), - }, - }, - 'package-lock.json': JSON.stringify({ - name: 'test-rm-single-lib', - version: '1.0.0', - lockfileVersion: 2, - requires: true, - packages: { - '': { - name: 'test-rm-single-lib', - version: '1.0.0', - dependencies: { - a: '*', - }, - }, - 'node_modules/a': { - version: '1.0.0', - }, - 'node_modules/b': { - version: '1.0.0', - }, - }, - dependencies: { + const { uninstall, prefix } = await mockNpm(t, { + prefixDir: { + node_modules: { a: { - version: '1.0.0', + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, b: { - version: '1.0.0', + 'package.json': JSON.stringify({ + name: 'b', + version: '1.0.0', + }), }, }, - }), + 'package-lock.json': JSON.stringify({ + name: 'test-rm-single-lib', + version: '1.0.0', + 
lockfileVersion: 2, + requires: true, + packages: { + '': { + name: 'test-rm-single-lib', + version: '1.0.0', + dependencies: { + a: '*', + }, + }, + 'node_modules/a': { + version: '1.0.0', + }, + 'node_modules/b': { + version: '1.0.0', + }, + }, + dependencies: { + a: { + version: '1.0.0', + }, + b: { + version: '1.0.0', + }, + }, + }), + }, }) - const a = resolve(path, 'node_modules/a') - const b = resolve(path, 'node_modules/b') - t.ok(() => fs.statSync(a)) - t.ok(() => fs.statSync(b)) - - npm.localPrefix = path + const a = resolve(prefix, 'node_modules/a') + const b = resolve(prefix, 'node_modules/b') + t.ok(fs.statSync(a)) + t.ok(fs.statSync(b)) - await uninstall.exec(['b']) + await uninstall(['b']) t.throws(() => fs.statSync(a), 'should have removed a package from nm') t.throws(() => fs.statSync(b), 'should have removed b package from nm') }) t.test('no args local', async t => { - const path = t.testdir() - - npm.flatOptions.prefix = path + const { uninstall } = await mockNpm(t) await t.rejects( - uninstall.exec([]), + uninstall([]), /Must provide a package name to remove/, 'should throw package name required error' ) }) t.test('no args global', async t => { - const path = t.testdir({ - lib: { - node_modules: { - a: t.fixture('symlink', '../../projects/a'), - }, + const { uninstall, npm } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), }, - projects: { - a: { - 'package.json': JSON.stringify({ - name: 'a', - version: '1.0.0', - }), + globalPrefixDir: { + node_modules: { + a: t.fixture('symlink', '../../prefix'), }, }, + config: { global: true }, }) - npm.localPrefix = resolve(path, 'projects', 'a') - npm.globalDir = resolve(path, 'lib', 'node_modules') - npm.config.set('global', true) - - const a = resolve(path, 'lib/node_modules/a') - t.ok(() => fs.statSync(a)) + const a = resolve(npm.globalDir, 'a') + t.ok(fs.statSync(a)) - await uninstall.exec([]) + await uninstall([]) t.throws(() => fs.statSync(a), 'should have removed global nm symlink') }) t.test('no args global but no package.json', async t => { - const path = t.testdir({}) - - npm.prefix = path - npm.localPrefix = path - npm.flatOptions.global = true + const { uninstall } = await mockNpm(t, { + config: { global: true }, + }) await t.rejects( - uninstall.exec([]), + uninstall([]), /npm uninstall/ ) }) t.test('unknown error reading from localPrefix package.json', async t => { - const path = t.testdir({}) - - const Uninstall = t.mock('../../../lib/commands/uninstall.js', { - ...mocks, - 'read-package-json-fast': () => Promise.reject(new Error('ERR')), + const { uninstall } = await mockNpm(t, { + config: { global: true }, + mocks: { + 'read-package-json-fast': async () => { + throw new Error('ERR') + }, + }, }) - const uninstall = new Uninstall(npm) - - npm.prefix = path - npm.localPrefix = path - npm.flatOptions.global = true await t.rejects( - uninstall.exec([]), + uninstall([]), /ERR/, 'should throw unknown error' ) diff --git a/deps/npm/test/lib/commands/update.js b/deps/npm/test/lib/commands/update.js index fe52554c95f255..f42fb8a4146b02 100644 --- a/deps/npm/test/lib/commands/update.js +++ b/deps/npm/test/lib/commands/update.js @@ -1,166 +1,83 @@ const t = require('tap') -const { resolve } = require('path') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const _mockNpm = require('../../fixtures/mock-npm') + +// XXX: this test has been refactored to use the new mockNpm +// but it still only asserts the options passed to arborist. 
+// TODO: make this really test npm update scenarios
+const mockUpdate = async (t, { exec = [], ...opts } = {}) => {
+  let ctor = null
+  let reify = null
+  let finish = null
+
+  const res = await _mockNpm(t, {
+    ...opts,
+    mocks: {
+      '@npmcli/arborist': class Arborist {
+        constructor (o) {
+          ctor = o
+        }
+
+        reify (o) {
+          reify = o
+        }
+      },
+      '{LIB}/utils/reify-finish.js': (_, o) => {
+        finish = o
+      },
+    },
+  })
 
-const config = {
-  depth: 0,
-  global: false,
-}
-const noop = () => null
-const npm = mockNpm({
-  globalDir: '',
-  config,
-  prefix: '',
-})
-const mocks = {
-  '@npmcli/arborist': class {
-    reify () {}
-  },
-  '../../../lib/utils/reify-finish.js': noop,
-}
+  await res.npm.exec('update', exec)
 
-t.afterEach(() => {
-  npm.prefix = ''
-  config.global = false
-  npm.globalDir = ''
-})
+  return {
+    ...res,
+    ctor,
+    reify,
+    finish,
+  }
+}
 
 t.test('no args', async t => {
-  t.plan(4)
-
-  npm.prefix = '/project/a'
-
-  class Arborist {
-    constructor (args) {
-      const { log, ...rest } = args
-      t.same(
-        rest,
-        {
-          ...npm.flatOptions,
-          path: npm.prefix,
-          save: false,
-          workspaces: null,
-        },
-        'should call arborist contructor with expected args'
-      )
-    }
+  const { ctor, reify, finish, prefix } = await mockUpdate(t)
 
-    reify ({ save, update }) {
-      t.equal(save, false, 'should default to save=false')
-      t.equal(update, true, 'should update all deps')
-    }
-  }
+  t.equal(ctor.path, prefix, 'path')
+  t.equal(ctor.save, false, 'should default to save=false')
+  t.equal(ctor.workspaces, undefined, 'workspaces')
 
-  const Update = t.mock('../../../lib/commands/update.js', {
-    ...mocks,
-    '../../../lib/utils/reify-finish.js': (npm, arb) => {
-      t.match(arb, Arborist, 'should reify-finish with arborist instance')
-    },
-    '@npmcli/arborist': Arborist,
-  })
-  const update = new Update(npm)
+  t.equal(reify.update, true, 'should update all deps')
 
-  await update.exec([])
+  t.equal(finish.constructor.name, 'Arborist')
 })
 
 t.test('with args', async t => {
-  t.plan(4)
-
-  npm.prefix = '/project/a'
-  config.save = true
-
-  class Arborist {
-    constructor (args) {
-      const { log, ...rest } = args
-      t.same(
-        rest,
-        {
-          ...npm.flatOptions,
-          path: npm.prefix,
-          save: true,
-          workspaces: null,
-        },
-        'should call arborist contructor with expected args'
-      )
-    }
-
-    reify ({ save, update }) {
-      t.equal(save, true, 'should pass save if manually set')
-      t.same(update, ['ipt'], 'should update listed deps')
-    }
-  }
-
-  const Update = t.mock('../../../lib/commands/update.js', {
-    ...mocks,
-    '../../../lib/utils/reify-finish.js': (npm, arb) => {
-      t.match(arb, Arborist, 'should reify-finish with arborist instance')
-    },
-    '@npmcli/arborist': Arborist,
+  const { ctor, reify } = await mockUpdate(t, {
+    config: { save: true },
+    exec: ['ipt'],
   })
-  const update = new Update(npm)
 
-  await update.exec(['ipt'])
+  t.equal(ctor.save, true, 'save')
+  t.strictSame(reify.update, ['ipt'], 'ipt')
 })
 
 t.test('update --depth=<number>', async t => {
-  t.plan(2)
-
-  npm.prefix = '/project/a'
-  config.depth = 1
-
-  const Update = t.mock('../../../lib/commands/update.js', {
-    ...mocks,
-    'proc-log': {
-      warn: (title, msg) => {
-        t.equal(title, 'update', 'should print expected title')
-        t.match(
-          msg,
-          /The --depth option no longer has any effect/,
-          'should print expected warning message'
-        )
-      },
-    },
+  const { logs } = await mockUpdate(t, {
+    config: { depth: 1 },
   })
-  const update = new Update(npm)
 
-  await update.exec([])
+  const [title, msg] = logs.warn[0]
+  t.equal(title, 'update', 'should print expected title')
+  t.match(
+    msg,
+    /The --depth option 
no longer has any effect/, + 'should print expected warning message' + ) }) t.test('update --global', async t => { - t.plan(2) - - const normalizePath = p => p.replace(/\\+/g, '/') - const redactCwd = (path) => normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') - - npm.prefix = '/project/a' - npm.globalDir = resolve(process.cwd(), 'global/lib/node_modules') - config.global = true - - class Arborist { - constructor (args) { - const { path, log, ...rest } = args - t.same( - rest, - { ...npm.flatOptions, save: true, workspaces: undefined }, - 'should call arborist contructor with expected options' - ) - - t.equal( - redactCwd(path), - '{CWD}/global/lib', - 'should run with expected prefix' - ) - } - - reify () {} - } - - const Update = t.mock('../../../lib/commands/update.js', { - ...mocks, - '@npmcli/arborist': Arborist, + const { ctor, globalPrefix } = await mockUpdate(t, { + config: { global: true }, }) - const update = new Update(npm) - await update.exec([]) + t.match(ctor.path, globalPrefix) + t.ok(ctor.path.startsWith(globalPrefix)) }) diff --git a/deps/npm/test/lib/commands/version.js b/deps/npm/test/lib/commands/version.js index 154f6a6f83361a..c48ff827fa28cb 100644 --- a/deps/npm/test/lib/commands/version.js +++ b/deps/npm/test/lib/commands/version.js @@ -1,75 +1,52 @@ const { readFileSync, statSync } = require('fs') const { resolve } = require('path') const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') +const _mockNpm = require('../../fixtures/mock-npm') const mockGlobals = require('../../fixtures/mock-globals.js') -let result = [] - -const noop = () => null -const config = { - 'git-tag-version': true, - 'tag-version-prefix': 'v', - json: false, -} -const flatOptions = { - workspacesUpdate: true, -} -const npm = mockNpm({ - config, - flatOptions, - localPrefix: '', - prefix: '', - version: '1.0.0', - output: (...msg) => { - for (const m of msg) { - result.push(m) - } - }, -}) -const mocks = { - '../../../lib/utils/reify-finish.js': noop, +const mockNpm = async (t, opts = {}) => { + const res = await _mockNpm(t, { + ...opts, + mocks: { + ...opts.mocks, + '{ROOT}/package.json': { version: '1.0.0' }, + }, + }) + return { + ...res, + version: { exec: (args) => res.npm.exec('version', args) }, + result: () => res.outputs[0], + } } -const Version = t.mock('../../../lib/commands/version.js', mocks) -const version = new Version(npm) - -t.afterEach(() => { - flatOptions.workspacesUpdate = true - config.json = false - npm.localPrefix = '' - npm.prefix = '' - result = [] -}) - -t.test('node@1', t => { +t.test('node@1', async t => { mockGlobals(t, { 'process.versions': { node: '1.0.0' } }, { replace: true }) t.test('no args', async t => { - const prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'test-version-no-args', - version: '3.2.1', - }), + const { version, result } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-version-no-args', + version: '3.2.1', + }), + }, }) - npm.prefix = prefix await version.exec([]) - t.same( - result, - [ - { - 'test-version-no-args': '3.2.1', - node: '1.0.0', - npm: '1.0.0', - }, - ], + t.strictSame( + result(), + [{ + 'test-version-no-args': '3.2.1', + node: '1.0.0', + npm: '1.0.0', + }], 'should output expected values for various versions in npm' ) }) t.test('too many args', async t => { + const { version } = await mockNpm(t) await t.rejects( version.exec(['foo', 'bar']), /npm version/, @@ -78,6 +55,8 @@ t.test('node@1', t => { }) 
t.test('completion', async t => { + const { npm } = await mockNpm(t) + const version = await npm.cmd('version') const testComp = async (argv, expect) => { const res = await version.completion({ conf: { argv: { remain: argv } } }) t.strictSame(res, expect, argv.join(' ')) @@ -88,99 +67,79 @@ t.test('node@1', t => { ['major', 'minor', 'patch', 'premajor', 'preminor', 'prepatch', 'prerelease', 'from-git'] ) await testComp(['npm', 'version', 'major'], []) - - t.end() }) t.test('failure reading package.json', async t => { - const prefix = t.testdir({}) - npm.prefix = prefix + const { version, result } = await mockNpm(t) await version.exec([]) - t.same( - result, - [ - { - npm: '1.0.0', - node: '1.0.0', - }, - ], + t.strictSame( + result(), + [{ + npm: '1.0.0', + node: '1.0.0', + }], 'should not have package name on returning object' ) }) - t.end() }) -t.test('empty versions', t => { +t.test('empty versions', async t => { mockGlobals(t, { 'process.versions': {} }, { replace: true }) t.test('--json option', async t => { - const prefix = t.testdir({}) - config.json = true - npm.prefix = prefix + const { version, result } = await mockNpm(t, { + config: { json: true }, + }) await version.exec([]) - t.same(result, ['{\n "npm": "1.0.0"\n}'], 'should return json stringified result') + t.same(result(), ['{\n "npm": "1.0.0"\n}'], 'should return json stringified result') }) t.test('with one arg', async t => { - const Version = t.mock('../../../lib/commands/version.js', { - ...mocks, - libnpmversion: (arg, opts) => { - t.equal(arg, 'major', 'should forward expected value') - t.match( - opts, - { - path: '', - }, - 'should forward expected options' - ) - return '4.0.0' + const { version, result } = await mockNpm(t, { + mocks: { + libnpmversion: () => '4.0.0', }, }) - const version = new Version(npm) await version.exec(['major']) - t.same(result, ['v4.0.0'], 'outputs the new version prefixed by the tagVersionPrefix') + t.same(result(), ['v4.0.0'], 'outputs the new version prefixed by the tagVersionPrefix') }) t.test('workspaces', async t => { - t.teardown(() => { - npm.localPrefix = '' - npm.prefix = '' - }) - t.test('no args, all workspaces', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], + const { version, result } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), - }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), }, + config: { workspaces: true }, }) - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces([], []) + + await version.exec([]) t.same( - result, + result(), [ { 'workspaces-test': '1.0.0', @@ -194,35 +153,38 @@ t.test('empty versions', t => { }) t.test('no args, single workspaces', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], + const { version, result } = await 
mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), + config: { + workspace: 'workspace-a', }, }) - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces([], ['workspace-a']) + + await version.exec([]) t.same( - result, + result(), [ { 'workspaces-test': '1.0.0', @@ -235,39 +197,40 @@ t.test('empty versions', t => { }) t.test('no args, all workspaces, workspace with missing name or version', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + const { version, result } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], + }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + }), + }, + 'workspace-c': { + 'package.json': JSON.stringify({ + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), - }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - }), - }, - 'workspace-c': { - 'package.json': JSON.stringify({ - version: '1.0.0', - }), }, + config: { workspaces: true }, }) - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces([], []) + + await version.exec([]) t.same( - result, + result(), [ { 'workspaces-test': '1.0.0', @@ -280,151 +243,145 @@ t.test('empty versions', t => { }) t.test('with one arg, all workspaces', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], + const { version, outputs, prefix } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), - }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), }, + config: { workspaces: true }, }) - const Version = t.mock('../../../lib/commands/version.js', { - '../../../lib/utils/reify-finish.js': noop, - }) - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces(['major'], []) + await version.exec(['major']) t.same( - result, + 
outputs.map(o => o[0]).slice(0, 4), ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix' ) - t.matchSnapshot(readFileSync(resolve(testDir, 'package-lock.json'), 'utf8')) + t.matchSnapshot(readFileSync(resolve(prefix, 'package-lock.json'), 'utf8')) }) t.test('with one arg, all workspaces, saves package.json', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], - dependencies: { - 'workspace-a': '^1.0.0', - 'workspace-b': '^1.0.0', + const { version, outputs, prefix } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + dependencies: { + 'workspace-a': '^1.0.0', + 'workspace-b': '^1.0.0', + }, }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), + config: { + save: true, + workspaces: true, }, }) - const Version = t.mock('../../../lib/commands/version.js', { - '../../../lib/utils/reify-finish.js': noop, - }) - config.save = true - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces(['major'], []) + await version.exec(['major']) t.same( - result, + outputs.map(o => o[0]).slice(0, 4), ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix' ) - t.matchSnapshot(readFileSync(resolve(testDir, 'package-lock.json'), 'utf8')) + t.matchSnapshot(readFileSync(resolve(prefix, 'package-lock.json'), 'utf8')) }) t.test('too many args', async t => { + const { version } = await mockNpm(t, { config: { workspaces: true } }) + await t.rejects( - version.execWorkspaces(['foo', 'bar'], []), + version.exec(['foo', 'bar']), /npm version/, 'should throw usage instructions error' ) }) t.test('no workspaces-update', async t => { - flatOptions.workspacesUpdate = false - - const libNpmVersionArgs = [] - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'workspaces-test', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b'], + const { version, outputs, prefix } = await mockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify( + { + name: 'workspaces-test', + version: '1.0.0', + workspaces: ['workspace-a', 'workspace-b'], + }, + null, + 2 + ), + 'workspace-a': { + 'package.json': JSON.stringify({ + name: 'workspace-a', + version: '1.0.0', + }), + }, + 'workspace-b': { + 'package.json': JSON.stringify({ + name: 'workspace-b', + version: '1.0.0', + }), }, - null, - 2 - ), - 'workspace-a': { - 'package.json': JSON.stringify({ - name: 'workspace-a', - version: '1.0.0', - }), }, - 'workspace-b': { - 'package.json': JSON.stringify({ - name: 'workspace-b', - version: '1.0.0', - }), + mocks: { + libnpmversion: (arg, opts) => { + return '2.0.0' + }, }, - }) - const Version = t.mock('../../../lib/commands/version.js', { - ...mocks, - libnpmversion: (arg, opts) => { - libNpmVersionArgs.push([arg, opts]) - return '2.0.0' + config: { + 
workspaces: true, + 'workspaces-update': false, }, }) - npm.localPrefix = testDir - npm.prefix = testDir - const version = new Version(npm) - await version.execWorkspaces(['major'], []) + await version.exec(['major']) t.same( - result, + outputs.map(o => o[0]).slice(0, 4), ['workspace-a', 'v2.0.0', 'workspace-b', 'v2.0.0'], 'outputs the new version for only the workspaces prefixed by the tagVersionPrefix' ) t.throws( - () => statSync(resolve(testDir, 'package-lock.json')), + () => statSync(resolve(prefix, 'package-lock.json')), 'should not have a lockfile since have not reified' ) }) }) - - t.end() }) diff --git a/deps/npm/test/lib/commands/view.js b/deps/npm/test/lib/commands/view.js index d347bc9230ec8a..c6a4bf8fb79f44 100644 --- a/deps/npm/test/lib/commands/view.js +++ b/deps/npm/test/lib/commands/view.js @@ -262,93 +262,88 @@ const packument = (nv, opts) => { } const loadMockNpm = async function (t, opts = {}) { - const consoleLogs = [] const mockNpm = await _loadMockNpm(t, { + command: 'view', mocks: { pacote: { packument, }, }, - globals: { - 'console.log': (...args) => { - consoleLogs.push(args) - }, - }, ...opts, }) - return { ...mockNpm, consoleLogs } + return mockNpm } t.test('package from git', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['https://github.com/npm/green']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['https://github.com/npm/green']) + t.matchSnapshot(outputs.join('\n')) }) t.test('deprecated package with license, bugs, repository and other fields', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['green@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['green@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('deprecated package with unicode', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: true } }) - await npm.exec('view', ['green@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: true } }) + await view.exec(['green@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with more than 25 deps', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['black@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['black@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with maintainers info as object', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['pink@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['pink@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with homepage', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['orange@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['orange@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) 
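Every view.js test in this hunk follows the same conversion template: destructure the command-bound `view` instance and its captured `outputs` from the mock, run `view.exec()` directly, and snapshot the joined output rather than spying on `console.log`. A minimal sketch of that template, assuming the `{ view, outputs }` shape returned by this file's `loadMockNpm` wrapper (which passes `command: 'view'` to the shared fixture); the helper name `snapshotView` is illustrative and not part of the patch:

    // Sketch of the repeated pattern only, not upstream code.
    // Assumes the { view, outputs } shape produced by loadMockNpm above.
    const snapshotView = async (t, args, config = { unicode: false }) => {
      const { view, outputs } = await loadMockNpm(t, { config })
      await view.exec(args) // run the command instance directly
      t.matchSnapshot(outputs.join('\n')) // captured npm.output lines, not console.log spies
    }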
t.test('package with no versions', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['brown']) - t.equal(consoleLogs.join('\n'), '', 'no info to display') + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['brown']) + t.equal(outputs.join('\n'), '', 'no info to display') }) t.test('package with no repo or homepage', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['blue@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['blue@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with semver range', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['blue@^1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['blue@^1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with no modified time', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { unicode: false } }) - await npm.exec('view', ['cyan@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { unicode: false } }) + await view.exec(['cyan@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with --json and semver range', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { json: true } }) - await npm.exec('view', ['cyan@^1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { config: { json: true } }) + await view.exec(['cyan@^1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('package with --json and no versions', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { config: { json: true } }) - await npm.exec('view', ['brown']) - t.equal(consoleLogs.join('\n'), '', 'no info to display') + const { view, outputs } = await loadMockNpm(t, { config: { json: true } }) + await view.exec(['brown']) + t.equal(outputs.join('\n'), '', 'no info to display') }) t.test('package in cwd', async t => { @@ -360,72 +355,71 @@ t.test('package in cwd', async t => { } t.test('specific version', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { prefixDir }) - await npm.exec('view', ['.@1.0.0']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { prefixDir }) + await view.exec(['.@1.0.0']) + t.matchSnapshot(outputs.join('\n')) }) t.test('non-specific version', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { prefixDir }) - await npm.exec('view', ['.']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { prefixDir }) + await view.exec(['.']) + t.matchSnapshot(outputs.join('\n')) }) t.test('directory', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { prefixDir }) - await npm.exec('view', ['./blue']) - t.matchSnapshot(consoleLogs.join('\n')) + const { view, outputs } = await loadMockNpm(t, { prefixDir }) + await view.exec(['./blue']) + t.matchSnapshot(outputs.join('\n')) }) }) t.test('specific field names', async t => { - const { npm, consoleLogs } = await loadMockNpm(t) - t.afterEach(() => { - consoleLogs.length = 0 - }) + const { view, outputs } = await loadMockNpm(t) 
+ t.afterEach(() => outputs.length = 0) + t.test('readme', async t => { - await npm.exec('view', ['yellow@1.0.0', 'readme']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['yellow@1.0.0', 'readme']) + t.matchSnapshot(outputs.join('\n')) }) t.test('several fields', async t => { - await npm.exec('view', ['yellow@1.0.0', 'name', 'version', 'foo[bar]']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['yellow@1.0.0', 'name', 'version', 'foo[bar]']) + t.matchSnapshot(outputs.join('\n')) }) t.test('several fields with several versions', async t => { - await npm.exec('view', ['yellow@1.x.x', 'author']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['yellow@1.x.x', 'author']) + t.matchSnapshot(outputs.join('\n')) }) t.test('nested field with brackets', async t => { - await npm.exec('view', ['orange@1.0.0', 'dist[shasum]']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['orange@1.0.0', 'dist[shasum]']) + t.matchSnapshot(outputs.join('\n')) }) t.test('maintainers with email', async t => { - await npm.exec('view', ['yellow@1.0.0', 'maintainers', 'name']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['yellow@1.0.0', 'maintainers', 'name']) + t.matchSnapshot(outputs.join('\n')) }) t.test('maintainers with url', async t => { - await npm.exec('view', ['pink@1.0.0', 'maintainers']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['pink@1.0.0', 'maintainers']) + t.matchSnapshot(outputs.join('\n')) }) t.test('unknown nested field ', async t => { - await npm.exec('view', ['yellow@1.0.0', 'dist.foobar']) - t.equal(consoleLogs.join('\n'), '', 'no info to display') + await view.exec(['yellow@1.0.0', 'dist.foobar']) + t.equal(outputs.join('\n'), '', 'no info to display') }) t.test('array field - 1 element', async t => { - await npm.exec('view', ['purple@1.0.0', 'maintainers.name']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['purple@1.0.0', 'maintainers.name']) + t.matchSnapshot(outputs.join('\n')) }) t.test('array field - 2 elements', async t => { - await npm.exec('view', ['yellow@1.x.x', 'maintainers.name']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['yellow@1.x.x', 'maintainers.name']) + t.matchSnapshot(outputs.join('\n')) }) }) @@ -495,84 +489,84 @@ t.test('workspaces', async t => { } t.test('all workspaces', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true }, }) - await npm.exec('view', []) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec([]) + t.matchSnapshot(outputs.join('\n')) }) t.test('one specific workspace', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspace: ['green'] }, }) - await npm.exec('view', []) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec([]) + t.matchSnapshot(outputs.join('\n')) }) t.test('all workspaces --json', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true, json: true }, }) - await npm.exec('view', []) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec([]) + t.matchSnapshot(outputs.join('\n')) }) t.test('all workspaces single field', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: 
false, workspaces: true }, }) - await npm.exec('view', ['.', 'name']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['.', 'name']) + t.matchSnapshot(outputs.join('\n')) }) t.test('all workspaces nonexistent field', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true }, }) - await npm.exec('view', ['.', 'foo']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['.', 'foo']) + t.matchSnapshot(outputs.join('\n')) }) t.test('all workspaces nonexistent field --json', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true, json: true }, }) - await npm.exec('view', ['.', 'foo']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['.', 'foo']) + t.matchSnapshot(outputs.join('\n')) }) t.test('all workspaces single field --json', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true, json: true }, }) - await npm.exec('view', ['.', 'name']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['.', 'name']) + t.matchSnapshot(outputs.join('\n')) }) t.test('single workspace --json', async t => { - const { npm, consoleLogs } = await loadMockNpm(t, { + const { view, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspace: ['green'], json: true }, }) - await npm.exec('view', []) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec([]) + t.matchSnapshot(outputs.join('\n')) }) t.test('remote package name', async t => { - const { npm, logs, consoleLogs } = await loadMockNpm(t, { + const { view, logs, outputs } = await loadMockNpm(t, { prefixDir, config: { unicode: false, workspaces: true }, }) - await npm.exec('view', ['pink']) - t.matchSnapshot(consoleLogs.join('\n')) + await view.exec(['pink']) + t.matchSnapshot(outputs.join('\n')) t.matchSnapshot(logs.warn, 'should have warning of ignoring workspaces') }) }) diff --git a/deps/npm/test/lib/docs.js b/deps/npm/test/lib/docs.js index 166651f6020d8a..e8a188b6ad8c48 100644 --- a/deps/npm/test/lib/docs.js +++ b/deps/npm/test/lib/docs.js @@ -41,7 +41,7 @@ t.test('basic usage', async t => { // are generated in the following test const { npm } = await loadMockNpm(t, { mocks: { - '../../lib/utils/cmd-list.js': { commands: [] }, + '{LIB}/utils/cmd-list.js': { commands: [] }, }, }) diff --git a/deps/npm/test/lib/fixtures/mock-globals.js b/deps/npm/test/lib/fixtures/mock-globals.js index 02566e575af5ec..55418dd8e199d7 100644 --- a/deps/npm/test/lib/fixtures/mock-globals.js +++ b/deps/npm/test/lib/fixtures/mock-globals.js @@ -1,6 +1,7 @@ const t = require('tap') const mockGlobals = require('../../fixtures/mock-globals') +/* eslint-disable no-console */ const originals = { platform: process.platform, error: console.error, @@ -28,6 +29,7 @@ t.test('console', async t => { t.equal(console.error, originals.error) }) +/* eslint-enable no-console */ t.test('platform', async (t) => { t.equal(process.platform, originals.platform) @@ -235,6 +237,14 @@ t.test('replace', async (t) => { t.strictSame(process.env, originals.env) }) +t.test('dot key', async t => { + const dotKey = 'this.is.a.single.key' + mockGlobals(t, { + [`process.env."${dotKey}"`]: 'value', + }) + t.strictSame(process.env[dotKey], 'value') +}) + t.test('multiple mocks and resets', async (t) => { 
const initial = 'a'
   const platforms = ['b', 'c', 'd', 'e', 'f', 'g']
@@ -299,11 +309,11 @@ t.test('multiple mocks and resets', async (t) => {
 
   await t.test('platforms', async (t) => {
     const resets = platforms.map((p) => {
-      const { teardown, reset } = mockGlobals(t, { 'process.platform': p })
+      const { teardown: nestedTeardown, reset } = mockGlobals(t, { 'process.platform': p })
       t.equal(process.platform, p)
       return [
         reset['process.platform'],
-        teardown,
+        nestedTeardown,
       ]
     })
 
diff --git a/deps/npm/test/lib/lifecycle-cmd.js b/deps/npm/test/lib/lifecycle-cmd.js
index 22011197ead54b..c2701931cac6e8 100644
--- a/deps/npm/test/lib/lifecycle-cmd.js
+++ b/deps/npm/test/lib/lifecycle-cmd.js
@@ -1,31 +1,32 @@
 const t = require('tap')
+const mockNpm = require('../fixtures/mock-npm')
 const LifecycleCmd = require('../../lib/lifecycle-cmd.js')
-let runArgs = null
-const npm = {
-  exec: async (cmd, args) => {
+
+t.test('create a lifecycle command', async t => {
+  let runArgs = null
+  const { npm } = await mockNpm(t)
+  npm.exec = async (cmd, args) => {
     if (cmd === 'run-script') {
       runArgs = args
       return 'called the right thing'
     }
-  },
-  config: {
-    validate: () => {},
-  },
-}
-t.test('create a lifecycle command', async t => {
-  t.plan(5)
+  }
+
   class TestStage extends LifecycleCmd {
     static get name () {
       return 'test-stage'
     }
   }
+
   const cmd = new TestStage(npm)
   t.match(cmd.usage, /test-stage/)
+
   let result
   result = await cmd.exec(['some', 'args'])
   t.same(runArgs, ['test-stage', 'some', 'args'])
   t.strictSame(result, 'called the right thing')
-  result = await cmd.execWorkspaces(['some', 'args'], [])
+
+  result = await cmd.execWorkspaces(['some', 'args'])
   t.same(runArgs, ['test-stage', 'some', 'args'])
   t.strictSame(result, 'called the right thing')
 })
diff --git a/deps/npm/test/lib/load-all-commands.js b/deps/npm/test/lib/load-all-commands.js
index aaf6a69c27cd6d..dd55560369310e 100644
--- a/deps/npm/test/lib/load-all-commands.js
+++ b/deps/npm/test/lib/load-all-commands.js
@@ -7,28 +7,41 @@ const util = require('util')
 const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
 const { allCommands } = require('../../lib/utils/cmd-list.js')
 
+const isAsyncFn = (v) => typeof v === 'function' && /^\[AsyncFunction:/.test(util.inspect(v))
+
 t.test('load each command', async t => {
   for (const cmd of allCommands) {
     t.test(cmd, async t => {
-      const { npm, outputs } = await loadMockNpm(t, {
+      const { npm, outputs, cmd: impl } = await loadMockNpm(t, {
+        command: cmd,
         config: { usage: true },
       })
-      const impl = await npm.cmd(cmd)
+      const ctor = impl.constructor
+
       if (impl.completion) {
         t.type(impl.completion, 'function', 'completion, if present, is a function')
       }
-      t.type(impl.exec, 'function', 'implementation has an exec function')
-      t.type(impl.execWorkspaces, 'function', 'implementation has an execWorkspaces function')
-      t.equal(util.inspect(impl.exec), '[AsyncFunction: exec]', 'exec function is async')
-      t.equal(
-        util.inspect(impl.execWorkspaces),
-        '[AsyncFunction: execWorkspaces]',
-        'execWorkspaces function is async'
-      )
+
+      // exec fn
+      t.ok(isAsyncFn(impl.exec), 'exec is async')
+      t.ok(impl.exec.length <= 1, 'exec fn has 0 or 1 args')
+
+      // workspaces
+      t.type(ctor.ignoreImplicitWorkspace, 'boolean', 'ctor has ignoreImplicitWorkspace boolean')
+      t.type(ctor.workspaces, 'boolean', 'ctor has workspaces boolean')
+      if (ctor.workspaces) {
+        t.ok(isAsyncFn(impl.execWorkspaces), 'execWorkspaces is async')
+        t.ok(impl.execWorkspaces.length <= 1, 'execWorkspaces fn has 0 or 1 args')
+      } else {
+        t.notOk(impl.execWorkspaces, 
'has no execWorkspaces fn') + } + + // name/desc t.ok(impl.description, 'implementation has a description') t.ok(impl.name, 'implementation has a name') t.equal(cmd, impl.name, 'command list and name are the same') - t.ok(impl.ignoreImplicitWorkspace !== undefined, 'implementation has ignoreImplictWorkspace') + + // usage t.match(impl.usage, cmd, 'usage contains the command') await npm.exec(cmd, []) t.match(outputs[0][0], impl.usage, 'usage is what is output') diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js index f850ff6aff8a4b..e6936b3e36d5f8 100644 --- a/deps/npm/test/lib/npm.js +++ b/deps/npm/test/lib/npm.js @@ -1,39 +1,10 @@ const t = require('tap') const { resolve, dirname, join } = require('path') const fs = require('fs') - const { load: loadMockNpm } = require('../fixtures/mock-npm.js') const mockGlobals = require('../fixtures/mock-globals') const { commands } = require('../../lib/utils/cmd-list.js') -// delete this so that we don't have configs from the fact that it -// is being run by 'npm test' -const event = process.env.npm_lifecycle_event - -for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { - if (env === 'npm_command') { - // should only be running this in the 'test' or 'run-script' command! - // if the lifecycle event is 'test', then it'll be either 'test' or 'run', - // otherwise it should always be run-script. Of course, it'll be missing - // if this test is just run directly, which is also acceptable. - if (event === 'test') { - t.ok( - ['test', 'run-script'].some(i => i === process.env[env]), - 'should match "npm test" or "npm run test"' - ) - } else { - t.match(process.env[env], /^(run-script|exec)$/) - } - } - delete process.env[env] -} - -t.afterEach(async (t) => { - for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) { - delete process.env[env] - } -}) - t.test('not yet loaded', async t => { const { npm, logs } = await loadMockNpm(t, { load: false }) t.match(npm, { @@ -160,8 +131,8 @@ t.test('npm.load', async t => { prefixDir: { bin: t.fixture('symlink', dirname(process.execPath)), }, - globals: ({ prefix }) => ({ - 'process.env.PATH': resolve(prefix, 'bin'), + globals: (dirs) => ({ + 'process.env.PATH': resolve(dirs.prefix, 'bin'), 'process.argv': [ node, process.argv[1], @@ -299,9 +270,6 @@ t.test('npm.load', async t => { }, }) - // verify that calling the command with a short name still sets - // the npm.command property to the full canonical name of the cmd. - npm.command = null await npm.exec('run', []) t.equal(npm.command, 'run-script', 'npm.command set to canonical name') @@ -357,9 +325,7 @@ t.test('npm.load', async t => { ], }, }) - // verify that calling the command with a short name still sets - // the npm.command property to the full canonical name of the cmd. 
- npm.command = null + await t.rejects( npm.exec('run', []), /Workspaces not supported for global packages/ @@ -441,9 +407,9 @@ t.test('debug log', async t => { t.test('can load with bad dir', async t => { const { npm, testdir } = await loadMockNpm(t, { load: false, - config: { - 'logs-dir': (c) => join(c.testdir, 'my_logs_dir'), - }, + config: (dirs) => ({ + 'logs-dir': join(dirs.testdir, 'my_logs_dir'), + }), }) const logsDir = join(testdir, 'my_logs_dir') @@ -648,15 +614,15 @@ t.test('implicit workspace rejection', async t => { workspaces: ['./packages/a'], }), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => join(prefix, 'packages', 'a'), + chdir: ({ prefix }) => join(prefix, 'packages', 'a'), + globals: { 'process.argv': [ process.execPath, process.argv[1], '--color', 'false', '--workspace', './packages/a', ], - }), + }, }) await t.rejects( mock.npm.exec('team', []), @@ -682,14 +648,14 @@ t.test('implicit workspace accept', async t => { workspaces: ['./packages/a'], }), }, - globals: ({ prefix }) => ({ - 'process.cwd': () => join(prefix, 'packages', 'a'), + chdir: ({ prefix }) => join(prefix, 'packages', 'a'), + globals: { 'process.argv': [ process.execPath, process.argv[1], '--color', 'false', ], - }), + }, }) await t.rejects(mock.npm.exec('org', []), /.*Usage/) }) diff --git a/deps/npm/test/lib/utils/audit-error.js b/deps/npm/test/lib/utils/audit-error.js index bcb7d8c16dd7b6..46a9dbc38cd7d3 100644 --- a/deps/npm/test/lib/utils/audit-error.js +++ b/deps/npm/test/lib/utils/audit-error.js @@ -1,36 +1,44 @@ const t = require('tap') +const mockLogs = require('../../fixtures/mock-logs') +const mockNpm = require('../../fixtures/mock-npm') +const tmock = require('../../fixtures/tmock') -const LOGS = [] -const OUTPUT = [] -const output = (...msg) => OUTPUT.push(msg) -const auditError = t.mock('../../../lib/utils/audit-error.js', { - 'proc-log': { - warn: (...msg) => LOGS.push(msg), - }, -}) +const auditError = async (t, { command, error, ...config } = {}) => { + const { logs, logMocks } = mockLogs() + const mockAuditError = tmock(t, '{LIB}/utils/audit-error', logMocks) + + const mock = await mockNpm(t, { + command, + config, + }) -const npm = { - command: null, - flatOptions: {}, - output, + const res = {} + try { + res.result = mockAuditError(mock.npm, error ? 
{ error } : {}) + } catch (err) { + res.error = err + } + + return { + ...res, + logs: logs.warn.filter((l) => l[0] === 'audit'), + output: mock.joinedOutput(), + } } -t.afterEach(() => { - npm.flatOptions = {} - OUTPUT.length = 0 - LOGS.length = 0 -}) -t.test('no error, not audit command', t => { - npm.command = 'install' - t.equal(auditError(npm, {}), false, 'no error') - t.strictSame(OUTPUT, [], 'no output') - t.strictSame(LOGS, [], 'no warnings') - t.end() +t.test('no error, not audit command', async t => { + const { result, error, logs, output } = await auditError(t, { command: 'install' }) + + t.equal(result, false, 'no error') + t.notOk(error, 'no error') + + t.strictSame(output, '', 'no output') + t.strictSame(logs, [], 'no warnings') }) -t.test('error, not audit command', t => { - npm.command = 'install' - t.equal(auditError(npm, { +t.test('error, not audit command', async t => { + const { result, error, logs, output } = await auditError(t, { + command: 'install', error: { message: 'message', body: Buffer.from('body'), @@ -41,16 +49,17 @@ t.test('error, not audit command', t => { }, statusCode: '420', }, - }), true, 'had error') - t.strictSame(OUTPUT, [], 'no output') - t.strictSame(LOGS, [], 'no warnings') - t.end() + }) + + t.equal(result, true, 'had error') + t.notOk(error, 'no error') + t.strictSame(output, '', 'no output') + t.strictSame(logs, [], 'no warnings') }) -t.test('error, audit command, not json', t => { - npm.command = 'audit' - npm.flatOptions.json = false - t.throws(() => auditError(npm, { +t.test('error, audit command, not json', async t => { + const { result, error, logs, output } = await auditError(t, { + command: 'audit', error: { message: 'message', body: Buffer.from('body'), @@ -61,17 +70,19 @@ t.test('error, audit command, not json', t => { }, statusCode: '420', }, - })) + }) + + t.equal(result, undefined) - t.strictSame(OUTPUT, [['body']], 'some output') - t.strictSame(LOGS, [['audit', 'message']], 'some warnings') - t.end() + t.ok(error, 'throws error') + t.strictSame(output, 'body', 'some output') + t.strictSame(logs, [['audit', 'message']], 'some warnings') }) -t.test('error, audit command, json', t => { - npm.command = 'audit' - npm.flatOptions.json = true - t.throws(() => auditError(npm, { +t.test('error, audit command, json', async t => { + const { result, error, logs, output } = await auditError(t, { + json: true, + command: 'audit', error: { message: 'message', body: { response: 'body' }, @@ -82,26 +93,25 @@ t.test('error, audit command, json', t => { }, statusCode: '420', }, - })) + }) - t.strictSame(OUTPUT, [ - [ - '{\n' + - ' "message": "message",\n' + - ' "method": "POST",\n' + - ' "uri": "https://example.com/not/a/registry",\n' + - ' "headers": {\n' + - ' "head": [\n' + - ' "ers"\n' + - ' ]\n' + - ' },\n' + - ' "statusCode": "420",\n' + - ' "body": {\n' + - ' "response": "body"\n' + - ' }\n' + - '}', - ], - ], 'some output') - t.strictSame(LOGS, [['audit', 'message']], 'some warnings') - t.end() + t.equal(result, undefined) + t.ok(error, 'throws error') + t.strictSame(output, + '{\n' + + ' "message": "message",\n' + + ' "method": "POST",\n' + + ' "uri": "https://example.com/not/a/registry",\n' + + ' "headers": {\n' + + ' "head": [\n' + + ' "ers"\n' + + ' ]\n' + + ' },\n' + + ' "statusCode": "420",\n' + + ' "body": {\n' + + ' "response": "body"\n' + + ' }\n' + + '}' + , 'some output') + t.strictSame(logs, [['audit', 'message']], 'some warnings') }) diff --git a/deps/npm/test/lib/utils/completion/installed-deep.js 
b/deps/npm/test/lib/utils/completion/installed-deep.js index f0e36faee1fddf..fa39f0f0734b83 100644 --- a/deps/npm/test/lib/utils/completion/installed-deep.js +++ b/deps/npm/test/lib/utils/completion/installed-deep.js @@ -1,5 +1,6 @@ const { resolve } = require('path') const t = require('tap') +const installedDeep = require('../../../../lib/utils/completion/installed-deep.js') let prefix let globalDir = 'MISSING_GLOBAL_DIR' @@ -11,8 +12,6 @@ const _flatOptions = { return prefix }, } -const p = '../../../../lib/utils/completion/installed-deep.js' -const installedDeep = require(p) const npm = { flatOptions: _flatOptions, get prefix () { diff --git a/deps/npm/test/lib/utils/completion/installed-shallow.js b/deps/npm/test/lib/utils/completion/installed-shallow.js index 1445cbf2ffb71a..5a65b6b6bfaef7 100644 --- a/deps/npm/test/lib/utils/completion/installed-shallow.js +++ b/deps/npm/test/lib/utils/completion/installed-shallow.js @@ -1,10 +1,9 @@ -const flatOptions = { global: false } -const npm = { flatOptions } const t = require('tap') const { resolve } = require('path') +const installed = require('../../../../lib/utils/completion/installed-shallow.js') -const p = '../../../../lib/utils/completion/installed-shallow.js' -const installed = require(p) +const flatOptions = { global: false } +const npm = { flatOptions } t.test('global not set, include globals with -g', async t => { const dir = t.testdir({ diff --git a/deps/npm/test/lib/utils/config/definitions.js b/deps/npm/test/lib/utils/config/definitions.js index dca584e1048331..288166039bf6fe 100644 --- a/deps/npm/test/lib/utils/config/definitions.js +++ b/deps/npm/test/lib/utils/config/definitions.js @@ -1,14 +1,15 @@ const t = require('tap') const { resolve } = require('path') const mockGlobals = require('../../../fixtures/mock-globals') +const tmock = require('../../../fixtures/tmock') const pkg = require('../../../../package.json') // have to fake the node version, or else it'll only pass on this one mockGlobals(t, { 'process.version': 'v14.8.0', 'process.env.NODE_ENV': undefined }) -const mockDefs = (mocks = {}) => t.mock('../../../../lib/utils/config/definitions.js', mocks) +const mockDefs = (mocks = {}) => tmock(t, '{LIB}/utils/config/definitions.js', mocks) -const isWin = (isWindows) => ({ '../../../../lib/utils/is-windows.js': { isWindows } }) +const isWin = (isWindows) => ({ '{LIB}/utils/is-windows.js': { isWindows } }) t.test('basic flattening function camelCases from css-case', t => { const flat = {} @@ -930,3 +931,12 @@ t.test('remap global-style', t => { t.strictSame(flat, { installStrategy: 'shallow' }) t.end() }) + +t.test('otp changes auth-type', t => { + const obj = { 'auth-type': 'web', otp: 123456 } + const flat = {} + mockDefs().otp.flatten('otp', obj, flat) + t.strictSame(flat, { authType: 'legacy', otp: 123456 }) + t.strictSame(obj, { 'auth-type': 'legacy', otp: 123456 }) + t.end() +}) diff --git a/deps/npm/test/lib/utils/display.js b/deps/npm/test/lib/utils/display.js index c7332bce8e2492..cfe0181e23e79f 100644 --- a/deps/npm/test/lib/utils/display.js +++ b/deps/npm/test/lib/utils/display.js @@ -2,10 +2,11 @@ const t = require('tap') const log = require('../../../lib/utils/log-shim') const mockLogs = require('../../fixtures/mock-logs') const mockGlobals = require('../../fixtures/mock-globals') +const tmock = require('../../fixtures/tmock') const mockDisplay = (t, mocks) => { const { logs, logMocks } = mockLogs(mocks) - const Display = t.mock('../../../lib/utils/display', { + const Display = tmock(t, '{LIB}/utils/display', 
{ ...mocks, ...logMocks, }) @@ -44,7 +45,7 @@ t.test('can log', async (t) => { error: (...args) => logs.push(['error', ...args]), warn: (...args) => logs.push(['warn', ...args]), }, - '../../../lib/utils/explain-eresolve.js': { + '{LIB}/utils/explain-eresolve.js': { explain: (...args) => { explains.push(args) return 'explanation' @@ -71,7 +72,7 @@ t.test('handles log throwing', async (t) => { throw new Error('verbose') }, }, - '../../../lib/utils/explain-eresolve.js': { + '{LIB}/utils/explain-eresolve.js': { explain: () => { throw new Error('explain') }, diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js index 29753c3039f365..9d07693989ea84 100644 --- a/deps/npm/test/lib/utils/error-message.js +++ b/deps/npm/test/lib/utils/error-message.js @@ -1,13 +1,18 @@ const t = require('tap') -const path = require('path') +const { resolve } = require('path') +const fs = require('fs/promises') const { load: _loadMockNpm } = require('../../fixtures/mock-npm.js') const mockGlobals = require('../../fixtures/mock-globals.js') +const tmock = require('../../fixtures/tmock') const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot.js') t.formatSnapshot = (p) => { if (Array.isArray(p.files) && !p.files.length) { delete p.files } + if (p?.json === undefined) { + delete p.json + } return p } t.cleanSnapshot = p => cleanDate(cleanCwd(p)) @@ -22,35 +27,26 @@ mockGlobals(t, { }, }) -const loadMockNpm = async (t, { load, command, prefixDir, config } = {}) => { - const { npm, ...rest } = await _loadMockNpm(t, { - load, - prefixDir, - config, +const loadMockNpm = async (t, { errorMocks, ...opts } = {}) => { + const mockError = tmock(t, '{LIB}/utils/error-message.js', errorMocks) + const res = await _loadMockNpm(t, { + ...opts, mocks: { - '../../package.json': { + ...opts.mocks, + '{ROOT}/package.json': { version: '123.456.789-npm', }, }, }) - if (command !== undefined) { - npm.command = command - } return { - npm, - ...rest, + ...res, + errorMessage: (er) => mockError(er, res.npm), } } -const errorMessage = (er, { mocks, logMocks, npm } = {}) => - t.mock('../../../lib/utils/error-message.js', { ...mocks, ...logMocks })(er, npm) - t.test('just simple messages', async t => { - const npm = await loadMockNpm(t, { + const { errorMessage } = await loadMockNpm(t, { command: 'audit', - config: { - 'node-version': '99.99.99', - }, }) const codes = [ 'ENOAUDIT', @@ -77,8 +73,7 @@ t.test('just simple messages', async t => { 'E403', 'ERR_SOCKET_TIMEOUT', ] - t.plan(codes.length) - codes.forEach(async code => { + for (const code of codes) { const path = '/some/path' const pkgid = 'some@package' const file = '/some/file' @@ -90,12 +85,12 @@ t.test('just simple messages', async t => { file, stack, }) - t.matchSnapshot(errorMessage(er, npm)) - }) + t.matchSnapshot(errorMessage(er)) + } }) t.test('replace message/stack sensistive info', async t => { - const npm = await loadMockNpm(t, { command: 'audit' }) + const { errorMessage } = await loadMockNpm(t, { command: 'audit' }) const path = '/some/path' const pkgid = 'some@package' const file = '/some/file' @@ -108,11 +103,11 @@ t.test('replace message/stack sensistive info', async t => { file, stack, }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) }) t.test('bad engine without config loaded', async t => { - const npm = await loadMockNpm(t, { load: false }) + const { errorMessage } = await loadMockNpm(t, { load: false }) const path = '/some/path' const pkgid = 'some@package' const file = 
'/some/file' @@ -124,11 +119,11 @@ t.test('bad engine without config loaded', async t => { file, stack, }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) }) t.test('enoent without a file', async t => { - const npm = await loadMockNpm(t) + const { errorMessage } = await loadMockNpm(t) const path = '/some/path' const pkgid = 'some@package' const stack = 'dummy stack trace' @@ -138,11 +133,11 @@ t.test('enoent without a file', async t => { pkgid, stack, }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) }) t.test('enolock without a command', async t => { - const npm = await loadMockNpm(t, { command: null }) + const { errorMessage } = await loadMockNpm(t, { command: null }) const path = '/some/path' const pkgid = 'some@package' const file = '/some/file' @@ -154,41 +149,43 @@ t.test('enolock without a command', async t => { file, stack, }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) }) t.test('default message', async t => { - const npm = await loadMockNpm(t) - t.matchSnapshot(errorMessage(new Error('error object'), npm)) - t.matchSnapshot(errorMessage('error string', npm)) + const { errorMessage } = await loadMockNpm(t) + t.matchSnapshot(errorMessage(new Error('error object'))) + t.matchSnapshot(errorMessage('error string')) t.matchSnapshot(errorMessage(Object.assign(new Error('cmd err'), { cmd: 'some command', signal: 'SIGYOLO', args: ['a', 'r', 'g', 's'], stdout: 'stdout', stderr: 'stderr', - }), npm)) + }))) }) t.test('args are cleaned', async t => { - const npm = await loadMockNpm(t) + const { errorMessage } = await loadMockNpm(t) t.matchSnapshot(errorMessage(Object.assign(new Error('cmd err'), { cmd: 'some command', signal: 'SIGYOLO', args: ['a', 'r', 'g', 's', 'https://evil:password@npmjs.org'], stdout: 'stdout', stderr: 'stderr', - }), npm)) + }))) }) t.test('eacces/eperm', async t => { const runTest = (windows, loaded, cachePath, cacheDest) => async t => { - if (windows) { - mockGlobals(t, { 'process.platform': 'win32' }) - } - const npm = await loadMockNpm(t, { windows, load: loaded }) - const path = `${cachePath ? npm.cache : '/not/cache/dir'}/path` - const dest = `${cacheDest ? npm.cache : '/not/cache/dir'}/dest` + const { errorMessage, logs, cache } = await loadMockNpm(t, { + windows, + load: loaded, + globals: windows ? { 'process.platform': 'win32' } : [], + }) + + const path = `${cachePath ? cache : '/not/cache/dir'}/path` + const dest = `${cacheDest ? 
cache : '/not/cache/dir'}/dest` const er = Object.assign(new Error('whoopsie'), { code: 'EACCES', path, @@ -196,8 +193,8 @@ t.test('eacces/eperm', async t => { stack: 'dummy stack trace', }) - t.matchSnapshot(errorMessage(er, npm)) - t.matchSnapshot(npm.logs.verbose) + t.matchSnapshot(errorMessage(er)) + t.matchSnapshot(logs.verbose) } for (const windows of [true, false]) { @@ -217,50 +214,14 @@ t.test('json parse', t => { t.test('merge conflict in package.json', async t => { const prefixDir = { - 'package.json': ` -{ - "array": [ -<<<<<<< HEAD - 100, - { - "foo": "baz" - }, -||||||| merged common ancestors - 1, -======= - 111, - 1, - 2, - 3, - { - "foo": "bar" - }, ->>>>>>> a - 1 - ], - "a": { - "b": { -<<<<<<< HEAD - "c": { - "x": "bbbb" - } -||||||| merged common ancestors - "c": { - "x": "aaaa" - } -======= - "c": "xxxx" ->>>>>>> a + 'package.json': await fs.readFile( + resolve(__dirname, '../../fixtures/merge-conflict.json'), 'utf-8'), } - } -} -`, - } - const npm = await loadMockNpm(t, { prefixDir }) + const { errorMessage, npm } = await loadMockNpm(t, { prefixDir }) t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), { code: 'EJSONPARSE', - path: path.resolve(npm.prefix, 'package.json'), - }), npm)) + path: resolve(npm.prefix, 'package.json'), + }))) t.end() }) @@ -268,11 +229,11 @@ t.test('json parse', t => { const prefixDir = { 'package.json': 'not even slightly json', } - const npm = await loadMockNpm(t, { prefixDir }) + const { errorMessage, npm } = await loadMockNpm(t, { prefixDir }) t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', - path: path.resolve(npm.prefix, 'package.json'), - }), npm)) + path: resolve(npm.prefix, 'package.json'), + }))) t.end() }) @@ -280,11 +241,11 @@ t.test('json parse', t => { const prefixDir = { 'blerg.json': 'not even slightly json', } - const npm = await loadMockNpm(t, { prefixDir }) + const { npm, errorMessage } = await loadMockNpm(t, { prefixDir }) t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), { code: 'EJSONPARSE', - path: path.resolve(npm.prefix, 'blerg.json'), - }), npm)) + path: resolve(npm.prefix, 'blerg.json'), + }))) t.end() }) @@ -292,26 +253,26 @@ t.test('json parse', t => { }) t.test('eotp/e401', async t => { - const npm = await loadMockNpm(t) + const { errorMessage } = await loadMockNpm(t) t.test('401, no auth headers', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { code: 'E401', - }), npm)) + }))) t.end() }) t.test('401, no message', t => { t.matchSnapshot(errorMessage({ code: 'E401', - }, npm)) + })) t.end() }) t.test('one-time pass challenge code', t => { t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), { code: 'EOTP', - }), npm)) + }))) t.end() }) @@ -319,7 +280,7 @@ t.test('eotp/e401', async t => { const message = 'one-time pass' t.matchSnapshot(errorMessage(Object.assign(new Error(message), { code: 'E401', - }), npm)) + }))) t.end() }) @@ -339,7 +300,7 @@ t.test('eotp/e401', async t => { }, code: 'E401', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) } @@ -347,11 +308,11 @@ t.test('eotp/e401', async t => { }) t.test('404', async t => { - const npm = await loadMockNpm(t) + const { errorMessage } = await loadMockNpm(t) t.test('no package id', t => { const er = Object.assign(new Error('404 not found'), { code: 'E404' }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) t.test('you should publish it', t => { @@ -359,7 +320,7 @@ t.test('404', 
async t => { pkgid: 'yolo', code: 'E404', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) t.test('name with warning', t => { @@ -367,7 +328,7 @@ t.test('404', async t => { pkgid: new Array(215).fill('x').join(''), code: 'E404', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) t.test('name with error', t => { @@ -375,7 +336,7 @@ t.test('404', async t => { pkgid: 'node_modules', code: 'E404', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) t.test('cleans sensitive info from package id', t => { @@ -383,13 +344,13 @@ t.test('404', async t => { pkgid: 'http://evil:password@npmjs.org/not-found', code: 'E404', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) }) t.test('bad platform', async t => { - const npm = await loadMockNpm(t) + const { errorMessage } = await loadMockNpm(t) t.test('string os/arch', t => { const er = Object.assign(new Error('a bad plat'), { @@ -404,7 +365,7 @@ t.test('bad platform', async t => { }, code: 'EBADPLATFORM', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) t.test('array os/arch', t => { @@ -420,30 +381,29 @@ t.test('bad platform', async t => { }, code: 'EBADPLATFORM', }) - t.matchSnapshot(errorMessage(er, npm)) + t.matchSnapshot(errorMessage(er)) t.end() }) }) t.test('explain ERESOLVE errors', async t => { - const { npm, ...rest } = await loadMockNpm(t) const EXPLAIN_CALLED = [] - const er = Object.assign(new Error('could not resolve'), { - code: 'ERESOLVE', - }) - - t.matchSnapshot(errorMessage(er, { - npm, - ...rest, - mocks: { - '../../../lib/utils/explain-eresolve.js': { + const { errorMessage } = await loadMockNpm(t, { + errorMocks: { + '{LIB}/utils/explain-eresolve.js': { report: (...args) => { EXPLAIN_CALLED.push(args) return { explanation: 'explanation', file: 'report' } }, }, }, - })) + }) + + const er = Object.assign(new Error('could not resolve'), { + code: 'ERESOLVE', + }) + + t.matchSnapshot(errorMessage(er)) t.match(EXPLAIN_CALLED, [[er, false]]) }) diff --git a/deps/npm/test/lib/utils/exit-handler.js b/deps/npm/test/lib/utils/exit-handler.js index d22ec4dd141a87..76d5fec4c099a8 100644 --- a/deps/npm/test/lib/utils/exit-handler.js +++ b/deps/npm/test/lib/utils/exit-handler.js @@ -2,12 +2,13 @@ const t = require('tap') const os = require('os') const fs = require('fs') const fsMiniPass = require('fs-minipass') -const { join } = require('path') +const { join, resolve } = require('path') const EventEmitter = require('events') const { format } = require('../../../lib/utils/log-file') const { load: loadMockNpm } = require('../../fixtures/mock-npm') const mockGlobals = require('../../fixtures/mock-globals') const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot') +const tmock = require('../../fixtures/tmock') const pick = (obj, ...keys) => keys.reduce((acc, key) => { acc[key] = obj[key] @@ -35,7 +36,8 @@ t.cleanSnapshot = (path) => cleanDate(cleanCwd(path)) // nerf itself, thinking global.process is broken or gone. 
mockGlobals(t, { process: Object.assign(new EventEmitter(), { - ...pick(process, 'execPath', 'stdout', 'stderr', 'cwd', 'env', 'umask'), + // these are process properties that are needed in the running code and tests + ...pick(process, 'execPath', 'stdout', 'stderr', 'cwd', 'chdir', 'env', 'umask'), argv: ['/node', ...process.argv.slice(1)], version: 'v1.0.0', kill: () => {}, @@ -56,25 +58,32 @@ const mockExitHandler = async (t, { init, load, testdir, config, mocks, files } load, testdir, mocks: { - '../../package.json': { + '{ROOT}/package.json': { version: '1.0.0', }, ...mocks, }, - config: { + config: (dirs) => ({ loglevel: 'notice', - ...config, - }, + ...(typeof config === 'function' ? config(dirs) : config), + }), globals: { 'console.error': (err) => errors.push(err), }, }) - const exitHandler = t.mock('../../../lib/utils/exit-handler.js', { - '../../../lib/utils/error-message.js': (err) => ({ + const exitHandler = tmock(t, '{LIB}/utils/exit-handler.js', { + '{LIB}/utils/error-message.js': (err) => ({ summary: [['ERR SUMMARY', err.message]], detail: [['ERR DETAIL', err.message]], ...(files ? { files } : {}), + json: { + error: { + code: err.code, + summary: err.message, + detail: err.message, + }, + }, }), os: { type: () => 'Foo', @@ -89,7 +98,6 @@ const mockExitHandler = async (t, { init, load, testdir, config, mocks, files } } t.teardown(() => { - delete process.exitCode process.removeAllListeners('exit') }) @@ -101,8 +109,8 @@ const mockExitHandler = async (t, { init, load, testdir, config, mocks, files } // to t.plan() every test to make sure we get process.exit called. Also // introduce a small artificial delay so the logs are consistently finished // by the time the exit handler forces process.exit - exitHandler: (...args) => new Promise(resolve => setTimeout(() => { - process.once('exit', resolve) + exitHandler: (...args) => new Promise(res => setTimeout(() => { + process.once('exit', res) exitHandler(...args) }, 50)), } @@ -338,7 +346,7 @@ t.test('no logs dir', async (t) => { t.test('timers fail to write', async (t) => { // we want the fs.writeFileSync in the Timers class to fail - const mockTimers = t.mock('../../../lib/utils/timers.js', { + const mockTimers = tmock(t, '{LIB}/utils/timers.js', { fs: { ...fs, writeFileSync: (file, ...rest) => { @@ -352,13 +360,13 @@ t.test('timers fail to write', async (t) => { }) const { exitHandler, logs } = await mockExitHandler(t, { - config: { - 'logs-dir': 'LOGS_DIR', + config: (dirs) => ({ + 'logs-dir': resolve(dirs.prefix, 'LOGS_DIR'), timing: true, - }, + }), mocks: { // note, this is relative to test/fixtures/mock-npm.js not this file - '../../lib/utils/timers.js': mockTimers, + '{LIB}/utils/timers.js': mockTimers, }, }) @@ -369,7 +377,7 @@ t.test('timers fail to write', async (t) => { t.test('log files fail to write', async (t) => { // we want the fsMiniPass.WriteStreamSync in the LogFile class to fail - const mockLogFile = t.mock('../../../lib/utils/log-file.js', { + const mockLogFile = tmock(t, '{LIB}/utils/log-file.js', { 'fs-minipass': { ...fsMiniPass, WriteStreamSync: (file, ...rest) => { @@ -381,12 +389,12 @@ t.test('log files fail to write', async (t) => { }) const { exitHandler, logs } = await mockExitHandler(t, { - config: { - 'logs-dir': 'LOGS_DIR', - }, + config: (dirs) => ({ + 'logs-dir': resolve(dirs.prefix, 'LOGS_DIR'), + }), mocks: { // note, this is relative to test/fixtures/mock-npm.js not this file - '../../lib/utils/log-file.js': mockLogFile, + '{LIB}/utils/log-file.js': mockLogFile, }, }) @@ -417,9 +425,9 @@ 
t.test('files from error message', async (t) => { t.test('files from error message with error', async (t) => { const { exitHandler, logs } = await mockExitHandler(t, { - config: { - 'logs-dir': 'LOGS_DIR', - }, + config: (dirs) => ({ + 'logs-dir': resolve(dirs.prefix, 'LOGS_DIR'), + }), files: [ ['error-file.txt', '# error file content'], ], @@ -587,10 +595,7 @@ t.test('exits uncleanly when only emitting exit event', async (t) => { t.test('do no fancy handling for shellouts', async t => { const { exitHandler, npm, logs } = await mockExitHandler(t) - const exec = await npm.cmd('exec') - - npm.command = 'exec' - npm.commandInstance = exec + await npm.cmd('exec') const loudNoises = () => logs.filter(([level]) => ['warn', 'error'].includes(level)) diff --git a/deps/npm/test/lib/utils/explain-dep.js b/deps/npm/test/lib/utils/explain-dep.js index ed006c01d78fb3..514f28d125a0d7 100644 --- a/deps/npm/test/lib/utils/explain-dep.js +++ b/deps/npm/test/lib/utils/explain-dep.js @@ -1,16 +1,11 @@ const { resolve } = require('path') const t = require('tap') const { explainNode, printNode } = require('../../../lib/utils/explain-dep.js') +const { cleanCwd } = require('../../fixtures/clean-snapshot') + const testdir = t.testdirName -const redactCwd = (path) => { - const normalizePath = p => p - .replace(/\\+/g, '/') - .replace(/\r\n/g, '\n') - return normalizePath(path) - .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') -} -t.cleanSnapshot = (str) => redactCwd(str) +t.cleanSnapshot = (str) => cleanCwd(str) const cases = { prodDep: { diff --git a/deps/npm/test/lib/utils/log-file.js b/deps/npm/test/lib/utils/log-file.js index 4be5231c1c4fa0..e134fe8790bd53 100644 --- a/deps/npm/test/lib/utils/log-file.js +++ b/deps/npm/test/lib/utils/log-file.js @@ -4,11 +4,11 @@ const fs = _fs.promises const path = require('path') const os = require('os') const fsMiniPass = require('fs-minipass') -const rimraf = require('rimraf') +const tmock = require('../../fixtures/tmock') const LogFile = require('../../../lib/utils/log-file.js') const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot') -t.cleanSnapshot = (path) => cleanDate(cleanCwd(path)) +t.cleanSnapshot = (s) => cleanDate(cleanCwd(s)) const getId = (d = new Date()) => d.toISOString().replace(/[.:]/g, '_') const last = arr => arr[arr.length - 1] @@ -43,7 +43,7 @@ const cleanErr = (message) => { const loadLogFile = async (t, { buffer = [], mocks, testdir = {}, ...options } = {}) => { const root = t.testdir(testdir) - const MockLogFile = t.mock('../../../lib/utils/log-file.js', mocks) + const MockLogFile = tmock(t, '{LIB}/utils/log-file.js', mocks) const logFile = new MockLogFile(Object.keys(options).length ? 
options : undefined) buffer.forEach((b) => logFile.log(...b)) @@ -275,12 +275,14 @@ t.test('rimraf error', async t => { logsMax, testdir: makeOldLogs(oldLogs), mocks: { - rimraf: (...args) => { - if (count >= 3) { - throw new Error('bad rimraf') - } - count++ - return rimraf(...args) + 'fs/promises': { + rm: async (...args) => { + if (count >= 3) { + throw new Error('bad rimraf') + } + count++ + return fs.rm(...args) + }, }, }, }) diff --git a/deps/npm/test/lib/utils/log-shim.js b/deps/npm/test/lib/utils/log-shim.js index dee4efbaa4552d..7c8fb7ce3c9569 100644 --- a/deps/npm/test/lib/utils/log-shim.js +++ b/deps/npm/test/lib/utils/log-shim.js @@ -1,6 +1,7 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') -const makeShim = (mocks) => t.mock('../../../lib/utils/log-shim.js', mocks) +const makeShim = (mocks) => tmock(t, '{LIB}/utils/log-shim.js', mocks) const loggers = [ 'notice', diff --git a/deps/npm/test/lib/utils/open-url-prompt.js b/deps/npm/test/lib/utils/open-url-prompt.js index a18fe85f687517..faf2ab32587af1 100644 --- a/deps/npm/test/lib/utils/open-url-prompt.js +++ b/deps/npm/test/lib/utils/open-url-prompt.js @@ -1,6 +1,7 @@ const t = require('tap') const mockGlobals = require('../../fixtures/mock-globals.js') const EventEmitter = require('events') +const tmock = require('../../fixtures/tmock') const OUTPUT = [] const output = (...args) => OUTPUT.push(args) @@ -22,14 +23,6 @@ let openerUrl = null let openerOpts = null let openerResult = null -const open = async (url, options) => { - openerUrl = url - openerOpts = options - if (openerResult) { - throw openerResult - } -} - let questionShouldResolve = true let openUrlPromptInterrupted = false @@ -49,9 +42,15 @@ const readline = { }), } -const openUrlPrompt = t.mock('../../../lib/utils/open-url-prompt.js', { +const openUrlPrompt = tmock(t, '{LIB}/utils/open-url-prompt.js', { '@npmcli/promise-spawn': { - open, + open: async (url, options) => { + openerUrl = url + openerOpts = options + if (openerResult) { + throw openerResult + } + }, }, readline, }) diff --git a/deps/npm/test/lib/utils/open-url.js b/deps/npm/test/lib/utils/open-url.js index 70afd550333f74..28a11b3609c674 100644 --- a/deps/npm/test/lib/utils/open-url.js +++ b/deps/npm/test/lib/utils/open-url.js @@ -1,4 +1,5 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') const OUTPUT = [] const output = (...args) => OUTPUT.push(args) @@ -28,7 +29,7 @@ const open = async (url, options) => { } } -const openUrl = t.mock('../../../lib/utils/open-url.js', { +const openUrl = tmock(t, '{LIB}/utils/open-url.js', { '@npmcli/promise-spawn': { open, }, diff --git a/deps/npm/test/lib/utils/otplease.js b/deps/npm/test/lib/utils/otplease.js index 79eaa798e60539..d788c39da842c5 100644 --- a/deps/npm/test/lib/utils/otplease.js +++ b/deps/npm/test/lib/utils/otplease.js @@ -1,74 +1,74 @@ const t = require('tap') +const setupMockNpm = require('../../fixtures/mock-npm') +const tmock = require('../../fixtures/tmock') -const { fake: mockNpm } = require('../../fixtures/mock-npm') -const mockGlobals = require('../../fixtures/mock-globals') +const setupOtplease = async (t, { otp = {}, ...rest }, fn) => { + const readUserInfo = { + otp: async () => '1234', + } -const readUserInfo = { - otp: async () => '1234', -} -const webAuth = async (opener) => { - opener() - return '1234' -} + const webAuth = async (opener) => { + opener() + return '1234' + } -const otplease = t.mock('../../../lib/utils/otplease.js', { - '../../../lib/utils/read-user-info.js': readUserInfo, 
- '../../../lib/utils/open-url-prompt.js': () => {}, - '../../../lib/utils/web-auth': webAuth, -}) + const otplease = tmock(t, '{LIB}/utils/otplease.js', { + '{LIB}/utils/read-user-info.js': readUserInfo, + '{LIB}/utils/open-url-prompt.js': () => {}, + '{LIB}/utils/web-auth': webAuth, + }) + + const { npm } = await setupMockNpm(t, rest) + + return await otplease(npm, otp, fn) +} t.test('returns function results on success', async (t) => { - const fn = () => 'test string' - const result = await otplease(null, {}, fn) + const result = await setupOtplease(t, {}, () => 'test string') t.equal('test string', result) }) t.test('returns function results on otp success', async (t) => { - mockGlobals(t, { - 'process.stdin': { isTTY: true }, - 'process.stdout': { isTTY: true }, - }) const fn = ({ otp }) => { if (otp) { return 'success' } throw Object.assign(new Error('nope'), { code: 'EOTP' }) } - const result = await otplease(null, {}, fn) + + const result = await setupOtplease(t, { + globals: { + 'process.stdin': { isTTY: true }, + 'process.stdout': { isTTY: true }, + }, + }, fn) + t.equal('success', result) }) t.test('prompts for otp for EOTP', async (t) => { - const stdinTTY = process.stdin.isTTY - const stdoutTTY = process.stdout.isTTY - process.stdin.isTTY = true - process.stdout.isTTY = true - t.teardown(() => { - process.stdin.isTTY = stdinTTY - process.stdout.isTTY = stdoutTTY - }) + let called = false - let runs = 0 const fn = async (opts) => { - if (++runs === 1) { + if (!called) { + called = true throw Object.assign(new Error('nope'), { code: 'EOTP' }) } - - t.equal(opts.some, 'prop', 'carried original options') - t.equal(opts.otp, '1234', 'received the otp') - t.end() + return opts } - await otplease(null, { some: 'prop' }, fn) + const result = await setupOtplease(t, { + otp: { some: 'prop' }, + globals: { + 'process.stdin': { isTTY: true }, + 'process.stdout': { isTTY: true }, + }, + }, fn) + + t.strictSame(result, { some: 'prop', otp: '1234' }) }) t.test('returns function results on webauth success', async (t) => { - mockGlobals(t, { - 'process.stdin': { isTTY: true }, - 'process.stdout': { isTTY: true }, - }) - - const npm = mockNpm({ config: { browser: 'firefox' } }) const fn = ({ otp }) => { if (otp) { return 'success' @@ -82,75 +82,64 @@ t.test('returns function results on webauth success', async (t) => { }) } - const result = await otplease(npm, {}, fn) + const result = await setupOtplease(t, { + config: { browser: 'firefox' }, + globals: { + 'process.stdin': { isTTY: true }, + 'process.stdout': { isTTY: true }, + }, + }, fn) + t.equal('success', result) }) t.test('prompts for otp for 401', async (t) => { - const stdinTTY = process.stdin.isTTY - const stdoutTTY = process.stdout.isTTY - process.stdin.isTTY = true - process.stdout.isTTY = true - t.teardown(() => { - process.stdin.isTTY = stdinTTY - process.stdout.isTTY = stdoutTTY - }) + let called = false - let runs = 0 const fn = async (opts) => { - if (++runs === 1) { + if (!called) { + called = true throw Object.assign(new Error('nope'), { code: 'E401', body: 'one-time pass required', }) } - t.equal(opts.some, 'prop', 'carried original options') - t.equal(opts.otp, '1234', 'received the otp') - t.end() + return opts } - await otplease(null, { some: 'prop' }, fn) + const result = await setupOtplease(t, { + globals: { + 'process.stdin': { isTTY: true }, + 'process.stdout': { isTTY: true }, + }, + }, fn) + + t.strictSame(result, { otp: '1234' }) }) t.test('does not prompt for non-otp errors', async (t) => { - const stdinTTY = 
process.stdin.isTTY - const stdoutTTY = process.stdout.isTTY - process.stdin.isTTY = true - process.stdout.isTTY = true - t.teardown(() => { - process.stdin.isTTY = stdinTTY - process.stdout.isTTY = stdoutTTY - }) - const fn = async (opts) => { throw new Error('nope') } - t.rejects( - otplease(null, { some: 'prop' }, fn), - { message: 'nope' }, - 'rejects with the original error' - ) + await t.rejects(setupOtplease(t, { + globals: { + 'process.stdin': { isTTY: true }, + 'process.stdout': { isTTY: true }, + }, + }, fn), { message: 'nope' }, 'rejects with the original error') }) t.test('does not prompt if stdin or stdout is not a tty', async (t) => { - const stdinTTY = process.stdin.isTTY - const stdoutTTY = process.stdout.isTTY - process.stdin.isTTY = false - process.stdout.isTTY = false - t.teardown(() => { - process.stdin.isTTY = stdinTTY - process.stdout.isTTY = stdoutTTY - }) - const fn = async (opts) => { throw Object.assign(new Error('nope'), { code: 'EOTP' }) } - t.rejects( - otplease(null, { some: 'prop' }, fn), - { message: 'nope' }, - 'rejects with the original error' - ) + await t.rejects(setupOtplease(t, { + globals: { + 'process.stdin': { isTTY: false }, + 'process.stdout': { isTTY: false }, + }, + }, fn), { message: 'nope' }, 'rejects with the original error') }) diff --git a/deps/npm/test/lib/utils/pulse-till-done.js b/deps/npm/test/lib/utils/pulse-till-done.js index 9f7a94614d3bb5..3b3f4b2f2253ef 100644 --- a/deps/npm/test/lib/utils/pulse-till-done.js +++ b/deps/npm/test/lib/utils/pulse-till-done.js @@ -1,8 +1,9 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') let pulseStarted = null -const pulseTillDone = t.mock('../../../lib/utils/pulse-till-done.js', { +const pulseTillDone = tmock(t, '{LIB}/utils/pulse-till-done.js', { npmlog: { gauge: { pulse: () => { diff --git a/deps/npm/test/lib/utils/read-user-info.js b/deps/npm/test/lib/utils/read-user-info.js index be805a2a87c6a0..dfd17a8e37cbe8 100644 --- a/deps/npm/test/lib/utils/read-user-info.js +++ b/deps/npm/test/lib/utils/read-user-info.js @@ -1,4 +1,5 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') let readOpts = null let readResult = null @@ -25,7 +26,7 @@ const npmUserValidate = { } let logMsg = null -const readUserInfo = t.mock('../../../lib/utils/read-user-info.js', { +const readUserInfo = tmock(t, '{LIB}/utils/read-user-info.js', { read, npmlog: { clearProgress: () => {}, diff --git a/deps/npm/test/lib/utils/reify-finish.js b/deps/npm/test/lib/utils/reify-finish.js index b565034058adb7..ee112203a24bc8 100644 --- a/deps/npm/test/lib/utils/reify-finish.js +++ b/deps/npm/test/lib/utils/reify-finish.js @@ -1,4 +1,6 @@ const t = require('tap') +const { cleanNewlines } = require('../../fixtures/clean-snapshot') +const tmock = require('../../fixtures/tmock') const npm = { config: { @@ -30,9 +32,9 @@ const fs = { }, } -const reifyFinish = t.mock('../../../lib/utils/reify-finish.js', { +const reifyFinish = tmock(t, '{LIB}/utils/reify-finish.js', { fs, - '../../../lib/utils/reify-output.js': reifyOutput, + '{LIB}/utils/reify-output.js': reifyOutput, }) t.test('should not write if not global', async t => { @@ -74,6 +76,6 @@ t.test('should write if everything above passes', async t => { }, }) // windowwwwwwssss!!!!! 
- const data = fs.readFileSync(`${path}/npmrc`, 'utf8').replace(/\r\n/g, '\n') + const data = cleanNewlines(fs.readFileSync(`${path}/npmrc`, 'utf8')) t.matchSnapshot(data, 'written config') }) diff --git a/deps/npm/test/lib/utils/reify-output.js b/deps/npm/test/lib/utils/reify-output.js index b38a14de339098..5d1d5be47efa30 100644 --- a/deps/npm/test/lib/utils/reify-output.js +++ b/deps/npm/test/lib/utils/reify-output.js @@ -1,25 +1,22 @@ const t = require('tap') +const mockNpm = require('../../fixtures/mock-npm') +const reifyOutput = require('../../../lib/utils/reify-output.js') t.cleanSnapshot = str => str.replace(/in [0-9]+m?s/g, 'in {TIME}') -const settings = { - fund: true, -} -const npm = { - started: Date.now(), - flatOptions: settings, - silent: false, +const mockReify = async (t, reify, { command, ...config } = {}) => { + const mock = await mockNpm(t, { + command, + config, + }) + + reifyOutput(mock.npm, reify) + + return mock.joinedOutput() } -const reifyOutput = require('../../../lib/utils/reify-output.js') -t.test('missing info', (t) => { - t.plan(1) - npm.output = out => t.notMatch( - out, - 'looking for funding', - 'should not print fund message if missing info' - ) - reifyOutput(npm, { +t.test('missing info', async t => { + const out = await mockReify(t, { actualTree: { children: [], }, @@ -27,36 +24,30 @@ t.test('missing info', (t) => { children: [], }, }) -}) -t.test('even more missing info', t => { - t.plan(1) - npm.output = out => t.notMatch( + t.notMatch( out, 'looking for funding', 'should not print fund message if missing info' ) +}) - reifyOutput(npm, { +t.test('even more missing info', async t => { + const out = await mockReify(t, { actualTree: { children: [], }, }) -}) -t.test('single package', (t) => { - t.plan(1) - npm.output = out => { - if (out.endsWith('looking for funding')) { - t.match( - out, - '1 package is looking for funding', - 'should print single package message' - ) - } - } + t.notMatch( + out, + 'looking for funding', + 'should not print fund message if missing info' + ) +}) - reifyOutput(npm, { +t.test('single package', async t => { + const out = await mockReify(t, { // a report with an error is the same as no report at all, if // the command is not 'audit' auditReport: { @@ -87,20 +78,16 @@ t.test('single package', (t) => { children: [], }, }) -}) -t.test('no message when funding config is false', (t) => { - t.teardown(() => { - settings.fund = true - }) - settings.fund = false - npm.output = out => { - if (out.endsWith('looking for funding')) { - t.fail('should not print funding info', { actual: out }) - } - } + t.match( + out, + '1 package is looking for funding', + 'should print single package message' + ) +}) - reifyOutput(npm, { +t.test('no message when funding config is false', async t => { + const out = await mockReify(t, { actualTree: { name: 'foo', package: { @@ -123,24 +110,13 @@ t.test('no message when funding config is false', (t) => { diff: { children: [], }, - }) + }, { fund: false }) - t.end() + t.notMatch(out, 'looking for funding', 'should not print funding info') }) -t.test('print appropriate message for many packages', (t) => { - t.plan(1) - npm.output = out => { - if (out.endsWith('looking for funding')) { - t.match( - out, - '3 packages are looking for funding', - 'should print single package message' - ) - } - } - - reifyOutput(npm, { +t.test('print appropriate message for many packages', async t => { + const out = await mockReify(t, { actualTree: { name: 'foo', package: { @@ -184,6 +160,12 @@ t.test('print appropriate message 
for many packages', (t) => { children: [], }, }) + + t.match( + out, + '3 packages are looking for funding', + 'should print single package message' + ) }) t.test('showing and not showing audit report', async t => { @@ -231,15 +213,8 @@ t.test('showing and not showing audit report', async t => { }, } - t.test('no output when silent', t => { - t.teardown(() => { - delete npm.silent - }) - npm.silent = true - npm.output = out => { - t.fail('should not get output when silent', { actual: out }) - } - reifyOutput(npm, { + t.test('no output when silent', async t => { + const out = await mockReify(t, { actualTree: { inventory: { size: 999 }, children: [] }, auditReport, diff: { @@ -247,16 +222,12 @@ t.test('showing and not showing audit report', async t => { { action: 'ADD', ideal: { location: 'loc' } }, ], }, - }) - t.end() + }, { silent: true }) + t.equal(out, '', 'should not get output when silent') }) - t.test('output when not silent', t => { - const OUT = [] - npm.output = out => { - OUT.push(out) - } - reifyOutput(npm, { + t.test('output when not silent', async t => { + const out = await mockReify(t, { actualTree: { inventory: new Map(), children: [] }, auditReport, diff: { @@ -265,33 +236,14 @@ t.test('showing and not showing audit report', async t => { ], }, }) - t.match(OUT.join('\n'), /Run `npm audit` for details\.$/, 'got audit report') - t.end() + + t.match(out, /Run `npm audit` for details\.$/, 'got audit report') }) for (const json of [true, false]) { - t.test(`json=${json}`, t => { - t.teardown(() => { - delete npm.flatOptions.json - }) - npm.flatOptions.json = json - t.test('set exit code when cmd is audit', t => { - npm.output = () => {} - const { exitCode } = process - const { command } = npm - npm.flatOptions.auditLevel = 'low' - t.teardown(() => { - delete npm.flatOptions.auditLevel - npm.command = command - // only set exitCode back if we're passing tests - if (t.passing()) { - process.exitCode = exitCode - } - }) - - process.exitCode = 0 - npm.command = 'audit' - reifyOutput(npm, { + t.test(`json=${json}`, async t => { + t.test('set exit code when cmd is audit', async t => { + await mockReify(t, { actualTree: { inventory: new Map(), children: [] }, auditReport, diff: { @@ -299,29 +251,13 @@ t.test('showing and not showing audit report', async t => { { action: 'ADD', ideal: { location: 'loc' } }, ], }, - }) + }, { command: 'audit', 'audit-level': 'low' }) t.equal(process.exitCode, 1, 'set exit code') - t.end() }) - t.test('do not set exit code when cmd is install', t => { - npm.output = () => {} - const { exitCode } = process - const { command } = npm - npm.flatOptions.auditLevel = 'low' - t.teardown(() => { - delete npm.flatOptions.auditLevel - npm.command = command - // only set exitCode back if we're passing tests - if (t.passing()) { - process.exitCode = exitCode - } - }) - - process.exitCode = 0 - npm.command = 'install' - reifyOutput(npm, { + t.test('do not set exit code when cmd is install', async t => { + await mockReify(t, { actualTree: { inventory: new Map(), children: [] }, auditReport, diff: { @@ -329,28 +265,17 @@ t.test('showing and not showing audit report', async t => { { action: 'ADD', ideal: { location: 'loc' } }, ], }, - }) + }, { command: 'install', 'audit-level': 'low' }) - t.equal(process.exitCode, 0, 'did not set exit code') - t.end() + t.notOk(process.exitCode, 'did not set exit code') }) - t.end() }) } - - t.end() }) -t.test('packages changed message', t => { - const output = [] - npm.output = out => { - output.push(out) - } - +t.test('packages 
changed message', async t => { // return a test function that builds up the mock and snapshots output - const testCase = (t, added, removed, changed, audited, json, command) => { - settings.json = json - npm.command = command + const testCase = async (t, added, removed, changed, audited, json, command) => { const mock = { actualTree: { inventory: { size: audited, has: () => true }, @@ -384,9 +309,9 @@ t.test('packages changed message', t => { const ideal = { location: 'loc' } mock.diff.children.push({ action: 'CHANGE', actual, ideal }) } - output.length = 0 - reifyOutput(npm, mock) - t.matchSnapshot(output.join('\n'), JSON.stringify({ + + const out = await mockReify(t, mock, { json, command }) + t.matchSnapshot(out, JSON.stringify({ added, removed, changed, @@ -412,20 +337,14 @@ t.test('packages changed message', t => { cases.push([0, 0, 0, 2, true, 'audit']) cases.push([0, 0, 0, 2, false, 'audit']) - t.plan(cases.length) - for (const [added, removed, changed, audited, json, command] of cases) { - testCase(t, added, removed, changed, audited, json, command) + for (const c of cases) { + await t.test('', t => testCase(t, ...c)) } - - t.end() }) -t.test('added packages should be looked up within returned tree', t => { - t.test('has added pkg in inventory', t => { - t.plan(1) - npm.output = out => t.matchSnapshot(out) - - reifyOutput(npm, { +t.test('added packages should be looked up within returned tree', async t => { + t.test('has added pkg in inventory', async t => { + const out = await mockReify(t, { actualTree: { name: 'foo', inventory: { @@ -438,13 +357,12 @@ t.test('added packages should be looked up within returned tree', t => { ], }, }) - }) - t.test('missing added pkg in inventory', t => { - t.plan(1) - npm.output = out => t.matchSnapshot(out) + t.matchSnapshot(out) + }) - reifyOutput(npm, { + t.test('missing added pkg in inventory', async t => { + const out = await mockReify(t, { actualTree: { name: 'foo', inventory: { @@ -457,6 +375,7 @@ t.test('added packages should be looked up within returned tree', t => { ], }, }) + + t.matchSnapshot(out) }) - t.end() }) diff --git a/deps/npm/test/lib/utils/tar.js b/deps/npm/test/lib/utils/tar.js index f72b1432c89d6f..78c01f3f57ae40 100644 --- a/deps/npm/test/lib/utils/tar.js +++ b/deps/npm/test/lib/utils/tar.js @@ -1,10 +1,11 @@ const t = require('tap') const pack = require('libnpmpack') const ssri = require('ssri') +const tmock = require('../../fixtures/tmock') const { getContents } = require('../../../lib/utils/tar.js') -const mockTar = ({ notice }) => t.mock('../../../lib/utils/tar.js', { +const mockTar = ({ notice }) => tmock(t, '{LIB}/utils/tar.js', { 'proc-log': { notice, }, diff --git a/deps/npm/test/lib/utils/timers.js b/deps/npm/test/lib/utils/timers.js index 23d8eb6e2cafef..74df6c28cd361c 100644 --- a/deps/npm/test/lib/utils/timers.js +++ b/deps/npm/test/lib/utils/timers.js @@ -2,10 +2,11 @@ const t = require('tap') const { resolve, join } = require('path') const fs = require('graceful-fs') const mockLogs = require('../../fixtures/mock-logs') +const tmock = require('../../fixtures/tmock') const mockTimers = (t, options) => { const { logs, logMocks } = mockLogs() - const Timers = t.mock('../../../lib/utils/timers', { + const Timers = tmock(t, '{LIB}/utils/timers', { ...logMocks, }) const timers = new Timers(options) diff --git a/deps/npm/test/lib/utils/update-notifier.js b/deps/npm/test/lib/utils/update-notifier.js index fa4a04bad9839c..e7830e6d9d66e0 100644 --- a/deps/npm/test/lib/utils/update-notifier.js +++ 
b/deps/npm/test/lib/utils/update-notifier.js @@ -1,4 +1,6 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') + let ciMock = {} const flatOptions = { global: false, cache: t.testdir() + '/_cacache' } @@ -17,7 +19,6 @@ let PACOTE_ERROR = null const pacote = { manifest: async (spec, opts) => { if (!spec.match(/^npm@/)) { - console.error(new Error('should only fetch manifest for npm')) process.exit(1) } MANIFEST_REQUEST.push(spec) @@ -53,22 +54,15 @@ const fs = { ...require('fs'), stat: (path, cb) => { if (basename(path) !== '_update-notifier-last-checked') { - console.error( - new Error('should only write to notifier last checked file') - ) process.exit(1) } process.nextTick(() => cb(STAT_ERROR, { mtime: new Date(STAT_MTIME) })) }, writeFile: (path, content, cb) => { if (content !== '') { - console.error(new Error('should not be writing content')) process.exit(1) } if (basename(path) !== '_update-notifier-last-checked') { - console.error( - new Error('should only write to notifier last checked file') - ) process.exit(1) } process.nextTick(() => cb(WRITE_ERROR)) @@ -85,7 +79,7 @@ t.afterEach(() => { const runUpdateNotifier = async ({ color = true, ...npmOptions } = {}) => { const _npm = { ...defaultNpm, ...npmOptions, logColor: color } - return t.mock('../../../lib/utils/update-notifier.js', { + return tmock(t, '{LIB}/utils/update-notifier.js', { 'ci-info': ciMock, pacote, fs, diff --git a/deps/npm/test/lib/utils/web-auth.js b/deps/npm/test/lib/utils/web-auth.js index ee8a17ecbc09d4..a4e8f4bbc755dc 100644 --- a/deps/npm/test/lib/utils/web-auth.js +++ b/deps/npm/test/lib/utils/web-auth.js @@ -1,10 +1,11 @@ const t = require('tap') +const tmock = require('../../fixtures/tmock') const webAuthCheckLogin = async () => { return { token: 'otp-token' } } -const webauth = t.mock('../../../lib/utils/web-auth.js', { +const webauth = tmock(t, '{LIB}/utils/web-auth.js', { 'npm-profile': { webAuthCheckLogin }, }) From 3e70b7d863bc2c77f4f72e9166aa3c7ac8651c2f Mon Sep 17 00:00:00 2001 From: Marco Ippolito Date: Tue, 17 Jan 2023 10:05:53 +0100 Subject: [PATCH 169/191] http: writeHead if statusmessage is undefined dont override headers PR-URL: https://github.com/nodejs/node/pull/46173 Fixes: https://github.com/nodejs/node/issues/32395 Reviewed-By: Yagiz Nizipli Reviewed-By: Paolo Insogna Reviewed-By: James M Snell Reviewed-By: Matteo Collina --- lib/_http_server.js | 2 +- test/parallel/test-http-write-head-2.js | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/lib/_http_server.js b/lib/_http_server.js index de7133241cc962..b46e7b0906dfea 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -352,7 +352,7 @@ function writeHead(statusCode, reason, obj) { // writeHead(statusCode[, headers]) if (!this.statusMessage) this.statusMessage = STATUS_CODES[statusCode] || 'unknown'; - obj = reason; + obj ??= reason; } this.statusCode = statusCode; diff --git a/test/parallel/test-http-write-head-2.js b/test/parallel/test-http-write-head-2.js index a47d0d72e3be56..d64b8259f01215 100644 --- a/test/parallel/test-http-write-head-2.js +++ b/test/parallel/test-http-write-head-2.js @@ -59,3 +59,21 @@ const http = require('http'); })); })); } + +{ + const server = http.createServer(common.mustCall((req, res) => { + res.writeHead(200, undefined, [ 'foo', 'bar' ]); + res.end(); + })); + + server.listen(0, common.mustCall(() => { + http.get({ port: server.address().port }, common.mustCall((res) => { + assert.strictEqual(res.statusMessage, 'OK'); + 
assert.strictEqual(res.statusCode, 200); + assert.strictEqual(res.headers.foo, 'bar'); + res.resume().on('end', common.mustCall(() => { + server.close(); + })); + })); + })); +} From 94605b16650e28f2b610014486b4b46c3729d757 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Tue, 17 Jan 2023 11:01:26 +0100 Subject: [PATCH 170/191] src: replace unreachable code with static_assert This function base64-decodes a given JavaScript string to obtain the secret key, whose length must not exceed INT_MAX. However, because JavaScript strings are limited to v8::String::kMaxLength chars and because base64 decoding never yields more bytes than input chars, the size of the decoded key must be strictly less than v8::String::kMaxLength bytes. Therefore, it is sufficient to statically assert that String::kMaxLength <= INT_MAX (which is always true because String::kMaxLength itself is an int). Aside from being unreachable, Coverity considers the current code "suspicious" because it indicates that buffers larger than INT_MAX might actually be allocated. PR-URL: https://github.com/nodejs/node/pull/46209 Reviewed-By: Luigi Pinca Reviewed-By: Yagiz Nizipli Reviewed-By: Rich Trott Reviewed-By: Colin Ihrig Reviewed-By: Minwoo Jung Reviewed-By: Darshan Sen Reviewed-By: Filip Skokan --- src/crypto/crypto_keys.cc | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/crypto/crypto_keys.cc b/src/crypto/crypto_keys.cc index d1ea8f8f2cde7e..f5661ccedad4f7 100644 --- a/src/crypto/crypto_keys.cc +++ b/src/crypto/crypto_keys.cc @@ -479,12 +479,8 @@ std::shared_ptr ImportJWKSecretKey( return std::shared_ptr(); } + static_assert(String::kMaxLength <= INT_MAX); ByteSource key_data = ByteSource::FromEncodedString(env, key.As()); - if (key_data.size() > INT_MAX) { - THROW_ERR_CRYPTO_INVALID_KEYLEN(env); - return std::shared_ptr(); - } - return KeyObjectData::CreateSecret(std::move(key_data)); } From e620de64448d05077caa843bebf58354ddb8bd35 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Tue, 17 Jan 2023 22:40:39 +0900 Subject: [PATCH 171/191] http: refactor to use `validateHeaderName` Remove duplicate implementation by using validateHeaderName. PR-URL: https://github.com/nodejs/node/pull/46143 Reviewed-By: Matteo Collina Reviewed-By: Antoine du Hamel --- doc/api/http.md | 7 ++++++- lib/_http_outgoing.js | 8 +++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/doc/api/http.md b/doc/api/http.md index bb28a440b79fda..bbefe1e49a8199 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -3619,13 +3619,18 @@ Passing an `AbortSignal` and then calling `abort` on the corresponding `AbortController` will behave the same way as calling `.destroy()` on the request itself. -## `http.validateHeaderName(name)` +## `http.validateHeaderName(name[, label])` * `name` {string} +* `label` {string} Label for error message. **Default:** `'Header name'`. Performs the low-level validations on the provided `name` that are done when `res.setHeader(name, value)` is called. 
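A minimal sketch of the extended signature in use (the header names here are illustrative; only the error-message label changes with the second argument):

```js
const { validateHeaderName } = require('node:http');

validateHeaderName('x-trace-id'); // passes: a valid HTTP token

try {
  validateHeaderName('invalid header', 'Trailer name');
} catch (err) {
  // The space makes the name an invalid HTTP token, so this throws
  // ERR_INVALID_HTTP_TOKEN with the supplied label in its message.
  console.error(err.code); // 'ERR_INVALID_HTTP_TOKEN'
}
```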
diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 8c80eabaec9e74..60ea7ca5ef9c29 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -628,9 +628,9 @@ function matchHeader(self, state, field, value) { } } -const validateHeaderName = hideStackFrames((name) => { +const validateHeaderName = hideStackFrames((name, label) => { if (typeof name !== 'string' || !name || !checkIsHttpToken(name)) { - throw new ERR_INVALID_HTTP_TOKEN('Header name', name); + throw new ERR_INVALID_HTTP_TOKEN(label || 'Header name', name); } }); @@ -933,9 +933,7 @@ OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { field = key; value = headers[key]; } - if (typeof field !== 'string' || !field || !checkIsHttpToken(field)) { - throw new ERR_INVALID_HTTP_TOKEN('Trailer name', field); - } + validateHeaderName(field, 'Trailer name'); // Check if the field must be sent several times const isArrayValue = ArrayIsArray(value); From 463bb9602eff5ca62d9322215c17f733694ec456 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Thu, 12 Jan 2023 22:11:11 +0100 Subject: [PATCH 172/191] esm: mark `importAssertions` as required MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We already always specify a value, and failing to do so would likely be a bug. PR-URL: https://github.com/nodejs/node/pull/46164 Reviewed-By: Geoffrey Booth Reviewed-By: Michaël Zasso --- lib/internal/modules/esm/loader.js | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 692ba834631f05..120ab8ccb0beae 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -782,15 +782,11 @@ class ESMLoader { * @param {string} originalSpecifier The specified URL path of the module to * be resolved. * @param {string} [parentURL] The URL path of the module's parent. - * @param {ImportAssertions} [importAssertions] Assertions from the import + * @param {ImportAssertions} importAssertions Assertions from the import * statement or expression. * @returns {{ format: string, url: URL['href'] }} */ - async resolve( - originalSpecifier, - parentURL, - importAssertions = ObjectCreate(null), - ) { + async resolve(originalSpecifier, parentURL, importAssertions) { const isMain = parentURL === undefined; if ( From 5294371063ee09e4a4826b81bb1300e6dcc96612 Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Fri, 6 Jan 2023 13:30:21 -0500 Subject: [PATCH 173/191] doc: add text around collaborative expectations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - add text discussed by the TSC Signed-off-by: Michael Dawson PR-URL: https://github.com/nodejs/node/pull/46121 Reviewed-By: Robert Nagy Reviewed-By: Geoffrey Booth Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Matteo Collina Reviewed-By: Gireesh Punathil Reviewed-By: Darshan Sen Reviewed-By: Beth Griggs Reviewed-By: Antoine du Hamel Reviewed-By: Joe Sepi Reviewed-By: Chengzhong Wu Reviewed-By: Rafael Gonzaga --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 3e35962e6f5932..fb3f183934db00 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,12 @@ For information on using Node.js, see the [Node.js website][]. The Node.js project uses an [open governance model](./GOVERNANCE.md). The [OpenJS Foundation][] provides support for the project. 
+Contributors are expected to act in a collaborative manner to move +the project forward. We encourage the constructive exchange of contrary +opinions and compromise. The [TSC](./GOVERNANCE.md#technical-steering-committee) +reserves the right to limit or block contributors who repeatedly act in ways +that discourage, exhaust, or otherwise negatively affect other participants. + **This project has a [Code of Conduct][].** ## Table of contents From a558774a40f93daef71c03c872ff66576c54a80e Mon Sep 17 00:00:00 2001 From: vitpavlenko Date: Tue, 17 Jan 2023 19:39:14 +0200 Subject: [PATCH 174/191] crypto: add cipher update/final methods encoding validation Refs #45189 PR-URL: https://github.com/nodejs/node/pull/45990 Refs: https://github.com/nodejs/node/issues/45189 Reviewed-By: James M Snell Reviewed-By: Filip Skokan --- lib/internal/crypto/cipher.js | 10 +++- .../test-crypto-encoding-validation-error.js | 52 +++++++++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 test/parallel/test-crypto-encoding-validation-error.js diff --git a/lib/internal/crypto/cipher.js b/lib/internal/crypto/cipher.js index d85606ba52b5ac..fe2cc0f5258d7d 100644 --- a/lib/internal/crypto/cipher.js +++ b/lib/internal/crypto/cipher.js @@ -27,6 +27,7 @@ const { ERR_CRYPTO_INVALID_STATE, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, + ERR_UNKNOWN_ENCODING, } } = require('internal/errors'); @@ -91,9 +92,14 @@ const privateDecrypt = rsaFunctionFor(_privateDecrypt, RSA_PKCS1_OAEP_PADDING, 'private'); function getDecoder(decoder, encoding) { - encoding = normalizeEncoding(encoding); + const normalizedEncoding = normalizeEncoding(encoding); decoder = decoder || new StringDecoder(encoding); - assert(decoder.encoding === encoding, 'Cannot change encoding'); + if (decoder.encoding !== normalizedEncoding) { + if (normalizedEncoding === undefined) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + assert(false, 'Cannot change encoding'); + } return decoder; } diff --git a/test/parallel/test-crypto-encoding-validation-error.js b/test/parallel/test-crypto-encoding-validation-error.js new file mode 100644 index 00000000000000..0e921ac2862f49 --- /dev/null +++ b/test/parallel/test-crypto-encoding-validation-error.js @@ -0,0 +1,52 @@ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +// This test checks if error is thrown in case of wrong encoding provided into cipher. 
+ +const assert = require('assert'); +const { createCipheriv, randomBytes } = require('crypto'); + +const createCipher = () => { + return createCipheriv('aes-256-cbc', randomBytes(32), randomBytes(16)); +}; + +{ + const cipher = createCipher(); + cipher.update('test', 'utf-8', 'utf-8'); + + assert.throws( + () => cipher.update('666f6f', 'hex', 'hex'), + { message: /Cannot change encoding/ } + ); +} + +{ + const cipher = createCipher(); + cipher.update('test', 'utf-8', 'utf-8'); + + assert.throws( + () => cipher.final('hex'), + { message: /Cannot change encoding/ } + ); +} + +{ + const cipher = createCipher(); + cipher.update('test', 'utf-8', 'utf-8'); + + assert.throws( + () => cipher.final('bad2'), + { message: /^Unknown encoding: bad2$/, code: 'ERR_UNKNOWN_ENCODING' } + ); +} + +{ + const cipher = createCipher(); + + assert.throws( + () => cipher.update('test', 'utf-8', 'bad3'), + { message: /^Unknown encoding: bad3$/, code: 'ERR_UNKNOWN_ENCODING' } + ); +} From a9db45eee1df81cda4a0e03e1554986a37018259 Mon Sep 17 00:00:00 2001 From: Tim Gerk Date: Tue, 17 Jan 2023 17:53:41 -0800 Subject: [PATCH 175/191] doc: add note to tls docs about secureContext availability tls.createServer() and new tls.Server() ignore secureContext option. PR-URL: https://github.com/nodejs/node/pull/46224 Reviewed-By: Luigi Pinca Reviewed-By: Paolo Insogna Reviewed-By: Rich Trott --- doc/api/tls.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/api/tls.md b/doc/api/tls.md index 170e189a70c56e..075c169c93b664 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -1938,8 +1938,9 @@ from `process.argv` as the default value of the `sessionIdContext` option, other APIs that create secure contexts have no default value. The `tls.createSecureContext()` method creates a `SecureContext` object. It is -usable as an argument to several `tls` APIs, such as [`tls.createServer()`][] -and [`server.addContext()`][], but has no public methods. +usable as an argument to several `tls` APIs, such as [`server.addContext()`][], +but has no public methods. The [`tls.Server`][] constructor and the +[`tls.createServer()`][] method do not support the `secureContext` option. A key is _required_ for ciphers that use certificates. Either `key` or `pfx` can be used to provide it. 
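A short sketch of where a prebuilt `SecureContext` is actually consumed, per the note above (the PEM file paths are assumptions):

```js
const tls = require('node:tls');
const fs = require('node:fs');

const key = fs.readFileSync('server-key.pem');   // assumed path
const cert = fs.readFileSync('server-cert.pem'); // assumed path

// The server's own credentials come from plain options; a
// `secureContext` option here would be ignored.
const server = tls.createServer({ key, cert });

// server.addContext() does accept a SecureContext, e.g. for
// SNI-based virtual hosting.
server.addContext('example.com', tls.createSecureContext({ key, cert }));
```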
From 896027c00620101db39c55b8eef5e213dec792ee Mon Sep 17 00:00:00 2001 From: Suyash Nayan <89125422+7suyash7@users.noreply.github.com> Date: Wed, 18 Jan 2023 12:40:48 +0530 Subject: [PATCH 176/191] test: add fix so that test exits if port 42 is unprivileged PR-URL: https://github.com/nodejs/node/pull/45904 Fixes: https://github.com/nodejs/node/issues/45838 Reviewed-By: Ben Noordhuis Reviewed-By: Benjamin Gruenbaum Reviewed-By: Luigi Pinca --- .../test-cluster-bind-privileged-port.js | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-cluster-bind-privileged-port.js b/test/parallel/test-cluster-bind-privileged-port.js index b952ac1c6ce5d3..1249230177d2ec 100644 --- a/test/parallel/test-cluster-bind-privileged-port.js +++ b/test/parallel/test-cluster-bind-privileged-port.js @@ -21,6 +21,18 @@ 'use strict'; const common = require('../common'); +const assert = require('assert'); +const cluster = require('cluster'); +const net = require('net'); +const { execSync } = require('child_process'); + +if (common.isLinux) { + const sysctlOutput = execSync('sysctl net.ipv4.ip_unprivileged_port_start').toString(); + const unprivilegedPortStart = parseInt(sysctlOutput.split(' ')[2], 10); + if (unprivilegedPortStart <= 42) { + common.skip('Port 42 is unprivileged'); + } +} // Skip on OS X Mojave. https://github.com/nodejs/node/issues/21679 if (common.isOSX) @@ -35,10 +47,6 @@ if (common.isWindows) if (process.getuid() === 0) common.skip('Test is not supposed to be run as root.'); -const assert = require('assert'); -const cluster = require('cluster'); -const net = require('net'); - if (cluster.isPrimary) { cluster.fork().on('exit', common.mustCall((exitCode) => { assert.strictEqual(exitCode, 0); From cde59606cd7fb436f2dc0dd06e3bbdd5431e61b9 Mon Sep 17 00:00:00 2001 From: Debadree Chatterjee Date: Wed, 18 Jan 2023 13:26:49 +0530 Subject: [PATCH 177/191] stream: implement finished() for ReadableStream and WritableStream Refs: https://github.com/nodejs/node/issues/39316 PR-URL: https://github.com/nodejs/node/pull/46205 Reviewed-By: Robert Nagy Reviewed-By: Matteo Collina Reviewed-By: Darshan Sen Reviewed-By: James M Snell --- lib/internal/streams/end-of-stream.js | 25 ++- lib/internal/streams/utils.js | 25 +++ lib/internal/webstreams/readablestream.js | 14 +- lib/internal/webstreams/writablestream.js | 14 +- test/parallel/test-webstreams-finished.js | 232 ++++++++++++++++++++++ 5 files changed, 301 insertions(+), 9 deletions(-) create mode 100644 test/parallel/test-webstreams-finished.js diff --git a/lib/internal/streams/end-of-stream.js b/lib/internal/streams/end-of-stream.js index ca42174c86459a..07f80aedc69cd5 100644 --- a/lib/internal/streams/end-of-stream.js +++ b/lib/internal/streams/end-of-stream.js @@ -22,20 +22,23 @@ const { validateBoolean } = require('internal/validators'); -const { Promise } = primordials; +const { Promise, PromisePrototypeThen } = primordials; const { isClosed, isReadable, isReadableNodeStream, + isReadableStream, isReadableFinished, isReadableErrored, isWritable, isWritableNodeStream, + isWritableStream, isWritableFinished, isWritableErrored, isNodeStream, willEmitClose: _willEmitClose, + kIsClosedPromise, } = require('internal/streams/utils'); function isRequest(stream) { @@ -58,14 +61,17 @@ function eos(stream, options, callback) { callback = once(callback); - const readable = options.readable ?? isReadableNodeStream(stream); - const writable = options.writable ?? 
isWritableNodeStream(stream); + if (isReadableStream(stream) || isWritableStream(stream)) { + return eosWeb(stream, options, callback); + } if (!isNodeStream(stream)) { - // TODO: Webstreams. - throw new ERR_INVALID_ARG_TYPE('stream', 'Stream', stream); + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream); } + const readable = options.readable ?? isReadableNodeStream(stream); + const writable = options.writable ?? isWritableNodeStream(stream); + const wState = stream._writableState; const rState = stream._readableState; @@ -255,6 +261,15 @@ function eos(stream, options, callback) { return cleanup; } +function eosWeb(stream, opts, callback) { + PromisePrototypeThen( + stream[kIsClosedPromise].promise, + () => process.nextTick(() => callback.call(stream)), + (err) => process.nextTick(() => callback.call(stream, err)), + ); + return nop; +} + function finished(stream, opts) { let autoCleanup = false; if (opts === null) { diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js index 4d4f00ab456fa7..9d08af6f31a280 100644 --- a/lib/internal/streams/utils.js +++ b/lib/internal/streams/utils.js @@ -4,6 +4,7 @@ const { Symbol, SymbolAsyncIterator, SymbolIterator, + SymbolFor, } = primordials; const kDestroyed = Symbol('kDestroyed'); @@ -11,6 +12,8 @@ const kIsErrored = Symbol('kIsErrored'); const kIsReadable = Symbol('kIsReadable'); const kIsDisturbed = Symbol('kIsDisturbed'); +const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise'); + function isReadableNodeStream(obj, strict = false) { return !!( obj && @@ -55,6 +58,25 @@ function isNodeStream(obj) { ); } +function isReadableStream(obj) { + return !!( + obj && + !isNodeStream(obj) && + typeof obj.pipeThrough === 'function' && + typeof obj.getReader === 'function' && + typeof obj.cancel === 'function' + ); +} + +function isWritableStream(obj) { + return !!( + obj && + !isNodeStream(obj) && + typeof obj.getWriter === 'function' && + typeof obj.abort === 'function' + ); +} + function isIterable(obj, isAsync) { if (obj == null) return false; if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function'; @@ -269,18 +291,21 @@ module.exports = { kIsErrored, isReadable, kIsReadable, + kIsClosedPromise, isClosed, isDestroyed, isDuplexNodeStream, isFinished, isIterable, isReadableNodeStream, + isReadableStream, isReadableEnded, isReadableFinished, isReadableErrored, isNodeStream, isWritable, isWritableNodeStream, + isWritableStream, isWritableEnded, isWritableFinished, isWritableErrored, diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index e94f8fbcf156c4..2a662ac8d2c2ef 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -85,6 +85,7 @@ const { kIsDisturbed, kIsErrored, kIsReadable, + kIsClosedPromise, } = require('internal/streams/utils'); const { @@ -258,9 +259,11 @@ class ReadableStream { port1: undefined, port2: undefined, promise: undefined, - } + }, }; + this[kIsClosedPromise] = createDeferredPromise(); + // The spec requires handling of the strategy first // here. 
Specifically, if getting the size and // highWaterMark from the strategy fail, that has @@ -652,8 +655,9 @@ function TransferredReadableStream() { writable: undefined, port: undefined, promise: undefined, - } + }, }; + this[kIsClosedPromise] = createDeferredPromise(); }, [], ReadableStream)); } @@ -1213,8 +1217,9 @@ function createTeeReadableStream(start, pull, cancel) { writable: undefined, port: undefined, promise: undefined, - } + }, }; + this[kIsClosedPromise] = createDeferredPromise(); setupReadableStreamDefaultControllerFromSource( this, ObjectCreate(null, { @@ -1887,6 +1892,7 @@ function readableStreamCancel(stream, reason) { function readableStreamClose(stream) { assert(stream[kState].state === 'readable'); stream[kState].state = 'closed'; + stream[kIsClosedPromise].resolve(); const { reader, @@ -1908,6 +1914,8 @@ function readableStreamError(stream, error) { assert(stream[kState].state === 'readable'); stream[kState].state = 'errored'; stream[kState].storedError = error; + stream[kIsClosedPromise].reject(error); + setPromiseHandled(stream[kIsClosedPromise].promise); const { reader diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js index 3d5851753057a9..f0fdf35d643695 100644 --- a/lib/internal/webstreams/writablestream.js +++ b/lib/internal/webstreams/writablestream.js @@ -69,6 +69,10 @@ const { kState, } = require('internal/webstreams/util'); +const { + kIsClosedPromise, +} = require('internal/streams/utils'); + const { AbortController, } = require('internal/abort_controller'); @@ -191,9 +195,11 @@ class WritableStream { port1: undefined, port2: undefined, promise: undefined, - } + }, }; + this[kIsClosedPromise] = createDeferredPromise(); + const size = extractSizeAlgorithm(strategy?.size); const highWaterMark = extractHighWaterMark(strategy?.highWaterMark, 1); @@ -363,6 +369,7 @@ function TransferredWritableStream() { readable: undefined, }, }; + this[kIsClosedPromise] = createDeferredPromise(); }, [], WritableStream)); } @@ -742,6 +749,10 @@ function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { resolve: undefined, }; } + + stream[kIsClosedPromise].reject(stream[kState]?.storedError); + setPromiseHandled(stream[kIsClosedPromise].promise); + const { writer, } = stream[kState]; @@ -855,6 +866,7 @@ function writableStreamFinishInFlightClose(stream) { stream[kState].state = 'closed'; if (stream[kState].writer !== undefined) stream[kState].writer[kState].close.resolve?.(); + stream[kIsClosedPromise].resolve?.(); assert(stream[kState].pendingAbortRequest.abort.promise === undefined); assert(stream[kState].storedError === undefined); } diff --git a/test/parallel/test-webstreams-finished.js b/test/parallel/test-webstreams-finished.js new file mode 100644 index 00000000000000..65a14d863eb922 --- /dev/null +++ b/test/parallel/test-webstreams-finished.js @@ -0,0 +1,232 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { ReadableStream, WritableStream } = require('stream/web'); +const { finished } = require('stream'); +const { finished: finishedPromise } = require('stream/promises'); + +{ + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('asd'); + controller.close(); + }, + }); + finished(rs, common.mustSucceed()); + async function test() { + const values = []; + for await (const chunk of rs) { + values.push(chunk); + } + assert.deepStrictEqual(values, ['asd']); + } + test(); +} + +{ + const rs = new ReadableStream({ + start(controller) { + 
controller.error(new Error('asd')); + } + }); + + finished(rs, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); +} + +{ + const rs = new ReadableStream({ + async start(controller) { + throw new Error('asd'); + } + }); + + finished(rs, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); +} + +{ + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('asd'); + controller.close(); + } + }); + + async function test() { + const values = []; + for await (const chunk of rs) { + values.push(chunk); + } + assert.deepStrictEqual(values, ['asd']); + } + + finishedPromise(rs).then(common.mustSucceed()); + + test(); +} + +{ + const rs = new ReadableStream({ + start(controller) { + controller.error(new Error('asd')); + } + }); + + finishedPromise(rs).then(common.mustNotCall()).catch(common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); +} + +{ + const rs = new ReadableStream({ + async start(controller) { + throw new Error('asd'); + } + }); + + finishedPromise(rs).then(common.mustNotCall()).catch(common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); +} + +{ + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('asd'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = rs.tee(); + + finished(s1, common.mustSucceed()); + finished(s2, common.mustSucceed()); + + async function test(stream) { + const values = []; + for await (const chunk of stream) { + values.push(chunk); + } + assert.deepStrictEqual(values, ['asd']); + } + + Promise.all([ + test(s1), + test(s2), + ]).then(common.mustCall()); +} + +{ + const rs = new ReadableStream({ + start(controller) { + controller.error(new Error('asd')); + } + }); + + const { 0: s1, 1: s2 } = rs.tee(); + + finished(s1, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); + + finished(s2, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); +} + +{ + const rs = new ReadableStream({ + start(controller) { + controller.enqueue('asd'); + controller.close(); + } + }); + + finished(rs, common.mustSucceed()); + + rs.cancel(); +} + +{ + let str = ''; + const ws = new WritableStream({ + write(chunk) { + str += chunk; + } + }); + + finished(ws, common.mustSucceed(() => { + assert.strictEqual(str, 'asd'); + })); + + const writer = ws.getWriter(); + writer.write('asd'); + writer.close(); +} + +{ + const ws = new WritableStream({ + async write(chunk) { + throw new Error('asd'); + } + }); + + finished(ws, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); + + const writer = ws.getWriter(); + writer.write('asd').catch((err) => { + assert.strictEqual(err?.message, 'asd'); + }); +} + +{ + let str = ''; + const ws = new WritableStream({ + write(chunk) { + str += chunk; + } + }); + + finishedPromise(ws).then(common.mustSucceed(() => { + assert.strictEqual(str, 'asd'); + })); + + const writer = ws.getWriter(); + writer.write('asd'); + writer.close(); +} + +{ + const ws = new WritableStream({ + write(chunk) { } + }); + finished(ws, common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); + + const writer = ws.getWriter(); + writer.abort(new Error('asd')); +} + +{ + const ws = new WritableStream({ + async write(chunk) { + throw new Error('asd'); + } + }); + + finishedPromise(ws).then(common.mustNotCall()).catch(common.mustCall((err) => { + assert.strictEqual(err?.message, 'asd'); + })); + + const writer = ws.getWriter(); + 
writer.write('asd').catch((err) => { + assert.strictEqual(err?.message, 'asd'); + }); +} From eaeb870cd73d3f92ca5bd3a60944274d4fc8bec6 Mon Sep 17 00:00:00 2001 From: npm CLI robot Date: Wed, 18 Jan 2023 09:48:47 -0500 Subject: [PATCH 178/191] deps: upgrade npm to 9.3.1 PR-URL: https://github.com/nodejs/node/pull/46242 Reviewed-By: Ruy Adorno Reviewed-By: Myles Borins Reviewed-By: Rich Trott Reviewed-By: Luigi Pinca --- deps/npm/docs/content/commands/npm-ls.md | 2 +- deps/npm/docs/content/commands/npm.md | 2 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm.html | 2 +- deps/npm/lib/commands/ci.js | 2 +- deps/npm/lib/commands/deprecate.js | 24 ++++++++++++----------- deps/npm/man/man1/npm-ls.1 | 2 +- deps/npm/man/man1/npm.1 | 2 +- deps/npm/package.json | 2 +- deps/npm/test/lib/commands/ci.js | 24 +++++++++++++++++++++++ deps/npm/test/lib/commands/deprecate.js | 16 +++++++++++++++ 11 files changed, 61 insertions(+), 19 deletions(-) diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index d8b6f4a7de63dc..4690d7752f0ad1 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@9.3.0 /path/to/npm +npm@9.3.1 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md index 2396b40cf5e8c7..f2fdaee318d643 100644 --- a/deps/npm/docs/content/commands/npm.md +++ b/deps/npm/docs/content/commands/npm.md @@ -14,7 +14,7 @@ Note: This command is unaware of workspaces. ### Version -9.3.0 +9.3.1 ### Description diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index ffe72db7c54b8c..4bb067ab67a65b 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -160,7 +160,7 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@9.3.0 /path/to/npm
+npm@9.3.1 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     
    diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index cb6d4ea3c7b81e..76f4fc5cc17c9c 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -150,7 +150,7 @@

    Table of contents

    Note: This command is unaware of workspaces.

    Version

-9.3.0
+9.3.1

    Description

    npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/lib/commands/ci.js b/deps/npm/lib/commands/ci.js index a2c61044eb96ee..4dd7898dc6182a 100644 --- a/deps/npm/lib/commands/ci.js +++ b/deps/npm/lib/commands/ci.js @@ -67,7 +67,7 @@ class CI extends ArboristWorkspaceCmd { const path = `${where}/node_modules` // get the list of entries so we can skip the glob for performance const entries = await fs.readdir(path, null).catch(er => []) - return Promise.all(entries.map(f => fs.rm(`${path}/${f}`, { force: true }))) + return Promise.all(entries.map(f => fs.rm(`${path}/${f}`, { force: true, recursive: true }))) }) await arb.reify(opts) diff --git a/deps/npm/lib/commands/deprecate.js b/deps/npm/lib/commands/deprecate.js index 1e1b8994c55a37..844d5f60a02abe 100644 --- a/deps/npm/lib/commands/deprecate.js +++ b/deps/npm/lib/commands/deprecate.js @@ -52,19 +52,21 @@ class Deprecate extends BaseCommand { query: { write: true }, }) - Object.keys(packument.versions) + const versions = Object.keys(packument.versions) .filter(v => semver.satisfies(v, spec, { includePrerelease: true })) - .forEach(v => { - packument.versions[v].deprecated = msg - }) - return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, { - ...opts, - spec: p, - method: 'PUT', - body: packument, - ignoreBody: true, - })) + if (versions.length) { + for (const v of versions) { + packument.versions[v].deprecated = msg + } + return otplease(this.npm, this.npm.flatOptions, opts => fetch(uri, { + ...opts, + spec: p, + method: 'PUT', + body: packument, + ignoreBody: true, + })) + } } } diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 2458a585b79269..76a67908222357 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@9.3.0 /path/to/npm +npm@9.3.1 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index e29df213a0402e..1286027b16231c 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -9.3.0 +9.3.1 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. 
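The `recursive: true` added to `ci.js` above is what lets repeat runs succeed: entries directly under `node_modules` are typically directories. A rough standalone illustration, with a made-up path:

```js
const fs = require('node:fs/promises');

async function clearDir(path) {
  const entries = await fs.readdir(path).catch(() => []);
  // Without { recursive: true }, fs.rm() rejects for non-empty
  // directories, leaving package folders behind.
  await Promise.all(
    entries.map((f) => fs.rm(`${path}/${f}`, { force: true, recursive: true })),
  );
}

clearDir('node_modules').catch(console.error);
```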
diff --git a/deps/npm/package.json b/deps/npm/package.json index 73c41bc2b4fd29..423024ae21c86d 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "9.3.0", + "version": "9.3.1", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ diff --git a/deps/npm/test/lib/commands/ci.js b/deps/npm/test/lib/commands/ci.js index bd3fd9be9a6a09..000ddc0eb82705 100644 --- a/deps/npm/test/lib/commands/ci.js +++ b/deps/npm/test/lib/commands/ci.js @@ -79,6 +79,30 @@ t.test('reifies, audits, removes node_modules', async t => { t.equal(fs.existsSync(nmAbbrev), true, 'installs abbrev') }) +t.test('reifies, audits, removes node_modules on repeat run', async t => { + const { npm, joinedOutput, registry } = await loadMockNpm(t, { + prefixDir: { + abbrev: abbrev, + 'package.json': JSON.stringify(packageJson), + 'package-lock.json': JSON.stringify(packageLock), + node_modules: { test: 'test file that will be removed' }, + }, + }) + const manifest = registry.manifest({ name: 'abbrev' }) + await registry.tarball({ + manifest: manifest.versions['1.0.0'], + tarball: path.join(npm.prefix, 'abbrev'), + }) + registry.nock.post('/-/npm/v1/security/advisories/bulk').reply(200, {}) + await npm.exec('ci', []) + await npm.exec('ci', []) + t.match(joinedOutput(), 'added 1 package, and audited 2 packages in') + const nmTest = path.join(npm.prefix, 'node_modules', 'test') + t.equal(fs.existsSync(nmTest), false, 'existing node_modules is removed') + const nmAbbrev = path.join(npm.prefix, 'node_modules', 'abbrev') + t.equal(fs.existsSync(nmAbbrev), true, 'installs abbrev') +}) + t.test('--no-audit and --ignore-scripts', async t => { const { npm, joinedOutput, registry } = await loadMockNpm(t, { config: { diff --git a/deps/npm/test/lib/commands/deprecate.js b/deps/npm/test/lib/commands/deprecate.js index 22ddfe182de6ef..48513c7303a015 100644 --- a/deps/npm/test/lib/commands/deprecate.js +++ b/deps/npm/test/lib/commands/deprecate.js @@ -153,3 +153,19 @@ t.test('deprecates all versions when no range is specified', async t => { await npm.exec('deprecate', ['foo', message]) t.match(joinedOutput(), '') }) + +t.test('does nothing if version does not actually exist', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { config: { ...auth } }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + const manifest = registry.manifest({ + name: 'foo', + versions, + }) + await registry.package({ manifest, query: { write: true } }) + await npm.exec('deprecate', ['foo@1.0.99', 'this should be ignored']) + t.match(joinedOutput(), '') +}) From 52a7887b9452a5aee056656eb60a0f502bf38295 Mon Sep 17 00:00:00 2001 From: Filip Skokan Date: Wed, 18 Jan 2023 19:07:26 +0100 Subject: [PATCH 179/191] crypto: add CryptoKey Symbol.toStringTag MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit closes #45987 PR-URL: https://github.com/nodejs/node/pull/46042 Fixes: https://github.com/nodejs/node/issues/45987 Reviewed-By: Antoine du Hamel Reviewed-By: Tobias Nießen Reviewed-By: Juan José Arboleda --- lib/internal/crypto/keys.js | 5 +++++ test/parallel/test-webcrypto-keygen.js | 8 ++++++++ 2 files changed, 13 insertions(+) diff --git a/lib/internal/crypto/keys.js b/lib/internal/crypto/keys.js index 917853af98a4c3..bb7dd7dbd0d1eb 100644 --- a/lib/internal/crypto/keys.js +++ b/lib/internal/crypto/keys.js @@ -696,6 +696,11 @@ ObjectDefineProperties(CryptoKey.prototype, 
{ extractable: kEnumerableProperty, algorithm: kEnumerableProperty, usages: kEnumerableProperty, + [SymbolToStringTag]: { + __proto__: null, + configurable: true, + value: 'CryptoKey', + }, }); // All internal code must use new InternalCryptoKey to create diff --git a/test/parallel/test-webcrypto-keygen.js b/test/parallel/test-webcrypto-keygen.js index b0977d5935082e..946a1e58889122 100644 --- a/test/parallel/test-webcrypto-keygen.js +++ b/test/parallel/test-webcrypto-keygen.js @@ -277,6 +277,8 @@ const vectors = { assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(publicKey.toString(), '[object CryptoKey]'); + assert.strictEqual(privateKey.toString(), '[object CryptoKey]'); assert.strictEqual(publicKey.extractable, true); assert.strictEqual(privateKey.extractable, true); assert.deepStrictEqual(publicKey.usages, publicUsages); @@ -439,6 +441,8 @@ const vectors = { assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(publicKey.toString(), '[object CryptoKey]'); + assert.strictEqual(privateKey.toString(), '[object CryptoKey]'); assert.strictEqual(publicKey.extractable, true); assert.strictEqual(privateKey.extractable, true); assert.deepStrictEqual(publicKey.usages, publicUsages); @@ -503,6 +507,7 @@ const vectors = { assert(isCryptoKey(key)); assert.strictEqual(key.type, 'secret'); + assert.strictEqual(key.toString(), '[object CryptoKey]'); assert.strictEqual(key.extractable, true); assert.deepStrictEqual(key.usages, usages); assert.strictEqual(key.algorithm.name, name); @@ -562,6 +567,7 @@ const vectors = { assert(isCryptoKey(key)); assert.strictEqual(key.type, 'secret'); + assert.strictEqual(key.toString(), '[object CryptoKey]'); assert.strictEqual(key.extractable, true); assert.deepStrictEqual(key.usages, usages); assert.strictEqual(key.algorithm.name, 'HMAC'); @@ -629,6 +635,8 @@ assert.throws(() => new CryptoKey(), { code: 'ERR_ILLEGAL_CONSTRUCTOR' }); assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(publicKey.toString(), '[object CryptoKey]'); + assert.strictEqual(privateKey.toString(), '[object CryptoKey]'); assert.strictEqual(publicKey.extractable, true); assert.strictEqual(privateKey.extractable, true); assert.deepStrictEqual(publicKey.usages, publicUsages); From 6317502d108b40986eee35265b63a6a690e606ad Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Thu, 19 Jan 2023 04:25:11 +0900 Subject: [PATCH 180/191] doc: update events API example to have runnable code Fixes: https://github.com/nodejs/node/issues/45755 PR-URL: https://github.com/nodejs/node/pull/45760 Reviewed-By: Luigi Pinca Reviewed-By: Tierney Cyren Reviewed-By: James M Snell --- doc/api/events.md | 297 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 281 insertions(+), 16 deletions(-) diff --git a/doc/api/events.md b/doc/api/events.md index b53c6a6bacaf11..0b61c1add6f20e 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -62,7 +62,26 @@ an ordinary listener function is called, the standard `this` keyword is intentionally set to reference the `EventEmitter` instance to which the listener is attached. 
-```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +myEmitter.on('event', function(a, b) { + console.log(a, b, this, this === myEmitter); + // Prints: + // a b MyEmitter { + // _events: [Object: null prototype] { event: [Function (anonymous)] }, + // _eventsCount: 1, + // _maxListeners: undefined, + // [Symbol(kCapture)]: false + // } true +}); +myEmitter.emit('event', 'a', 'b'); +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); myEmitter.on('event', function(a, b) { console.log(a, b, this, this === myEmitter); @@ -80,7 +99,20 @@ myEmitter.emit('event', 'a', 'b'); It is possible to use ES6 Arrow Functions as listeners, however, when doing so, the `this` keyword will no longer reference the `EventEmitter` instance: -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +myEmitter.on('event', (a, b) => { + console.log(a, b, this); + // Prints: a b {} +}); +myEmitter.emit('event', 'a', 'b'); +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); myEmitter.on('event', (a, b) => { console.log(a, b, this); @@ -97,7 +129,21 @@ events and helps avoid race conditions and logic errors. When appropriate, listener functions can switch to an asynchronous mode of operation using the `setImmediate()` or `process.nextTick()` methods: -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +myEmitter.on('event', (a, b) => { + setImmediate(() => { + console.log('this happens asynchronously'); + }); +}); +myEmitter.emit('event', 'a', 'b'); +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); myEmitter.on('event', (a, b) => { setImmediate(() => { @@ -112,7 +158,23 @@ myEmitter.emit('event', 'a', 'b'); When a listener is registered using the `eventEmitter.on()` method, that listener is invoked _every time_ the named event is emitted. -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +let m = 0; +myEmitter.on('event', () => { + console.log(++m); +}); +myEmitter.emit('event'); +// Prints: 1 +myEmitter.emit('event'); +// Prints: 2 +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); let m = 0; myEmitter.on('event', () => { @@ -128,7 +190,23 @@ Using the `eventEmitter.once()` method, it is possible to register a listener that is called at most once for a particular event. Once the event is emitted, the listener is unregistered and _then_ called. 
-```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +let m = 0; +myEmitter.once('event', () => { + console.log(++m); +}); +myEmitter.emit('event'); +// Prints: 1 +myEmitter.emit('event'); +// Ignored +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); let m = 0; myEmitter.once('event', () => { @@ -150,7 +228,17 @@ If an `EventEmitter` does _not_ have at least one listener registered for the `'error'` event, and an `'error'` event is emitted, the error is thrown, a stack trace is printed, and the Node.js process exits. -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +myEmitter.emit('error', new Error('whoops!')); +// Throws and crashes Node.js +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); myEmitter.emit('error', new Error('whoops!')); // Throws and crashes Node.js @@ -161,7 +249,20 @@ used. (Note, however, that the `node:domain` module is deprecated.) As a best practice, listeners should always be added for the `'error'` events. -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); +myEmitter.on('error', (err) => { + console.error('whoops! there was an error'); +}); +myEmitter.emit('error', new Error('whoops!')); +// Prints: whoops! there was an error +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); myEmitter.on('error', (err) => { console.error('whoops! there was an error'); @@ -200,7 +301,16 @@ myEmitter.emit('error', new Error('whoops!')); Using `async` functions with event handlers is problematic, because it can lead to an unhandled rejection in case of a thrown exception: -```js +```mjs +import { EventEmitter } from 'node:events'; +const ee = new EventEmitter(); +ee.on('something', async (value) => { + throw new Error('kaboom'); +}); +``` + +```cjs +const EventEmitter = require('node:events'); const ee = new EventEmitter(); ee.on('something', async (value) => { throw new Error('kaboom'); @@ -213,7 +323,25 @@ handler on the `Promise`. This handler routes the exception asynchronously to the [`Symbol.for('nodejs.rejection')`][rejection] method if there is one, or to [`'error'`][error] event handler if there is none. -```js +```mjs +import { EventEmitter } from 'node:events'; +const ee1 = new EventEmitter({ captureRejections: true }); +ee1.on('something', async (value) => { + throw new Error('kaboom'); +}); + +ee1.on('error', console.log); + +const ee2 = new EventEmitter({ captureRejections: true }); +ee2.on('something', async (value) => { + throw new Error('kaboom'); +}); + +ee2[Symbol.for('nodejs.rejection')] = console.log; +``` + +```cjs +const EventEmitter = require('node:events'); const ee1 = new EventEmitter({ captureRejections: true }); ee1.on('something', async (value) => { throw new Error('kaboom'); @@ -310,7 +438,31 @@ but important side effect: any _additional_ listeners registered to the same `name` _within_ the `'newListener'` callback are inserted _before_ the listener that is in the process of being added. 
-```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} + +const myEmitter = new MyEmitter(); +// Only do this once so we don't loop forever +myEmitter.once('newListener', (event, listener) => { + if (event === 'event') { + // Insert a new listener in front + myEmitter.on('event', () => { + console.log('B'); + }); + } +}); +myEmitter.on('event', () => { + console.log('A'); +}); +myEmitter.emit('event'); +// Prints: +// B +// A +``` + +```cjs +const EventEmitter = require('node:events'); class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); @@ -569,7 +721,19 @@ By default, event listeners are invoked in the order they are added. The `emitter.prependListener()` method can be used as an alternative to add the event listener to the beginning of the listeners array. -```js +```mjs +import { EventEmitter } from 'node:events'; +const myEE = new EventEmitter(); +myEE.on('foo', () => console.log('a')); +myEE.prependListener('foo', () => console.log('b')); +myEE.emit('foo'); +// Prints: +// b +// a +``` + +```cjs +const EventEmitter = require('node:events'); const myEE = new EventEmitter(); myEE.on('foo', () => console.log('a')); myEE.prependListener('foo', () => console.log('b')); @@ -604,7 +768,19 @@ By default, event listeners are invoked in the order they are added. The `emitter.prependOnceListener()` method can be used as an alternative to add the event listener to the beginning of the listeners array. -```js +```mjs +import { EventEmitter } from 'node:events'; +const myEE = new EventEmitter(); +myEE.once('foo', () => console.log('a')); +myEE.prependOnceListener('foo', () => console.log('b')); +myEE.emit('foo'); +// Prints: +// b +// a +``` + +```cjs +const EventEmitter = require('node:events'); const myEE = new EventEmitter(); myEE.once('foo', () => console.log('a')); myEE.prependOnceListener('foo', () => console.log('b')); @@ -710,7 +886,41 @@ time of emitting are called in order. This implies that any _before_ the last listener finishes execution will not remove them from `emit()` in progress. Subsequent events behave as expected. -```js +```mjs +import { EventEmitter } from 'node:events'; +class MyEmitter extends EventEmitter {} +const myEmitter = new MyEmitter(); + +const callbackA = () => { + console.log('A'); + myEmitter.removeListener('event', callbackB); +}; + +const callbackB = () => { + console.log('B'); +}; + +myEmitter.on('event', callbackA); + +myEmitter.on('event', callbackB); + +// callbackA removes listener callbackB but it will still be called. +// Internal listener array at time of emit [callbackA, callbackB] +myEmitter.emit('event'); +// Prints: +// A +// B + +// callbackB is now removed. +// Internal listener array [callbackA] +myEmitter.emit('event'); +// Prints: +// A +``` + +```cjs +const EventEmitter = require('node:events'); +class MyEmitter extends EventEmitter {} const myEmitter = new MyEmitter(); const callbackA = () => { @@ -751,7 +961,24 @@ event (as in the example below), `removeListener()` will remove the most recently added instance. 
In the example the `once('ping')` listener is removed: -```js +```mjs +import { EventEmitter } from 'node:events'; +const ee = new EventEmitter(); + +function pong() { + console.log('pong'); +} + +ee.on('ping', pong); +ee.once('ping', pong); +ee.removeListener('ping', pong); + +ee.emit('ping'); +ee.emit('ping'); +``` + +```cjs +const EventEmitter = require('node:events'); const ee = new EventEmitter(); function pong() { @@ -797,7 +1024,33 @@ added: v9.4.0 Returns a copy of the array of listeners for the event named `eventName`, including any wrappers (such as those created by `.once()`). -```js +```mjs +import { EventEmitter } from 'node:events'; +const emitter = new EventEmitter(); +emitter.once('log', () => console.log('log once')); + +// Returns a new Array with a function `onceWrapper` which has a property +// `listener` which contains the original listener bound above +const listeners = emitter.rawListeners('log'); +const logFnWrapper = listeners[0]; + +// Logs "log once" to the console and does not unbind the `once` event +logFnWrapper.listener(); + +// Logs "log once" to the console and removes the listener +logFnWrapper(); + +emitter.on('log', () => console.log('log persistently')); +// Will return a new Array with a single function bound by `.on()` above +const newListeners = emitter.rawListeners('log'); + +// Logs "log persistently" twice +newListeners[0](); +emitter.emit('log'); +``` + +```cjs +const EventEmitter = require('node:events'); const emitter = new EventEmitter(); emitter.once('log', () => console.log('log once')); @@ -907,7 +1160,19 @@ that a "possible EventEmitter memory leak" has been detected. For any single `EventEmitter`, the `emitter.getMaxListeners()` and `emitter.setMaxListeners()` methods can be used to temporarily avoid this warning: -```js +```mjs +import { EventEmitter } from 'node:events'; +const emitter = new EventEmitter(); +emitter.setMaxListeners(emitter.getMaxListeners() + 1); +emitter.once('event', () => { + // do stuff + emitter.setMaxListeners(Math.max(emitter.getMaxListeners() - 1, 0)); +}); +``` + +```cjs +const EventEmitter = require('node:events'); +const emitter = new EventEmitter(); emitter.setMaxListeners(emitter.getMaxListeners() + 1); emitter.once('event', () => { // do stuff From 1790569518749a7925077a55eedd39900795778e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Jos=C3=A9?= Date: Wed, 18 Jan 2023 14:32:36 -0500 Subject: [PATCH 181/191] test: improve test coverage for WHATWG `TextDecoder` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Juan José Arboleda PR-URL: https://github.com/nodejs/node/pull/45241 Reviewed-By: Michaël Zasso Reviewed-By: Darshan Sen Reviewed-By: Luigi Pinca Reviewed-By: Yagiz Nizipli Reviewed-By: James M Snell --- .../test-whatwg-encoding-custom-textdecoder.js | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/parallel/test-whatwg-encoding-custom-textdecoder.js b/test/parallel/test-whatwg-encoding-custom-textdecoder.js index a48d0993fc7a92..e4379aa03f870d 100644 --- a/test/parallel/test-whatwg-encoding-custom-textdecoder.js +++ b/test/parallel/test-whatwg-encoding-custom-textdecoder.js @@ -51,6 +51,19 @@ assert(TextDecoder); }); } +// Invalid encoders +{ + ['meow', 'nonunicode', 'foo', 'bar'].forEach((fakeEncoding) => { + assert.throws( + () => { new TextDecoder(fakeEncoding); }, + { + code: 'ERR_ENCODING_NOT_SUPPORTED', + name: 'RangeError' + } + ); + }); +} + // Test TextDecoder, UTF-8, fatal: true, ignoreBOM: false if 
(common.hasIntl) { ['unicode-1-1-utf-8', 'utf8', 'utf-8'].forEach((i) => { From b7fe8c70faed182a3008975dd2a458e01a9cfab9 Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Wed, 18 Jan 2023 20:50:08 +0000 Subject: [PATCH 182/191] deps: update simdutf to 3.1.0 PR-URL: https://github.com/nodejs/node/pull/46257 Reviewed-By: Anna Henningsen Reviewed-By: Joyee Cheung Reviewed-By: Yagiz Nizipli Reviewed-By: Stephen Belanger Reviewed-By: Rich Trott --- deps/simdutf/simdutf.cpp | 470 +++++++++++++++++++++++------------- deps/simdutf/simdutf.h | 507 ++++++++++++++++++++++++++++++--------- 2 files changed, 707 insertions(+), 270 deletions(-) diff --git a/deps/simdutf/simdutf.cpp b/deps/simdutf/simdutf.cpp index f9c0a649dc1b26..c987f9378bec66 100644 --- a/deps/simdutf/simdutf.cpp +++ b/deps/simdutf/simdutf.cpp @@ -1,4 +1,4 @@ -/* auto-generated on 2023-01-02 15:43:33 -0500. Do not edit! */ +/* auto-generated on 2023-01-18 12:43:26 -0500. Do not edit! */ // dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf.cpp /* begin file src/simdutf.cpp */ #include "simdutf.h" @@ -509,7 +509,7 @@ simdutf_really_inline int16x8_t make_int16x8_t(int16_t x1, int16_t x2, int16_t simdutf_really_inline void store_ascii_as_utf16(char16_t * p) const { uint16x8_t first = vmovl_u8(vget_low_u8 (vreinterpretq_u8_s8(this->value))); uint16x8_t second = vmovl_high_u8(vreinterpretq_u8_s8(this->value)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -4386,7 +4386,6 @@ class detect_best_supported_implementation_on_first_use final : public implement const implementation *set_best() const noexcept; }; -const detect_best_supported_implementation_on_first_use detect_best_supported_implementation_on_first_use_singleton; const std::initializer_list available_implementation_pointers { #if SIMDUTF_IMPLEMENTATION_ICELAKE @@ -4662,187 +4661,322 @@ const implementation *detect_best_supported_implementation_on_first_use::set_bes SIMDUTF_POP_DISABLE_WARNINGS if (force_implementation_name) { - auto force_implementation = available_implementations[force_implementation_name]; + auto force_implementation = get_available_implementations()[force_implementation_name]; if (force_implementation) { - return active_implementation = force_implementation; + return get_active_implementation() = force_implementation; } else { // Note: abort() and stderr usage within the library is forbidden. - return active_implementation = &unsupported_singleton; + return get_active_implementation() = &unsupported_singleton; } } - return active_implementation = available_implementations.detect_best_supported(); + return get_active_implementation() = get_available_implementations().detect_best_supported(); } } // namespace internal -SIMDUTF_DLLIMPORTEXPORT const internal::available_implementation_list available_implementations{}; -SIMDUTF_DLLIMPORTEXPORT internal::atomic_ptr active_implementation{&internal::detect_best_supported_implementation_on_first_use_singleton}; + + +/** + * The list of available implementations compiled into simdutf. + */ +SIMDUTF_DLLIMPORTEXPORT const internal::available_implementation_list& get_available_implementations() { + static const internal::available_implementation_list available_implementations{}; + return available_implementations; +} + +/** + * The active implementation. 
+ */ +SIMDUTF_DLLIMPORTEXPORT internal::atomic_ptr& get_active_implementation() { + static const internal::detect_best_supported_implementation_on_first_use detect_best_supported_implementation_on_first_use_singleton; + static internal::atomic_ptr active_implementation{&detect_best_supported_implementation_on_first_use_singleton}; + return active_implementation; +} simdutf_warn_unused bool validate_utf8(const char *buf, size_t len) noexcept { - return active_implementation->validate_utf8(buf, len); + return get_active_implementation()->validate_utf8(buf, len); } simdutf_warn_unused result validate_utf8_with_errors(const char *buf, size_t len) noexcept { - return active_implementation->validate_utf8_with_errors(buf, len); + return get_active_implementation()->validate_utf8_with_errors(buf, len); } simdutf_warn_unused bool validate_ascii(const char *buf, size_t len) noexcept { - return active_implementation->validate_ascii(buf, len); + return get_active_implementation()->validate_ascii(buf, len); } simdutf_warn_unused result validate_ascii_with_errors(const char *buf, size_t len) noexcept { - return active_implementation->validate_ascii_with_errors(buf, len); + return get_active_implementation()->validate_ascii_with_errors(buf, len); +} +simdutf_warn_unused size_t convert_utf8_to_utf16(const char * input, size_t length, char16_t* utf16_output) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf8_to_utf16be(input, length, utf16_output); + #else + return convert_utf8_to_utf16le(input, length, utf16_output); + #endif } simdutf_warn_unused size_t convert_utf8_to_utf16le(const char * input, size_t length, char16_t* utf16_output) noexcept { - return active_implementation->convert_utf8_to_utf16le(input, length, utf16_output); + return get_active_implementation()->convert_utf8_to_utf16le(input, length, utf16_output); } simdutf_warn_unused size_t convert_utf8_to_utf16be(const char * input, size_t length, char16_t* utf16_output) noexcept { - return active_implementation->convert_utf8_to_utf16be(input, length, utf16_output); + return get_active_implementation()->convert_utf8_to_utf16be(input, length, utf16_output); +} +simdutf_warn_unused result convert_utf8_to_utf16_with_errors(const char * input, size_t length, char16_t* utf16_output) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf8_to_utf16be_with_errors(input, length, utf16_output); + #else + return convert_utf8_to_utf16le_with_errors(input, length, utf16_output); + #endif } simdutf_warn_unused result convert_utf8_to_utf16le_with_errors(const char * input, size_t length, char16_t* utf16_output) noexcept { - return active_implementation->convert_utf8_to_utf16le_with_errors(input, length, utf16_output); + return get_active_implementation()->convert_utf8_to_utf16le_with_errors(input, length, utf16_output); } simdutf_warn_unused result convert_utf8_to_utf16be_with_errors(const char * input, size_t length, char16_t* utf16_output) noexcept { - return active_implementation->convert_utf8_to_utf16be_with_errors(input, length, utf16_output); + return get_active_implementation()->convert_utf8_to_utf16be_with_errors(input, length, utf16_output); } simdutf_warn_unused size_t convert_utf8_to_utf32(const char * input, size_t length, char32_t* utf32_output) noexcept { - return active_implementation->convert_utf8_to_utf32(input, length, utf32_output); + return get_active_implementation()->convert_utf8_to_utf32(input, length, utf32_output); } simdutf_warn_unused result convert_utf8_to_utf32_with_errors(const char * input, size_t length, char32_t* 
utf32_output) noexcept { - return active_implementation->convert_utf8_to_utf32_with_errors(input, length, utf32_output); + return get_active_implementation()->convert_utf8_to_utf32_with_errors(input, length, utf32_output); +} +simdutf_warn_unused bool validate_utf16(const char16_t * buf, size_t len) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return validate_utf16be(buf, len); + #else + return validate_utf16le(buf, len); + #endif } simdutf_warn_unused bool validate_utf16le(const char16_t * buf, size_t len) noexcept { - return active_implementation->validate_utf16le(buf, len); + return get_active_implementation()->validate_utf16le(buf, len); } simdutf_warn_unused bool validate_utf16be(const char16_t * buf, size_t len) noexcept { - return active_implementation->validate_utf16be(buf, len); + return get_active_implementation()->validate_utf16be(buf, len); +} +simdutf_warn_unused result validate_utf16_with_errors(const char16_t * buf, size_t len) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return validate_utf16be_with_errors(buf, len); + #else + return validate_utf16le_with_errors(buf, len); + #endif } simdutf_warn_unused result validate_utf16le_with_errors(const char16_t * buf, size_t len) noexcept { - return active_implementation->validate_utf16le_with_errors(buf, len); + return get_active_implementation()->validate_utf16le_with_errors(buf, len); } simdutf_warn_unused result validate_utf16be_with_errors(const char16_t * buf, size_t len) noexcept { - return active_implementation->validate_utf16be_with_errors(buf, len); + return get_active_implementation()->validate_utf16be_with_errors(buf, len); } simdutf_warn_unused bool validate_utf32(const char32_t * buf, size_t len) noexcept { - return active_implementation->validate_utf32(buf, len); + return get_active_implementation()->validate_utf32(buf, len); } simdutf_warn_unused result validate_utf32_with_errors(const char32_t * buf, size_t len) noexcept { - return active_implementation->validate_utf32_with_errors(buf, len); + return get_active_implementation()->validate_utf32_with_errors(buf, len); +} +simdutf_warn_unused size_t convert_valid_utf8_to_utf16(const char * input, size_t length, char16_t* utf16_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_valid_utf8_to_utf16be(input, length, utf16_buffer); + #else + return convert_valid_utf8_to_utf16le(input, length, utf16_buffer); + #endif } simdutf_warn_unused size_t convert_valid_utf8_to_utf16le(const char * input, size_t length, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_valid_utf8_to_utf16le(input, length, utf16_buffer); + return get_active_implementation()->convert_valid_utf8_to_utf16le(input, length, utf16_buffer); } simdutf_warn_unused size_t convert_valid_utf8_to_utf16be(const char * input, size_t length, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_valid_utf8_to_utf16be(input, length, utf16_buffer); + return get_active_implementation()->convert_valid_utf8_to_utf16be(input, length, utf16_buffer); } simdutf_warn_unused size_t convert_valid_utf8_to_utf32(const char * input, size_t length, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_valid_utf8_to_utf32(input, length, utf32_buffer); + return get_active_implementation()->convert_valid_utf8_to_utf32(input, length, utf32_buffer); +} +simdutf_warn_unused size_t convert_utf16_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf16be_to_utf8(buf, len, utf8_buffer); + #else + return 
convert_utf16le_to_utf8(buf, len, utf8_buffer); + #endif } simdutf_warn_unused size_t convert_utf16le_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf16le_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf16le_to_utf8(buf, len, utf8_buffer); } simdutf_warn_unused size_t convert_utf16be_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf16be_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf16be_to_utf8(buf, len, utf8_buffer); +} +simdutf_warn_unused result convert_utf16_to_utf8_with_errors(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf16be_to_utf8_with_errors(buf, len, utf8_buffer); + #else + return convert_utf16le_to_utf8_with_errors(buf, len, utf8_buffer); + #endif } simdutf_warn_unused result convert_utf16le_to_utf8_with_errors(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf16le_to_utf8_with_errors(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf16le_to_utf8_with_errors(buf, len, utf8_buffer); } simdutf_warn_unused result convert_utf16be_to_utf8_with_errors(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf16be_to_utf8_with_errors(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf16be_to_utf8_with_errors(buf, len, utf8_buffer); +} +simdutf_warn_unused size_t convert_valid_utf16_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_valid_utf16be_to_utf8(buf, len, utf8_buffer); + #else + return convert_valid_utf16le_to_utf8(buf, len, utf8_buffer); + #endif } simdutf_warn_unused size_t convert_valid_utf16le_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_valid_utf16le_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_valid_utf16le_to_utf8(buf, len, utf8_buffer); } simdutf_warn_unused size_t convert_valid_utf16be_to_utf8(const char16_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_valid_utf16be_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_valid_utf16be_to_utf8(buf, len, utf8_buffer); } simdutf_warn_unused size_t convert_utf32_to_utf8(const char32_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf32_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf32_to_utf8(buf, len, utf8_buffer); } simdutf_warn_unused result convert_utf32_to_utf8_with_errors(const char32_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_utf32_to_utf8_with_errors(buf, len, utf8_buffer); + return get_active_implementation()->convert_utf32_to_utf8_with_errors(buf, len, utf8_buffer); } simdutf_warn_unused size_t convert_valid_utf32_to_utf8(const char32_t * buf, size_t len, char* utf8_buffer) noexcept { - return active_implementation->convert_valid_utf32_to_utf8(buf, len, utf8_buffer); + return get_active_implementation()->convert_valid_utf32_to_utf8(buf, len, utf8_buffer); +} +simdutf_warn_unused size_t convert_utf32_to_utf16(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf32_to_utf16be(buf, len, utf16_buffer); 
#else + return convert_utf32_to_utf16le(buf, len, utf16_buffer); + #endif } simdutf_warn_unused size_t convert_utf32_to_utf16le(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_utf32_to_utf16le(buf, len, utf16_buffer); + return get_active_implementation()->convert_utf32_to_utf16le(buf, len, utf16_buffer); } simdutf_warn_unused size_t convert_utf32_to_utf16be(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_utf32_to_utf16be(buf, len, utf16_buffer); + return get_active_implementation()->convert_utf32_to_utf16be(buf, len, utf16_buffer); +} +simdutf_warn_unused result convert_utf32_to_utf16_with_errors(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf32_to_utf16be_with_errors(buf, len, utf16_buffer); + #else + return convert_utf32_to_utf16le_with_errors(buf, len, utf16_buffer); + #endif } simdutf_warn_unused result convert_utf32_to_utf16le_with_errors(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_utf32_to_utf16le_with_errors(buf, len, utf16_buffer); + return get_active_implementation()->convert_utf32_to_utf16le_with_errors(buf, len, utf16_buffer); } simdutf_warn_unused result convert_utf32_to_utf16be_with_errors(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_utf32_to_utf16be_with_errors(buf, len, utf16_buffer); + return get_active_implementation()->convert_utf32_to_utf16be_with_errors(buf, len, utf16_buffer); +} +simdutf_warn_unused size_t convert_valid_utf32_to_utf16(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_valid_utf32_to_utf16be(buf, len, utf16_buffer); + #else + return convert_valid_utf32_to_utf16le(buf, len, utf16_buffer); + #endif } simdutf_warn_unused size_t convert_valid_utf32_to_utf16le(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_valid_utf32_to_utf16le(buf, len, utf16_buffer); + return get_active_implementation()->convert_valid_utf32_to_utf16le(buf, len, utf16_buffer); } simdutf_warn_unused size_t convert_valid_utf32_to_utf16be(const char32_t * buf, size_t len, char16_t* utf16_buffer) noexcept { - return active_implementation->convert_valid_utf32_to_utf16be(buf, len, utf16_buffer); + return get_active_implementation()->convert_valid_utf32_to_utf16be(buf, len, utf16_buffer); +} +simdutf_warn_unused size_t convert_utf16_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf16be_to_utf32(buf, len, utf32_buffer); + #else + return convert_utf16le_to_utf32(buf, len, utf32_buffer); + #endif } simdutf_warn_unused size_t convert_utf16le_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_utf16le_to_utf32(buf, len, utf32_buffer); + return get_active_implementation()->convert_utf16le_to_utf32(buf, len, utf32_buffer); } simdutf_warn_unused size_t convert_utf16be_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_utf16be_to_utf32(buf, len, utf32_buffer); + return get_active_implementation()->convert_utf16be_to_utf32(buf, len, utf32_buffer); +} +simdutf_warn_unused result convert_utf16_to_utf32_with_errors(const char16_t * buf, size_t len, char32_t* 
utf32_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_utf16be_to_utf32_with_errors(buf, len, utf32_buffer); + #else + return convert_utf16le_to_utf32_with_errors(buf, len, utf32_buffer); + #endif } simdutf_warn_unused result convert_utf16le_to_utf32_with_errors(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_utf16le_to_utf32_with_errors(buf, len, utf32_buffer); + return get_active_implementation()->convert_utf16le_to_utf32_with_errors(buf, len, utf32_buffer); } simdutf_warn_unused result convert_utf16be_to_utf32_with_errors(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_utf16be_to_utf32_with_errors(buf, len, utf32_buffer); + return get_active_implementation()->convert_utf16be_to_utf32_with_errors(buf, len, utf32_buffer); +} +simdutf_warn_unused size_t convert_valid_utf16_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return convert_valid_utf16be_to_utf32(buf, len, utf32_buffer); + #else + return convert_valid_utf16le_to_utf32(buf, len, utf32_buffer); + #endif } simdutf_warn_unused size_t convert_valid_utf16le_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_valid_utf16le_to_utf32(buf, len, utf32_buffer); + return get_active_implementation()->convert_valid_utf16le_to_utf32(buf, len, utf32_buffer); } simdutf_warn_unused size_t convert_valid_utf16be_to_utf32(const char16_t * buf, size_t len, char32_t* utf32_buffer) noexcept { - return active_implementation->convert_valid_utf16be_to_utf32(buf, len, utf32_buffer); + return get_active_implementation()->convert_valid_utf16be_to_utf32(buf, len, utf32_buffer); } void change_endianness_utf16(const char16_t * input, size_t length, char16_t * output) noexcept { - active_implementation->change_endianness_utf16(input, length, output); + get_active_implementation()->change_endianness_utf16(input, length, output); +} +simdutf_warn_unused size_t count_utf16(const char16_t * input, size_t length) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return count_utf16be(input, length); + #else + return count_utf16le(input, length); + #endif } simdutf_warn_unused size_t count_utf16le(const char16_t * input, size_t length) noexcept { - return active_implementation->count_utf16le(input, length); + return get_active_implementation()->count_utf16le(input, length); } simdutf_warn_unused size_t count_utf16be(const char16_t * input, size_t length) noexcept { - return active_implementation->count_utf16be(input, length); + return get_active_implementation()->count_utf16be(input, length); } simdutf_warn_unused size_t count_utf8(const char * input, size_t length) noexcept { - return active_implementation->count_utf8(input, length); + return get_active_implementation()->count_utf8(input, length); +} +simdutf_warn_unused size_t utf8_length_from_utf16(const char16_t * input, size_t length) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return utf8_length_from_utf16be(input, length); + #else + return utf8_length_from_utf16le(input, length); + #endif } simdutf_warn_unused size_t utf8_length_from_utf16le(const char16_t * input, size_t length) noexcept { - return active_implementation->utf8_length_from_utf16le(input, length); + return get_active_implementation()->utf8_length_from_utf16le(input, length); } simdutf_warn_unused size_t utf8_length_from_utf16be(const char16_t * input, size_t length) noexcept { - return 
active_implementation->utf8_length_from_utf16be(input, length); + return get_active_implementation()->utf8_length_from_utf16be(input, length); +} +simdutf_warn_unused size_t utf32_length_from_utf16(const char16_t * input, size_t length) noexcept { + #if SIMDUTF_IS_BIG_ENDIAN + return utf32_length_from_utf16be(input, length); + #else + return utf32_length_from_utf16le(input, length); + #endif } simdutf_warn_unused size_t utf32_length_from_utf16le(const char16_t * input, size_t length) noexcept { - return active_implementation->utf32_length_from_utf16le(input, length); + return get_active_implementation()->utf32_length_from_utf16le(input, length); } simdutf_warn_unused size_t utf32_length_from_utf16be(const char16_t * input, size_t length) noexcept { - return active_implementation->utf32_length_from_utf16be(input, length); + return get_active_implementation()->utf32_length_from_utf16be(input, length); } simdutf_warn_unused size_t utf16_length_from_utf8(const char * input, size_t length) noexcept { - return active_implementation->utf16_length_from_utf8(input, length); + return get_active_implementation()->utf16_length_from_utf8(input, length); } simdutf_warn_unused size_t utf8_length_from_utf32(const char32_t * input, size_t length) noexcept { - return active_implementation->utf8_length_from_utf32(input, length); + return get_active_implementation()->utf8_length_from_utf32(input, length); } simdutf_warn_unused size_t utf16_length_from_utf32(const char32_t * input, size_t length) noexcept { - return active_implementation->utf16_length_from_utf32(input, length); + return get_active_implementation()->utf16_length_from_utf32(input, length); } simdutf_warn_unused size_t utf32_length_from_utf8(const char * input, size_t length) noexcept { - return active_implementation->utf32_length_from_utf8(input, length); + return get_active_implementation()->utf32_length_from_utf8(input, length); } simdutf_warn_unused simdutf::encoding_type autodetect_encoding(const char * buf, size_t length) noexcept { - return active_implementation->autodetect_encoding(buf, length); + return get_active_implementation()->autodetect_encoding(buf, length); } simdutf_warn_unused int detect_encodings(const char * buf, size_t length) noexcept { - return active_implementation->detect_encodings(buf, length); + return get_active_implementation()->detect_encodings(buf, length); } const implementation * builtin_implementation() { - static const implementation * builtin_impl = available_implementations[SIMDUTF_STRINGIFY(SIMDUTF_BUILTIN_IMPLEMENTATION)]; + static const implementation * builtin_impl = get_available_implementations()[SIMDUTF_STRINGIFY(SIMDUTF_BUILTIN_IMPLEMENTATION)]; return builtin_impl; } @@ -4854,6 +4988,14 @@ const implementation * builtin_implementation() { /* begin file src/encoding_types.cpp */ namespace simdutf { +bool match_system(endianness e) { +#if SIMDUTF_IS_BIG_ENDIAN + return e == endianness::BIG; +#else + return e == endianness::LITTLE; +#endif +} + std::string to_string(encoding_type bom) { switch (bom) { case UTF16_LE: return "UTF16 little-endian"; @@ -10071,12 +10213,12 @@ inline simdutf_warn_unused bool validate(const char16_t *buf, size_t len) noexce const uint16_t *data = reinterpret_cast(buf); uint64_t pos = 0; while (pos < len) { - uint16_t word = big_endian ? swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? 
swap_bytes(data[pos]) : data[pos]; if((word &0xF800) == 0xD800) { if(pos + 1 >= len) { return false; } uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return false; } - uint16_t next_word = big_endian ? uint16_t((data[pos + 1] >> 8) | (data[pos + 1] << 8)) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return false; } pos += 2; @@ -10092,12 +10234,12 @@ inline simdutf_warn_unused result validate_with_errors(const char16_t *buf, size const uint16_t *data = reinterpret_cast(buf); size_t pos = 0; while (pos < len) { - uint16_t word = big_endian ? swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? swap_bytes(data[pos]) : data[pos]; if((word & 0xF800) == 0xD800) { if(pos + 1 >= len) { return result(error_code::SURROGATE, pos); } uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return result(error_code::SURROGATE, pos); } - uint16_t next_word = big_endian ? uint16_t((data[pos + 1] >> 8) | (data[pos + 1] << 8)) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return result(error_code::SURROGATE, pos); } pos += 2; @@ -10114,7 +10256,7 @@ inline size_t count_code_points(const char16_t* buf, size_t len) { const uint16_t * p = reinterpret_cast(buf); size_t counter{0}; for(size_t i = 0; i < len; i++) { - uint16_t word = big_endian ? swap_bytes(p[i]) : p[i]; + uint16_t word = !match_system(big_endian) ? swap_bytes(p[i]) : p[i]; counter += ((word & 0xFC00) != 0xDC00); } return counter; @@ -10126,7 +10268,7 @@ inline size_t utf8_length_from_utf16(const char16_t* buf, size_t len) { const uint16_t * p = reinterpret_cast(buf); size_t counter{0}; for(size_t i = 0; i < len; i++) { - uint16_t word = big_endian ? swap_bytes(p[i]) : p[i]; + uint16_t word = !match_system(big_endian) ? swap_bytes(p[i]) : p[i]; /** ASCII **/ if(word <= 0x7F) { counter++; } /** two-byte **/ @@ -10145,7 +10287,7 @@ inline size_t utf32_length_from_utf16(const char16_t* buf, size_t len) { const uint16_t * p = reinterpret_cast(buf); size_t counter{0}; for(size_t i = 0; i < len; i++) { - uint16_t word = big_endian ? swap_bytes(p[i]) : p[i]; + uint16_t word = !match_system(big_endian) ? swap_bytes(p[i]) : p[i]; counter += ((word & 0xFC00) != 0xDC00); } return counter; @@ -10440,14 +10582,14 @@ inline size_t convert_valid(const char32_t* buf, size_t len, char16_t* utf16_out uint32_t word = data[pos]; if((word & 0xFFFF0000)==0) { // will not generate a surrogate pair - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); pos++; } else { // will generate a surrogate pair word -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (word >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (word & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -10486,14 +10628,14 @@ inline size_t convert(const char32_t* buf, size_t len, char16_t* utf16_output) { if((word & 0xFFFF0000)==0) { if (word >= 0xD800 && word <= 0xDFFF) { return 0; } // will not generate a surrogate pair - *utf16_output++ = big_endian ? 
char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); } else { // will generate a surrogate pair if (word > 0x10FFFF) { return 0; } word -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (word >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (word & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -10515,14 +10657,14 @@ inline result convert_with_errors(const char32_t* buf, size_t len, char16_t* utf if((word & 0xFFFF0000)==0) { if (word >= 0xD800 && word <= 0xDFFF) { return result(error_code::SURROGATE, pos); } // will not generate a surrogate pair - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(uint16_t(word))) : char16_t(word); } else { // will generate a surrogate pair if (word > 0x10FFFF) { return result(error_code::TOO_LARGE, pos); } word -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (word >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (word & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -10562,17 +10704,18 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char* utf8_output) if (pos + 4 <= len) { // if it is safe to read 8 more bytes, check that they are ascii uint64_t v; ::memcpy(&v, data + pos, sizeof(uint64_t)); - if (big_endian) v = (v >> 8) | (v << (64 - 8)); + if (!match_system(big_endian)) v = (v >> 8) | (v << (64 - 8)); if ((v & 0xFF80FF80FF80FF80) == 0) { size_t final_pos = pos + 4; while(pos < final_pos) { - *utf8_output++ = big_endian ? char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); + *utf8_output++ = !match_system(big_endian) ? char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); pos++; } continue; } } - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word & 0xFF80)==0) { // will generate one UTF-8 bytes *utf8_output++ = char(word); @@ -10594,7 +10737,7 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char* utf8_output) // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); if(pos + 1 >= len) { return 0; } // minimal bound checking - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); uint32_t value = (diff << 10) + diff2 + 0x10000; // will generate four UTF-8 bytes @@ -10636,17 +10779,17 @@ inline size_t convert(const char16_t* buf, size_t len, char* utf8_output) { if (pos + 4 <= len) { // if it is safe to read 8 more bytes, check that they are ascii uint64_t v; ::memcpy(&v, data + pos, sizeof(uint64_t)); - if (big_endian) v = (v >> 8) | (v << (64 - 8)); + if (!match_system(big_endian)) v = (v >> 8) | (v << (64 - 8)); if ((v & 0xFF80FF80FF80FF80) == 0) { size_t final_pos = pos + 4; while(pos < final_pos) { - *utf8_output++ = big_endian ? char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); + *utf8_output++ = !match_system(big_endian) ? 
char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); pos++; } continue; } } - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word & 0xFF80)==0) { // will generate one UTF-8 bytes *utf8_output++ = char(word); @@ -10669,7 +10812,7 @@ inline size_t convert(const char16_t* buf, size_t len, char* utf8_output) { if(pos + 1 >= len) { return 0; } uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return 0; } - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return 0; } uint32_t value = (diff << 10) + diff2 + 0x10000; @@ -10695,17 +10838,17 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char* utf8_ou if (pos + 4 <= len) { // if it is safe to read 8 more bytes, check that they are ascii uint64_t v; ::memcpy(&v, data + pos, sizeof(uint64_t)); - if (big_endian) v = (v >> 8) | (v << (64 - 8)); + if (!match_system(big_endian)) v = (v >> 8) | (v << (64 - 8)); if ((v & 0xFF80FF80FF80FF80) == 0) { size_t final_pos = pos + 4; while(pos < final_pos) { - *utf8_output++ = big_endian ? char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); + *utf8_output++ = !match_system(big_endian) ? char(utf16::swap_bytes(buf[pos])) : char(buf[pos]); pos++; } continue; } } - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word & 0xFF80)==0) { // will generate one UTF-8 bytes *utf8_output++ = char(word); @@ -10728,7 +10871,7 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char* utf8_ou if(pos + 1 >= len) { return result(error_code::SURROGATE, pos); } uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return result(error_code::SURROGATE, pos); } - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return result(error_code::SURROGATE, pos); } uint32_t value = (diff << 10) + diff2 + 0x10000; @@ -10768,7 +10911,7 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char32_t* utf32_out size_t pos = 0; char32_t* start{utf32_output}; while (pos < len) { - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word &0xF800 ) != 0xD800) { // No surrogate pair, extend 16-bit word to 32-bit word *utf32_output++ = char32_t(word); @@ -10777,7 +10920,7 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char32_t* utf32_out // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); if(pos + 1 >= len) { return 0; } // minimal bound checking - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? 
utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); uint32_t value = (diff << 10) + diff2 + 0x10000; *utf32_output++ = char32_t(value); @@ -10810,7 +10953,7 @@ inline size_t convert(const char16_t* buf, size_t len, char32_t* utf32_output) { size_t pos = 0; char32_t* start{utf32_output}; while (pos < len) { - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word &0xF800 ) != 0xD800) { // No surrogate pair, extend 16-bit word to 32-bit word *utf32_output++ = char32_t(word); @@ -10820,7 +10963,7 @@ inline size_t convert(const char16_t* buf, size_t len, char32_t* utf32_output) { uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return 0; } if(pos + 1 >= len) { return 0; } // minimal bound checking - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return 0; } uint32_t value = (diff << 10) + diff2 + 0x10000; @@ -10837,7 +10980,7 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char32_t* utf size_t pos = 0; char32_t* start{utf32_output}; while (pos < len) { - uint16_t word = big_endian ? utf16::swap_bytes(data[pos]) : data[pos]; + uint16_t word = !match_system(big_endian) ? utf16::swap_bytes(data[pos]) : data[pos]; if((word &0xF800 ) != 0xD800) { // No surrogate pair, extend 16-bit word to 32-bit word *utf32_output++ = char32_t(word); @@ -10847,7 +10990,7 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char32_t* utf uint16_t diff = uint16_t(word - 0xD800); if(diff > 0x3FF) { return result(error_code::SURROGATE, pos); } if(pos + 1 >= len) { return result(error_code::SURROGATE, pos); } // minimal bound checking - uint16_t next_word = big_endian ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; + uint16_t next_word = !match_system(big_endian) ? utf16::swap_bytes(data[pos + 1]) : data[pos + 1]; uint16_t diff2 = uint16_t(next_word - 0xDC00); if(diff2 > 0x3FF) { return result(error_code::SURROGATE, pos); } uint32_t value = (diff << 10) + diff2 + 0x10000; @@ -10889,7 +11032,7 @@ inline size_t convert_valid(const char* buf, size_t len, char16_t* utf16_output) if ((v & 0x8080808080808080) == 0) { size_t final_pos = pos + 8; while(pos < final_pos) { - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); pos++; } continue; @@ -10898,14 +11041,14 @@ inline size_t convert_valid(const char* buf, size_t len, char16_t* utf16_output) uint8_t leading_byte = data[pos]; // leading byte if (leading_byte < 0b10000000) { // converting one ASCII byte !!! - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(leading_byte)) : char16_t(leading_byte); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(leading_byte)) : char16_t(leading_byte); pos++; } else if ((leading_byte & 0b11100000) == 0b11000000) { // We have a two-byte UTF-8, it should become // a single UTF-16 word. 
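// (Illustrative aside, not from the upstream source: a two-byte sequence
// 110xxxxx 10yyyyyy decodes to the 11-bit value xxxxxyyyyyy, e.g.
// 0xC3 0xA9 -> (0x03 << 6) | 0x29 = 0xE9, U+00E9 'é'.)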
if(pos + 1 >= len) { break; } // minimal bound checking uint16_t code_point = uint16_t(((leading_byte &0b00011111) << 6) | (data[pos + 1] &0b00111111)); - if (big_endian) { + if (!match_system(big_endian)) { code_point = utf16::swap_bytes(uint16_t(code_point)); } *utf16_output++ = char16_t(code_point); @@ -10915,7 +11058,7 @@ inline size_t convert_valid(const char* buf, size_t len, char16_t* utf16_output) // a single UTF-16 word. if(pos + 2 >= len) { break; } // minimal bound checking uint16_t code_point = uint16_t(((leading_byte &0b00001111) << 12) | ((data[pos + 1] &0b00111111) << 6) | (data[pos + 2] &0b00111111)); - if (big_endian) { + if (!match_system(big_endian)) { code_point = utf16::swap_bytes(uint16_t(code_point)); } *utf16_output++ = char16_t(code_point); @@ -10928,7 +11071,7 @@ inline size_t convert_valid(const char* buf, size_t len, char16_t* utf16_output) code_point -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (code_point >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (code_point & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -10977,16 +11120,17 @@ inline size_t convert(const char* buf, size_t len, char16_t* utf16_output) { if ((v & 0x8080808080808080) == 0) { size_t final_pos = pos + 16; while(pos < final_pos) { - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); pos++; } continue; } } + uint8_t leading_byte = data[pos]; // leading byte if (leading_byte < 0b10000000) { // converting one ASCII byte !!! - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(leading_byte)): char16_t(leading_byte); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(leading_byte)): char16_t(leading_byte); pos++; } else if ((leading_byte & 0b11100000) == 0b11000000) { // We have a two-byte UTF-8, it should become @@ -10996,7 +11140,7 @@ inline size_t convert(const char* buf, size_t len, char16_t* utf16_output) { // range check uint32_t code_point = (leading_byte & 0b00011111) << 6 | (data[pos + 1] & 0b00111111); if (code_point < 0x80 || 0x7ff < code_point) { return 0; } - if (big_endian) { + if (!match_system(big_endian)) { code_point = uint32_t(utf16::swap_bytes(uint16_t(code_point))); } *utf16_output++ = char16_t(code_point); @@ -11016,7 +11160,7 @@ inline size_t convert(const char* buf, size_t len, char16_t* utf16_output) { (0xd7ff < code_point && code_point < 0xe000)) { return 0; } - if (big_endian) { + if (!match_system(big_endian)) { code_point = uint32_t(utf16::swap_bytes(uint16_t(code_point))); } *utf16_output++ = char16_t(code_point); @@ -11036,7 +11180,7 @@ inline size_t convert(const char* buf, size_t len, char16_t* utf16_output) { code_point -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (code_point >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (code_point & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -11066,7 +11210,7 @@ inline result convert_with_errors(const char* buf, size_t len, char16_t* utf16_o if ((v & 0x8080808080808080) == 0) { size_t final_pos = pos + 16; while(pos < final_pos) { - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); + *utf16_output++ = !match_system(big_endian) ? 
char16_t(utf16::swap_bytes(buf[pos])) : char16_t(buf[pos]); pos++; } continue; @@ -11075,7 +11219,7 @@ inline result convert_with_errors(const char* buf, size_t len, char16_t* utf16_o uint8_t leading_byte = data[pos]; // leading byte if (leading_byte < 0b10000000) { // converting one ASCII byte !!! - *utf16_output++ = big_endian ? char16_t(utf16::swap_bytes(leading_byte)): char16_t(leading_byte); + *utf16_output++ = !match_system(big_endian) ? char16_t(utf16::swap_bytes(leading_byte)): char16_t(leading_byte); pos++; } else if ((leading_byte & 0b11100000) == 0b11000000) { // We have a two-byte UTF-8, it should become @@ -11085,7 +11229,7 @@ inline result convert_with_errors(const char* buf, size_t len, char16_t* utf16_o // range check uint32_t code_point = (leading_byte & 0b00011111) << 6 | (data[pos + 1] & 0b00111111); if (code_point < 0x80 || 0x7ff < code_point) { return result(error_code::OVERLONG, pos); } - if (big_endian) { + if (!match_system(big_endian)) { code_point = uint32_t(utf16::swap_bytes(uint16_t(code_point))); } *utf16_output++ = char16_t(code_point); @@ -11103,7 +11247,7 @@ inline result convert_with_errors(const char* buf, size_t len, char16_t* utf16_o (data[pos + 2] & 0b00111111); if ((code_point < 0x800) || (0xffff < code_point)) { return result(error_code::OVERLONG, pos);} if (0xd7ff < code_point && code_point < 0xe000) { return result(error_code::SURROGATE, pos); } - if (big_endian) { + if (!match_system(big_endian)) { code_point = uint32_t(utf16::swap_bytes(uint16_t(code_point))); } *utf16_output++ = char16_t(code_point); @@ -11124,7 +11268,7 @@ inline result convert_with_errors(const char* buf, size_t len, char16_t* utf16_o code_point -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (code_point >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (code_point & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = utf16::swap_bytes(high_surrogate); low_surrogate = utf16::swap_bytes(low_surrogate); } @@ -11510,8 +11654,8 @@ int arm_detect_encodings(const char * buf, size_t len) { if (surrogates_wordmask0 != 0 || surrogates_wordmask1 != 0) { // Cannot be UTF8 is_utf8 = false; - // Can still be either UTF-16LE or UTF-32LE depending on the positions of the surrogates - // To be valid UTF-32LE, a surrogate cannot be in the two most significant bytes of any 32-bit word. + // Can still be either UTF-16LE or UTF-32 depending on the positions of the surrogates + // To be valid UTF-32, a surrogate cannot be in the two most significant bytes of any 32-bit word. // On the other hand, to be valid UTF-16LE, at least one surrogate must be in the two most significant // bytes of a 32-bit word since they always come in pairs in UTF-16LE. // Note that we always proceed in multiple of 4 before this point so there is no offset in 32-bit words. 
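The _with_errors variants patched here report failures as a result pair (an error code plus a position) rather than the bare 0 returned by the plain converters. A caller-side sketch, assuming the native-endianness declarations added later in this patch are consumed through the usual simdutf.h include:

  #include <simdutf.h>
  #include <cstdio>

  int main() {
    // A lone high surrogate makes this native-order UTF-16 invalid.
    const char16_t bad[] = { u'a', char16_t(0xD800), u'b' };
    char32_t out[3];
    simdutf::result r =
        simdutf::convert_utf16_to_utf32_with_errors(bad, 3, out);
    if (r.error != simdutf::error_code::SUCCESS) {
      // On failure, r.count is the position of the offending unit (1 here);
      // on success it is the number of char32_t written.
      std::printf("error %d at position %zu\n", int(r.error), r.count);
    }
    return 0;
  }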
@@ -11582,7 +11726,7 @@ int arm_detect_encodings(const char * buf, size_t len) { } } else { is_utf16 = false; - // Check for UTF-32LE + // Check for UTF-32 if (len % 4 == 0) { const char32_t * input = reinterpret_cast(buf); const char32_t* end32 = reinterpret_cast(start) + len/4; @@ -11626,7 +11770,7 @@ int arm_detect_encodings(const char * buf, size_t len) { } // If no surrogate, validate under other encodings as well - // UTF-32LE validation + // UTF-32 validation currentmax = vmaxq_u32(vreinterpretq_u32_u16(in),currentmax); currentmax = vmaxq_u32(vreinterpretq_u32_u16(secondin),currentmax); currentmax = vmaxq_u32(vreinterpretq_u32_u16(thirdin),currentmax); @@ -11686,7 +11830,7 @@ const char16_t* arm_validate_utf16(const char16_t* input, size_t size) { // consists only the higher bytes. auto in0 = simd16(input); auto in1 = simd16(input + simd16::SIZE / sizeof(char16_t)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -11762,7 +11906,7 @@ const result arm_validate_utf16_with_errors(const char16_t* input, size_t size) auto in0 = simd16(input); auto in1 = simd16(input + simd16::SIZE / sizeof(char16_t)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -11918,7 +12062,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, // We process in chunks of 16 bytes uint16x8_t ascii_first = vmovl_u8(vget_low_u8 (in)); uint16x8_t ascii_second = vmovl_high_u8(in); - if (big_endian) { + if (!match_system(big_endian)) { ascii_first = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(ascii_first), swap)); ascii_second = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(ascii_second), swap)); } @@ -11934,7 +12078,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, uint8x16_t ascii = vandq_u8(perm, vreinterpretq_u8_u16(vmovq_n_u16(0x7f))); uint8x16_t highbyte = vandq_u8(perm, vreinterpretq_u8_u16(vmovq_n_u16(0x1f00))); uint8x16_t composed = vorrq_u8(ascii, vreinterpretq_u8_u16(vshrq_n_u16(vreinterpretq_u16_u8(highbyte), 2))); - if (big_endian) composed = vqtbl1q_u8(composed, swap); + if (!match_system(big_endian)) composed = vqtbl1q_u8(composed, swap); vst1q_u8(reinterpret_cast(utf16_output), composed); utf16_output += 8; // We wrote 16 bytes, 8 code points. 
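The SIMDUTF_REGULAR_VISUAL_STUDIO branches above differ only in how the 16-byte shuffle constant is spelled (make_uint8x16_t versus a braced initializer); the idiom itself is a table lookup that exchanges adjacent bytes, byte-swapping eight UTF-16 units at once. Isolated as a sketch (GCC/clang spelling, AArch64):

  #include <arm_neon.h>

  // Swap the two bytes of every 16-bit lane; this is what the
  // vqtbl1q_u8(..., swap) calls above do whenever the data's endianness
  // does not match the host's.
  inline uint16x8_t swap_bytes_u16x8(uint16x8_t in) {
    const uint8x16_t swap = {1, 0, 3, 2, 5, 4, 7, 6,
                             9, 8, 11, 10, 13, 12, 15, 14};
    return vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(in), swap));
  }

NEON's vrev16q_u8 expresses the same per-lane byte reversal without a lookup table.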
return 16; @@ -11959,7 +12103,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, uint32x4_t composed = vorrq_u32(vorrq_u32(vreinterpretq_u32_u8(ascii), vreinterpretq_u32_u8(middlebyte_shifted)), highbyte_shifted); uint16x8_t composed_repacked = vmovn_high_u32(vmovn_u32(composed), composed); - if (big_endian) composed_repacked = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(composed_repacked), swap)); + if (!match_system(big_endian)) composed_repacked = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(composed_repacked), swap)); vst1q_u16(reinterpret_cast(utf16_output), composed_repacked); utf16_output += 4; return 12; @@ -11982,7 +12126,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, uint8x16_t ascii = vandq_u8(perm, vreinterpretq_u8_u16(vmovq_n_u16(0x7f))); uint8x16_t highbyte = vandq_u8(perm, vreinterpretq_u8_u16(vmovq_n_u16(0x1f00))); uint8x16_t composed = vorrq_u8(ascii, vreinterpretq_u8_u16(vshrq_n_u16(vreinterpretq_u16_u8(highbyte), 2))); - if (big_endian) composed = vqtbl1q_u8(composed, swap); + if (!match_system(big_endian)) composed = vqtbl1q_u8(composed, swap); vst1q_u8(reinterpret_cast(utf16_output), composed); utf16_output += 6; // We wrote 12 bytes, 6 code points. } else if (idx < 145) { @@ -12000,7 +12144,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, uint32x4_t composed = vorrq_u32(vorrq_u32(vreinterpretq_u32_u8(ascii), vreinterpretq_u32_u8(middlebyte_shifted)), highbyte_shifted); uint16x8_t composed_repacked = vmovn_high_u32(vmovn_u32(composed), composed); - if (big_endian) composed_repacked = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(composed_repacked), swap)); + if (!match_system(big_endian)) composed_repacked = vreinterpretq_u16_u8(vqtbl1q_u8(vreinterpretq_u8_u16(composed_repacked), swap)); vst1q_u16(reinterpret_cast(utf16_output), composed_repacked); utf16_output += 4; } else if (idx < 209) { @@ -12035,7 +12179,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, vorrq_u32(hightenbitsadd, lowtenbitsaddshifted); uint32_t basic_buffer[4]; uint32_t basic_buffer_swap[4]; - if (big_endian) { + if (!match_system(big_endian)) { vst1q_u32(basic_buffer_swap, vreinterpretq_u32_u8(vqtbl1q_u8(composed, swap))); surrogates = vreinterpretq_u32_u8(vqtbl1q_u8(vreinterpretq_u8_u32(surrogates), swap)); } @@ -12044,7 +12188,7 @@ size_t convert_masked_utf8_to_utf16(const char *input, vst1q_u32(surrogate_buffer, surrogates); for (size_t i = 0; i < 3; i++) { if (basic_buffer[i] < 65536) { - utf16_output[0] = big_endian ? uint16_t(basic_buffer_swap[i]) : uint16_t(basic_buffer[i]); + utf16_output[0] = !match_system(big_endian) ? uint16_t(basic_buffer_swap[i]) : uint16_t(basic_buffer[i]); utf16_output++; } else { utf16_output[0] = uint16_t(surrogate_buffer[i] & 0xffff); @@ -12260,7 +12404,7 @@ std::pair arm_convert_utf16_to_utf8(const char16_t* buf, while (buf + 16 <= end) { uint16x8_t in = vld1q_u16(reinterpret_cast(buf)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12271,7 +12415,7 @@ std::pair arm_convert_utf16_to_utf8(const char16_t* buf, if(vmaxvq_u16(in) <= 0x7F) { // ASCII fast path!!!! // It is common enough that we have sequences of 16 consecutive ASCII characters. 
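The tail of convert_masked_utf8_to_utf16 above, like the UTF-32-to-UTF-16 hunks further down, splits each code point into either a single UTF-16 unit or a 0xD800/0xDC00 surrogate pair. The rule, restated as a standalone helper (the name is illustrative, not simdutf API):

  #include <cstdint>

  // Encode one scalar value (<= 0x10FFFF and not itself a surrogate) as
  // UTF-16 in native byte order; returns the number of units written.
  inline int encode_utf16(uint32_t cp, char16_t out[2]) {
    if (cp < 0x10000) {
      out[0] = char16_t(cp);
      return 1;
    }
    cp -= 0x10000;                            // 20 bits remain
    out[0] = char16_t(0xD800 + (cp >> 10));   // high surrogate: top 10 bits
    out[1] = char16_t(0xDC00 + (cp & 0x3FF)); // low surrogate: bottom 10 bits
    return 2;
  }

For a non-native target order each unit is then byte-swapped, which is exactly where the !match_system(big_endian) guards sit in these hunks.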
uint16x8_t nextin = vld1q_u16(reinterpret_cast(buf) + 8); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12477,7 +12621,7 @@ std::pair arm_convert_utf16_to_utf8(const char16_t* buf, size_t k = 0; if(size_t(end - buf) < forward + 1) { forward = size_t(end - buf - 1);} for(; k < forward; k++) { - uint16_t word = big_endian ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; + uint16_t word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; if((word & 0xFF80)==0) { *utf8_output++ = char(word); } else if((word & 0xF800)==0) { @@ -12490,7 +12634,7 @@ std::pair arm_convert_utf16_to_utf8(const char16_t* buf, } else { // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); - uint16_t next_word = big_endian ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; + uint16_t next_word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; k++; uint16_t diff2 = uint16_t(next_word - 0xDC00); if((diff | diff2) > 0x3FF) { return std::make_pair(nullptr, reinterpret_cast(utf8_output)); } @@ -12527,7 +12671,7 @@ std::pair arm_convert_utf16_to_utf8_with_errors(const char16_t* b while (buf + 16 <= end) { uint16x8_t in = vld1q_u16(reinterpret_cast(buf)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12538,7 +12682,7 @@ std::pair arm_convert_utf16_to_utf8_with_errors(const char16_t* b if(vmaxvq_u16(in) <= 0x7F) { // ASCII fast path!!!! // It is common enough that we have sequences of 16 consecutive ASCII characters. uint16x8_t nextin = vld1q_u16(reinterpret_cast(buf) + 8); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12744,7 +12888,7 @@ std::pair arm_convert_utf16_to_utf8_with_errors(const char16_t* b size_t k = 0; if(size_t(end - buf) < forward + 1) { forward = size_t(end - buf - 1);} for(; k < forward; k++) { - uint16_t word = big_endian ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; + uint16_t word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; if((word & 0xFF80)==0) { *utf8_output++ = char(word); } else if((word & 0xF800)==0) { @@ -12757,7 +12901,7 @@ std::pair arm_convert_utf16_to_utf8_with_errors(const char16_t* b } else { // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); - uint16_t next_word = big_endian ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; + uint16_t next_word = !match_system(big_endian) ? 
scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; k++; uint16_t diff2 = uint16_t(next_word - 0xDC00); if((diff | diff2) > 0x3FF) { return std::make_pair(result(error_code::SURROGATE, buf - start + k - 1), reinterpret_cast(utf8_output)); } @@ -12839,7 +12983,7 @@ std::pair arm_convert_utf16_to_utf32(const char16_t* while (buf + 16 <= end) { uint16x8_t in = vld1q_u16(reinterpret_cast(buf)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12866,13 +13010,13 @@ std::pair arm_convert_utf16_to_utf32(const char16_t* size_t k = 0; if(size_t(end - buf) < forward + 1) { forward = size_t(end - buf - 1);} for(; k < forward; k++) { - uint16_t word = big_endian ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; + uint16_t word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; if((word &0xF800 ) != 0xD800) { *utf32_output++ = char32_t(word); } else { // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); - uint16_t next_word = big_endian ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; + uint16_t next_word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; k++; uint16_t diff2 = uint16_t(next_word - 0xDC00); if((diff | diff2) > 0x3FF) { return std::make_pair(nullptr, reinterpret_cast(utf32_output)); } @@ -12904,7 +13048,7 @@ std::pair arm_convert_utf16_to_utf32_with_errors(const char16 while (buf + 16 <= end) { uint16x8_t in = vld1q_u16(reinterpret_cast(buf)); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x16_t swap = make_uint8x16_t(1, 0, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 15, 14); #else @@ -12931,13 +13075,13 @@ std::pair arm_convert_utf16_to_utf32_with_errors(const char16 size_t k = 0; if(size_t(end - buf) < forward + 1) { forward = size_t(end - buf - 1);} for(; k < forward; k++) { - uint16_t word = big_endian ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; + uint16_t word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k]) : buf[k]; if((word &0xF800 ) != 0xD800) { *utf32_output++ = char32_t(word); } else { // must be a surrogate pair uint16_t diff = uint16_t(word - 0xD800); - uint16_t next_word = big_endian ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; + uint16_t next_word = !match_system(big_endian) ? scalar::utf16::swap_bytes(buf[k + 1]) : buf[k + 1]; k++; uint16_t diff2 = uint16_t(next_word - 0xDC00); if((diff | diff2) > 0x3FF) { return std::make_pair(result(error_code::SURROGATE, buf - start + k - 1), reinterpret_cast(utf32_output)); } @@ -13445,7 +13589,7 @@ std::pair arm_convert_utf32_to_utf16(const char32_t* const uint16x4_t v_dfff = vmov_n_u16((uint16_t)0xdfff); forbidden_bytemask = vorr_u16(vand_u16(vcle_u16(utf16_packed, v_dfff), vcge_u16(utf16_packed, v_d800)), forbidden_bytemask); - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x8_t swap = make_uint8x8_t(1, 0, 3, 2, 5, 4, 7, 6); #else @@ -13465,14 +13609,14 @@ std::pair arm_convert_utf32_to_utf16(const char32_t* if((word & 0xFFFF0000)==0) { // will not generate a surrogate pair if (word >= 0xD800 && word <= 0xDFFF) { return std::make_pair(nullptr, reinterpret_cast(utf16_output)); } - *utf16_output++ = big_endian ? char16_t(word >> 8 | word << 8) : char16_t(word); + *utf16_output++ = !match_system(big_endian) ? 
char16_t(word >> 8 | word << 8) : char16_t(word); } else { // will generate a surrogate pair if (word > 0x10FFFF) { return std::make_pair(nullptr, reinterpret_cast(utf16_output)); } word -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (word >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (word & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = uint16_t(high_surrogate >> 8 | high_surrogate << 8); low_surrogate = uint16_t(low_surrogate << 8 | low_surrogate >> 8); } @@ -13513,7 +13657,7 @@ std::pair arm_convert_utf32_to_utf16_with_errors(const char32 return std::make_pair(result(error_code::SURROGATE, buf - start), reinterpret_cast(utf16_output)); } - if (big_endian) { + if (!match_system(big_endian)) { #ifdef SIMDUTF_REGULAR_VISUAL_STUDIO const uint8x8_t swap = make_uint8x8_t(1, 0, 3, 2, 5, 4, 7, 6); #else @@ -13533,14 +13677,14 @@ std::pair arm_convert_utf32_to_utf16_with_errors(const char32 if((word & 0xFFFF0000)==0) { // will not generate a surrogate pair if (word >= 0xD800 && word <= 0xDFFF) { return std::make_pair(result(error_code::SURROGATE, buf - start + k), reinterpret_cast(utf16_output)); } - *utf16_output++ = big_endian ? char16_t(word >> 8 | word << 8) : char16_t(word); + *utf16_output++ = !match_system(big_endian) ? char16_t(word >> 8 | word << 8) : char16_t(word); } else { // will generate a surrogate pair if (word > 0x10FFFF) { return std::make_pair(result(error_code::TOO_LARGE, buf - start + k), reinterpret_cast(utf16_output)); } word -= 0x10000; uint16_t high_surrogate = uint16_t(0xD800 + (word >> 10)); uint16_t low_surrogate = uint16_t(0xDC00 + (word & 0x3FF)); - if (big_endian) { + if (!match_system(big_endian)) { high_surrogate = uint16_t(high_surrogate >> 8 | high_surrogate << 8); low_surrogate = uint16_t(low_surrogate << 8 | low_surrogate >> 8); } @@ -14705,7 +14849,7 @@ simdutf_really_inline size_t count_code_points(const char16_t* in, size_t size) size_t count = 0; for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t not_pair = input.not_in_range(0xDC00, 0xDFFF); count += count_ones(not_pair) / 2; } @@ -14719,7 +14863,7 @@ simdutf_really_inline size_t utf8_length_from_utf16(const char16_t* in, size_t s // This algorithm could no doubt be improved! for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t ascii_mask = input.lteq(0x7F); uint64_t twobyte_mask = input.lteq(0x7FF); uint64_t not_pair_mask = input.not_in_range(0xD800, 0xDFFF); @@ -17902,8 +18046,8 @@ implementation::detect_encodings(const char *input, if (surrogates) { is_utf8 = false; - // Can still be either UTF-16LE or UTF-32LE depending on the positions - // of the surrogates To be valid UTF-32LE, a surrogate cannot be in the + // Can still be either UTF-16LE or UTF-32 depending on the positions + // of the surrogates To be valid UTF-32, a surrogate cannot be in the // two most significant bytes of any 32-bit word. 
On the other hand, to // be valid UTF-16LE, at least one surrogate must be in the two most // significant bytes of a 32-bit word since they always come in pairs in @@ -17940,7 +18084,7 @@ implementation::detect_encodings(const char *input, } else { is_utf16 = false; - // Check for UTF-32LE + // Check for UTF-32 if (length % 4 == 0) { const char32_t *input32 = reinterpret_cast(buf); const char32_t *end32 = @@ -17955,7 +18099,7 @@ implementation::detect_encodings(const char *input, } // If no surrogate, validate under other encodings as well - // UTF-32LE validation + // UTF-32 validation currentmax = _mm512_max_epu32(in, currentmax); // UTF-8 validation @@ -19081,8 +19225,8 @@ int avx2_detect_encodings(const char * buf, size_t len) { if (surrogates_bitmask0 != 0x0) { // Cannot be UTF8 is_utf8 = false; - // Can still be either UTF-16LE or UTF-32LE depending on the positions of the surrogates - // To be valid UTF-32LE, a surrogate cannot be in the two most significant bytes of any 32-bit word. + // Can still be either UTF-16LE or UTF-32 depending on the positions of the surrogates + // To be valid UTF-32, a surrogate cannot be in the two most significant bytes of any 32-bit word. // On the other hand, to be valid UTF-16LE, at least one surrogate must be in the two most significant // bytes of a 32-bit word since they always come in pairs in UTF-16LE. // Note that we always proceed in multiple of 4 before this point so there is no offset in 32-bit words. @@ -19153,7 +19297,7 @@ int avx2_detect_encodings(const char * buf, size_t len) { } } else { is_utf16 = false; - // Check for UTF-32LE + // Check for UTF-32 if (len % 4 == 0) { const char32_t * input = reinterpret_cast(buf); const char32_t* end32 = reinterpret_cast(start) + len/4; @@ -19188,7 +19332,7 @@ int avx2_detect_encodings(const char * buf, size_t len) { } // If no surrogate, validate under other encodings as well - // UTF-32LE validation + // UTF-32 validation currentmax = _mm256_max_epu32(in, currentmax); currentmax = _mm256_max_epu32(nextin, currentmax); @@ -22278,7 +22422,7 @@ simdutf_really_inline size_t count_code_points(const char16_t* in, size_t size) size_t count = 0; for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t not_pair = input.not_in_range(0xDC00, 0xDFFF); count += count_ones(not_pair) / 2; } @@ -22292,7 +22436,7 @@ simdutf_really_inline size_t utf8_length_from_utf16(const char16_t* in, size_t s // This algorithm could no doubt be improved! for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t ascii_mask = input.lteq(0x7F); uint64_t twobyte_mask = input.lteq(0x7FF); uint64_t not_pair_mask = input.not_in_range(0xD800, 0xDFFF); @@ -24001,7 +24145,7 @@ simdutf_really_inline size_t count_code_points(const char16_t* in, size_t size) size_t count = 0; for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t not_pair = input.not_in_range(0xDC00, 0xDFFF); count += count_ones(not_pair) / 2; } @@ -24015,7 +24159,7 @@ simdutf_really_inline size_t utf8_length_from_utf16(const char16_t* in, size_t s // This algorithm could no doubt be improved! 
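count_code_points and utf8_length_from_utf16 recur once per instruction-set kernel in the amalgamated source, which is why the same two-line change repeats. The masks they build (lteq(0x7F) for ASCII, lteq(0x7FF) for two-byte output, not_in_range for surrogates) implement, 32 units at a time, the same accounting as this scalar sketch (the helper name is mine):

  #include <cstddef>
  #include <cstdint>

  // A UTF-16 unit costs 1 UTF-8 byte if ASCII, 2 up to U+07FF, 3 otherwise;
  // each half of a surrogate pair costs 2, so a pair totals the 4 bytes its
  // code point needs.
  inline size_t utf8_length_from_utf16_scalar(const char16_t* in, size_t size) {
    size_t bytes = 0;
    for (size_t i = 0; i < size; i++) {
      uint16_t w = uint16_t(in[i]);
      if (w <= 0x7F)                   bytes += 1;
      else if (w <= 0x7FF)             bytes += 2;
      else if ((w & 0xF800) == 0xD800) bytes += 2; // surrogate half
      else                             bytes += 3;
    }
    return bytes;
  }

count_code_points works the same way in reverse: not_in_range(0xDC00, 0xDFFF) keeps every unit that is not a low surrogate, so each surrogate pair is counted exactly once.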
for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t ascii_mask = input.lteq(0x7F); uint64_t twobyte_mask = input.lteq(0x7FF); uint64_t not_pair_mask = input.not_in_range(0xD800, 0xDFFF); @@ -24382,8 +24526,8 @@ int sse_detect_encodings(const char * buf, size_t len) { if (surrogates_bitmask0 != 0x0 || surrogates_bitmask1 != 0x0) { // Cannot be UTF8 is_utf8 = false; - // Can still be either UTF-16LE or UTF-32LE depending on the positions of the surrogates - // To be valid UTF-32LE, a surrogate cannot be in the two most significant bytes of any 32-bit word. + // Can still be either UTF-16LE or UTF-32 depending on the positions of the surrogates + // To be valid UTF-32, a surrogate cannot be in the two most significant bytes of any 32-bit word. // On the other hand, to be valid UTF-16LE, at least one surrogate must be in the two most significant // bytes of a 32-bit word since they always come in pairs in UTF-16LE. // Note that we always proceed in multiple of 4 before this point so there is no offset in 32-bit words. @@ -24459,7 +24603,7 @@ int sse_detect_encodings(const char * buf, size_t len) { } } else { is_utf16 = false; - // Check for UTF-32LE + // Check for UTF-32 if (len % 4 == 0) { const char32_t * input = reinterpret_cast(buf); const char32_t* end32 = reinterpret_cast(start) + len/4; @@ -24498,7 +24642,7 @@ int sse_detect_encodings(const char * buf, size_t len) { } // If no surrogate, validate under other encodings as well - // UTF-32LE validation + // UTF-32 validation currentmax = _mm_max_epu32(in, currentmax); currentmax = _mm_max_epu32(secondin, currentmax); currentmax = _mm_max_epu32(thirdin, currentmax); @@ -27596,7 +27740,7 @@ simdutf_really_inline size_t count_code_points(const char16_t* in, size_t size) size_t count = 0; for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t not_pair = input.not_in_range(0xDC00, 0xDFFF); count += count_ones(not_pair) / 2; } @@ -27610,7 +27754,7 @@ simdutf_really_inline size_t utf8_length_from_utf16(const char16_t* in, size_t s // This algorithm could no doubt be improved! for(;pos + 32 <= size; pos += 32) { simd16x32 input(reinterpret_cast(in + pos)); - if (big_endian) input.swap_bytes(); + if (!match_system(big_endian)) input.swap_bytes(); uint64_t ascii_mask = input.lteq(0x7F); uint64_t twobyte_mask = input.lteq(0x7FF); uint64_t not_pair_mask = input.not_in_range(0xD800, 0xDFFF); diff --git a/deps/simdutf/simdutf.h b/deps/simdutf/simdutf.h index f3f63e78d94d7b..cf236fe3d1c8a9 100644 --- a/deps/simdutf/simdutf.h +++ b/deps/simdutf/simdutf.h @@ -1,4 +1,4 @@ -/* auto-generated on 2023-01-02 15:43:33 -0500. Do not edit! */ +/* auto-generated on 2023-01-18 12:43:26 -0500. Do not edit! */ // dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf.h /* begin file include/simdutf.h */ #ifndef SIMDUTF_H @@ -64,6 +64,48 @@ #include #endif +/** + * We want to check that it is actually a little endian system at + * compile-time. 
+ */
+
+#if defined(__BYTE_ORDER__) && defined(__ORDER_BIG_ENDIAN__)
+#define SIMDUTF_IS_BIG_ENDIAN (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
+#elif defined(_WIN32)
+#define SIMDUTF_IS_BIG_ENDIAN 0
+#else
+#if defined(__APPLE__) || defined(__FreeBSD__) // defined __BYTE_ORDER__ && defined __ORDER_BIG_ENDIAN__
+#include <machine/endian.h>
+#elif defined(sun) || defined(__sun) // defined(__APPLE__) || defined(__FreeBSD__)
+#include <sys/byteorder.h>
+#else // defined(__APPLE__) || defined(__FreeBSD__)
+
+#ifdef __has_include
+#if __has_include(<endian.h>)
+#include <endian.h>
+#endif //__has_include(<endian.h>)
+#endif //__has_include
+
+#endif // defined(__APPLE__) || defined(__FreeBSD__)
+
+
+#if !defined(__BYTE_ORDER__) || !defined(__ORDER_LITTLE_ENDIAN__)
+#define SIMDUTF_IS_BIG_ENDIAN 0
+#endif
+
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+#define SIMDUTF_IS_BIG_ENDIAN 0
+#else // __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+#define SIMDUTF_IS_BIG_ENDIAN 1
+#endif // __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+
+#endif // defined __BYTE_ORDER__ && defined __ORDER_BIG_ENDIAN__
+
+
+/**
+ * At this point in time, SIMDUTF_IS_BIG_ENDIAN is defined.
+ */
+
 #ifdef _MSC_VER
 #define SIMDUTF_VISUAL_STUDIO 1
 /**
@@ -98,8 +140,10 @@
 #define SIMDUTF_IS_ARM64 1
 #elif defined(__PPC64__) || defined(_M_PPC64)
 //#define SIMDUTF_IS_PPC64 1
-#pragma message("The simdutf library does yet support SIMD acceleration under\
-POWER processors. Please see https://github.com/lemire/simdutf/issues/51")
+// The simdutf library does not yet support SIMD acceleration under
+// POWER processors. Please see https://github.com/lemire/simdutf/issues/51
+#elif defined(__s390__)
+// s390 IBM system. Big endian.
 #else
 // The simdutf library is designed
 // for 64-bit processors and it seems that you are not
@@ -456,6 +500,8 @@ enum endianness {
   BIG
 };
 
+bool match_system(endianness e);
+
 std::string to_string(encoding_type bom); // Note that BOM for UTF8 is discouraged.
@@ -526,14 +572,14 @@ SIMDUTF_DISABLE_UNDESIRED_WARNINGS
 #define SIMDUTF_SIMDUTF_VERSION_H
 
 /** The version of simdutf being used (major.minor.revision) */
-#define SIMDUTF_VERSION "2.1.0"
+#define SIMDUTF_VERSION "3.1.0"
 
 namespace simdutf {
 enum {
   /**
   * The major version (MAJOR.minor.revision) of simdutf being used.
   */
-  SIMDUTF_VERSION_MAJOR = 2,
+  SIMDUTF_VERSION_MAJOR = 3,
   /**
   * The minor version (major.MINOR.revision) of simdutf being used.
   */
@@ -872,6 +918,21 @@ simdutf_warn_unused bool validate_ascii(const char *buf, size_t len) noexcept;
 */
 simdutf_warn_unused result validate_ascii_with_errors(const char *buf, size_t len) noexcept;
 
+/**
+ * Using native endianness; Validate the UTF-16 string.
+ * This function may be best when you expect the input to be almost always valid.
+ * Otherwise, consider using validate_utf16_with_errors.
+ *
+ * Overridden by each implementation.
+ *
+ * This function is not BOM-aware.
+ *
+ * @param buf the UTF-16 string to validate.
+ * @param len the length of the string in number of 2-byte words (char16_t).
+ * @return true if and only if the string is valid UTF-16.
+ */
+simdutf_warn_unused bool validate_utf16(const char16_t *buf, size_t len) noexcept;
+
 /**
 * Validate the UTF-16LE string. This function may be best when you expect
 * the input to be almost always valid. Otherwise, consider using
@@ -902,6 +963,20 @@ simdutf_warn_unused bool validate_utf16le(const char16_t *buf, size_t len) noexc
 */
 simdutf_warn_unused bool validate_utf16be(const char16_t *buf, size_t len) noexcept;
 
+/**
+ * Using native endianness; Validate the UTF-16 string and stop on error.
+ * It might be faster than validate_utf16 when an error is expected to occur early. + * + * Overridden by each implementation. + * + * This function is not BOM-aware. + * + * @param buf the UTF-16 string to validate. + * @param len the length of the string in number of 2-byte words (char16_t). + * @return a result pair struct with an error code and either the position of the error if any or the number of words validated if successful. + */ +simdutf_warn_unused result validate_utf16_with_errors(const char16_t *buf, size_t len) noexcept; + /** * Validate the UTF-16LE string and stop on error. It might be faster than * validate_utf16le when an error is expected to occur early. @@ -931,7 +1006,7 @@ simdutf_warn_unused result validate_utf16le_with_errors(const char16_t *buf, siz simdutf_warn_unused result validate_utf16be_with_errors(const char16_t *buf, size_t len) noexcept; /** - * Validate the UTF-32LE string. This function may be best when you expect + * Validate the UTF-32 string. This function may be best when you expect * the input to be almost always valid. Otherwise, consider using * validate_utf32_with_errors. * @@ -939,26 +1014,39 @@ simdutf_warn_unused result validate_utf16be_with_errors(const char16_t *buf, siz * * This function is not BOM-aware. * - * @param buf the UTF-32LE string to validate. + * @param buf the UTF-32 string to validate. * @param len the length of the string in number of 4-byte words (char32_t). - * @return true if and only if the string is valid UTF-32LE. + * @return true if and only if the string is valid UTF-32. */ simdutf_warn_unused bool validate_utf32(const char32_t *buf, size_t len) noexcept; /** - * Validate the UTF-32LE string and stop on error. It might be faster than + * Validate the UTF-32 string and stop on error. It might be faster than * validate_utf32 when an error is expected to occur early. * * Overridden by each implementation. * * This function is not BOM-aware. * - * @param buf the UTF-32LE string to validate. + * @param buf the UTF-32 string to validate. * @param len the length of the string in number of 4-byte words (char32_t). * @return a result pair struct with an error code and either the position of the error if any or the number of words validated if successful. */ simdutf_warn_unused result validate_utf32_with_errors(const char32_t *buf, size_t len) noexcept; +/** + * Using native endianness; Convert possibly broken UTF-8 string into UTF-16 string. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * @param input the UTF-8 string to convert + * @param length the length of the string in bytes + * @param utf16_buffer the pointer to buffer that can hold conversion result + * @return the number of written char16_t; 0 if the input was not valid UTF-8 string + */ +simdutf_warn_unused size_t convert_utf8_to_utf16(const char * input, size_t length, char16_t* utf16_output) noexcept; + /** * Convert possibly broken UTF-8 string into UTF-16LE string. * @@ -985,6 +1073,20 @@ simdutf_warn_unused size_t convert_utf8_to_utf16le(const char * input, size_t le */ simdutf_warn_unused size_t convert_utf8_to_utf16be(const char * input, size_t length, char16_t* utf16_output) noexcept; +/** + * Using native endianness; Convert possibly broken UTF-8 string into UTF-16 + * string and stop on error. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. 
+ * + * @param input the UTF-8 string to convert + * @param length the length of the string in bytes + * @param utf16_buffer the pointer to buffer that can hold conversion result + * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. + */ +simdutf_warn_unused result convert_utf8_to_utf16_with_errors(const char * input, size_t length, char16_t* utf16_output) noexcept; + /** * Convert possibly broken UTF-8 string into UTF-16LE string and stop on error. * @@ -1012,7 +1114,7 @@ simdutf_warn_unused result convert_utf8_to_utf16le_with_errors(const char * inpu simdutf_warn_unused result convert_utf8_to_utf16be_with_errors(const char * input, size_t length, char16_t* utf16_output) noexcept; /** - * Convert possibly broken UTF-8 string into UTF-32LE string. + * Convert possibly broken UTF-8 string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1025,7 +1127,7 @@ simdutf_warn_unused result convert_utf8_to_utf16be_with_errors(const char * inpu simdutf_warn_unused size_t convert_utf8_to_utf32(const char * input, size_t length, char32_t* utf32_output) noexcept; /** - * Convert possibly broken UTF-8 string into UTF-32LE string and stop on error. + * Convert possibly broken UTF-8 string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1037,6 +1139,18 @@ simdutf_warn_unused size_t convert_utf8_to_utf32(const char * input, size_t leng */ simdutf_warn_unused result convert_utf8_to_utf32_with_errors(const char * input, size_t length, char32_t* utf32_output) noexcept; +/** + * Using native endianness; Convert valid UTF-8 string into UTF-16 string. + * + * This function assumes that the input string is valid UTF-8. + * + * @param input the UTF-8 string to convert + * @param length the length of the string in bytes + * @param utf16_buffer the pointer to buffer that can hold conversion result + * @return the number of written char16_t + */ +simdutf_warn_unused size_t convert_valid_utf8_to_utf16(const char * input, size_t length, char16_t* utf16_buffer) noexcept; + /** * Convert valid UTF-8 string into UTF-16LE string. * @@ -1062,7 +1176,7 @@ simdutf_warn_unused size_t convert_valid_utf8_to_utf16le(const char * input, siz simdutf_warn_unused size_t convert_valid_utf8_to_utf16be(const char * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert valid UTF-8 string into UTF-32LE string. + * Convert valid UTF-8 string into UTF-32 string. * * This function assumes that the input string is valid UTF-8. * @@ -1087,7 +1201,7 @@ simdutf_warn_unused size_t convert_valid_utf8_to_utf32(const char * input, size_ simdutf_warn_unused size_t utf16_length_from_utf8(const char * input, size_t length) noexcept; /** - * Compute the number of 4-byte words that this UTF-8 string would require in UTF-32LE format. + * Compute the number of 4-byte words that this UTF-8 string would require in UTF-32 format. 
* * This function is equivalent to count_utf8 * @@ -1097,10 +1211,25 @@ simdutf_warn_unused size_t utf16_length_from_utf8(const char * input, size_t len * * @param input the UTF-8 string to process * @param length the length of the string in bytes - * @return the number of char32_t words required to encode the UTF-8 string as UTF-32LE + * @return the number of char32_t words required to encode the UTF-8 string as UTF-32 */ simdutf_warn_unused size_t utf32_length_from_utf8(const char * input, size_t length) noexcept; +/** + * Using native endianness; Convert possibly broken UTF-16 string into UTF-8 string. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf8_buffer the pointer to buffer that can hold conversion result + * @return number of written words; 0 if input is not a valid UTF-16LE string + */ +simdutf_warn_unused size_t convert_utf16_to_utf8(const char16_t * input, size_t length, char* utf8_buffer) noexcept; + /** * Convert possibly broken UTF-16LE string into UTF-8 string. * @@ -1131,6 +1260,21 @@ simdutf_warn_unused size_t convert_utf16le_to_utf8(const char16_t * input, size_ */ simdutf_warn_unused size_t convert_utf16be_to_utf8(const char16_t * input, size_t length, char* utf8_buffer) noexcept; +/** + * Using native endianness; Convert possibly broken UTF-16 string into UTF-8 string and stop on error. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf8_buffer the pointer to buffer that can hold conversion result + * @return a result pair struct with an error code and either the position of the error if any or the number of char written if successful. + */ +simdutf_warn_unused result convert_utf16_to_utf8_with_errors(const char16_t * input, size_t length, char* utf8_buffer) noexcept; + /** * Convert possibly broken UTF-16LE string into UTF-8 string and stop on error. * @@ -1161,6 +1305,20 @@ simdutf_warn_unused result convert_utf16le_to_utf8_with_errors(const char16_t * */ simdutf_warn_unused result convert_utf16be_to_utf8_with_errors(const char16_t * input, size_t length, char* utf8_buffer) noexcept; +/** + * Using native endianness; Convert valid UTF-16 string into UTF-8 string. + * + * This function assumes that the input string is valid UTF-16LE. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf8_buffer the pointer to buffer that can hold the conversion result + * @return number of written words; 0 if conversion is not possible + */ +simdutf_warn_unused size_t convert_valid_utf16_to_utf8(const char16_t * input, size_t length, char* utf8_buffer) noexcept; + /** * Convert valid UTF-16LE string into UTF-8 string. * @@ -1190,7 +1348,22 @@ simdutf_warn_unused size_t convert_valid_utf16le_to_utf8(const char16_t * input, simdutf_warn_unused size_t convert_valid_utf16be_to_utf8(const char16_t * input, size_t length, char* utf8_buffer) noexcept; /** - * Convert possibly broken UTF-16LE string into UTF-32LE string. 
+ * Using native endianness; Convert possibly broken UTF-16 string into UTF-32 string. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf32_buffer the pointer to buffer that can hold conversion result + * @return number of written words; 0 if input is not a valid UTF-16LE string + */ +simdutf_warn_unused size_t convert_utf16_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; + +/** + * Convert possibly broken UTF-16LE string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1205,7 +1378,7 @@ simdutf_warn_unused size_t convert_valid_utf16be_to_utf8(const char16_t * input, simdutf_warn_unused size_t convert_utf16le_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; /** - * Convert possibly broken UTF-16BE string into UTF-32LE string. + * Convert possibly broken UTF-16BE string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1220,7 +1393,23 @@ simdutf_warn_unused size_t convert_utf16le_to_utf32(const char16_t * input, size simdutf_warn_unused size_t convert_utf16be_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; /** - * Convert possibly broken UTF-16LE string into UTF-32LE string and stop on error. + * Using native endianness; Convert possibly broken UTF-16 string into + * UTF-32 string and stop on error. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf32_buffer the pointer to buffer that can hold conversion result + * @return a result pair struct with an error code and either the position of the error if any or the number of char32_t written if successful. + */ +simdutf_warn_unused result convert_utf16_to_utf32_with_errors(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; + +/** + * Convert possibly broken UTF-16LE string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1235,7 +1424,7 @@ simdutf_warn_unused size_t convert_utf16be_to_utf32(const char16_t * input, size simdutf_warn_unused result convert_utf16le_to_utf32_with_errors(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; /** - * Convert possibly broken UTF-16BE string into UTF-32LE string and stop on error. + * Convert possibly broken UTF-16BE string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. 
@@ -1250,7 +1439,21 @@ simdutf_warn_unused result convert_utf16le_to_utf32_with_errors(const char16_t * simdutf_warn_unused result convert_utf16be_to_utf32_with_errors(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; /** - * Convert valid UTF-16LE string into UTF-32LE string. + * Using native endianness; Convert valid UTF-16 string into UTF-32 string. + * + * This function assumes that the input string is valid UTF-16 (native endianness). + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @param utf32_buffer the pointer to buffer that can hold the conversion result + * @return number of written words; 0 if conversion is not possible + */ +simdutf_warn_unused size_t convert_valid_utf16_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; + +/** + * Convert valid UTF-16LE string into UTF-32 string. * * This function assumes that the input string is valid UTF-16LE. * @@ -1264,7 +1467,7 @@ simdutf_warn_unused result convert_utf16be_to_utf32_with_errors(const char16_t * simdutf_warn_unused size_t convert_valid_utf16le_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; /** - * Convert valid UTF-16BE string into UTF-32LE string. + * Convert valid UTF-16BE string into UTF-32 string. * * This function assumes that the input string is valid UTF-16LE. * @@ -1277,6 +1480,18 @@ simdutf_warn_unused size_t convert_valid_utf16le_to_utf32(const char16_t * input */ simdutf_warn_unused size_t convert_valid_utf16be_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) noexcept; +/** + * Using native endianness; Compute the number of bytes that this UTF-16 + * string would require in UTF-8 format. + * + * This function does not validate the input. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @return the number of bytes required to encode the UTF-16LE string as UTF-8 + */ +simdutf_warn_unused size_t utf8_length_from_utf16(const char16_t * input, size_t length) noexcept; + /** * Compute the number of bytes that this UTF-16LE string would require in UTF-8 format. * @@ -1300,29 +1515,29 @@ simdutf_warn_unused size_t utf8_length_from_utf16le(const char16_t * input, size simdutf_warn_unused size_t utf8_length_from_utf16be(const char16_t * input, size_t length) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-8 string. + * Convert possibly broken UTF-32 string into UTF-8 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused size_t convert_utf32_to_utf8(const char32_t * input, size_t length, char* utf8_buffer) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-8 string and stop on error. + * Convert possibly broken UTF-32 string into UTF-8 string and stop on error. * * During the conversion also validation of the input string is done. 
* This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char written if successful. @@ -1330,13 +1545,13 @@ simdutf_warn_unused size_t convert_utf32_to_utf8(const char32_t * input, size_t simdutf_warn_unused result convert_utf32_to_utf8_with_errors(const char32_t * input, size_t length, char* utf8_buffer) noexcept; /** - * Convert valid UTF-32LE string into UTF-8 string. + * Convert valid UTF-32 string into UTF-8 string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -1344,44 +1559,75 @@ simdutf_warn_unused result convert_utf32_to_utf8_with_errors(const char32_t * in simdutf_warn_unused size_t convert_valid_utf32_to_utf8(const char32_t * input, size_t length, char* utf8_buffer) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-16LE string. + * Using native endianness; Convert possibly broken UTF-32 string into UTF-16 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string + */ +simdutf_warn_unused size_t convert_utf32_to_utf16(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; + +/** + * Convert possibly broken UTF-32 string into UTF-16LE string. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-32 string to convert + * @param length the length of the string in 4-byte words (char32_t) + * @param utf16_buffer the pointer to buffer that can hold conversion result + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused size_t convert_utf32_to_utf16le(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-16BE string. + * Convert possibly broken UTF-32 string into UTF-16BE string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. 
* - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused size_t convert_utf32_to_utf16be(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-16LE string and stop on error. + * Using native endianness; Convert possibly broken UTF-32 string into UTF-16 + * string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert + * @param length the length of the string in 4-byte words (char32_t) + * @param utf16_buffer the pointer to buffer that can hold conversion result + * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. + */ +simdutf_warn_unused result convert_utf32_to_utf16_with_errors(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; + +/** + * Convert possibly broken UTF-32 string into UTF-16LE string and stop on error. + * + * During the conversion also validation of the input string is done. + * This function is suitable to work with inputs from untrusted sources. + * + * This function is not BOM-aware. + * + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. @@ -1389,14 +1635,14 @@ simdutf_warn_unused size_t convert_utf32_to_utf16be(const char32_t * input, size simdutf_warn_unused result convert_utf32_to_utf16le_with_errors(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert possibly broken UTF-32LE string into UTF-16BE string and stop on error. + * Convert possibly broken UTF-32 string into UTF-16BE string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. @@ -1404,13 +1650,27 @@ simdutf_warn_unused result convert_utf32_to_utf16le_with_errors(const char32_t * simdutf_warn_unused result convert_utf32_to_utf16be_with_errors(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert valid UTF-32LE string into UTF-16LE string. + * Using native endianness; Convert valid UTF-32 string into UTF-16 string. + * + * This function assumes that the input string is valid UTF-32. + * + * This function is not BOM-aware. 
+ * + * @param input the UTF-32 string to convert + * @param length the length of the string in 4-byte words (char32_t) + * @param utf16_buffer the pointer to buffer that can hold the conversion result + * @return number of written words; 0 if conversion is not possible + */ +simdutf_warn_unused size_t convert_valid_utf32_to_utf16(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; + +/** + * Convert valid UTF-32 string into UTF-16LE string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -1418,13 +1678,13 @@ simdutf_warn_unused result convert_utf32_to_utf16be_with_errors(const char32_t * simdutf_warn_unused size_t convert_valid_utf32_to_utf16le(const char32_t * input, size_t length, char16_t* utf16_buffer) noexcept; /** - * Convert valid UTF-32LE string into UTF-16BE string. + * Convert valid UTF-32 string into UTF-16BE string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -1446,29 +1706,45 @@ simdutf_warn_unused size_t convert_valid_utf32_to_utf16be(const char32_t * input void change_endianness_utf16(const char16_t * input, size_t length, char16_t * output) noexcept; /** - * Compute the number of bytes that this UTF-32LE string would require in UTF-8 format. + * Compute the number of bytes that this UTF-32 string would require in UTF-8 format. * * This function does not validate the input. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) - * @return the number of bytes required to encode the UTF-32LE string as UTF-8 + * @return the number of bytes required to encode the UTF-32 string as UTF-8 */ simdutf_warn_unused size_t utf8_length_from_utf32(const char32_t * input, size_t length) noexcept; /** - * Compute the number of two-byte words that this UTF-32LE string would require in UTF-16 format. + * Compute the number of two-byte words that this UTF-32 string would require in UTF-16 format. * * This function does not validate the input. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) - * @return the number of bytes required to encode the UTF-32LE string as UTF-16 + * @return the number of bytes required to encode the UTF-32 string as UTF-16 */ simdutf_warn_unused size_t utf16_length_from_utf32(const char32_t * input, size_t length) noexcept; /** - * Compute the number of bytes that this UTF-16LE string would require in UTF-32LE format. + * Using native endianness; Compute the number of bytes that this UTF-16 + * string would require in UTF-32 format. 
+ * + * This function is equivalent to count_utf16. + * + * This function does not validate the input. + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to convert + * @param length the length of the string in 2-byte words (char16_t) + * @return the number of bytes required to encode the UTF-16LE string as UTF-32 + */ +simdutf_warn_unused size_t utf32_length_from_utf16(const char16_t * input, size_t length) noexcept; + +/** + * Compute the number of bytes that this UTF-16LE string would require in UTF-32 format. * * This function is equivalent to count_utf16le. * @@ -1478,12 +1754,12 @@ simdutf_warn_unused size_t utf16_length_from_utf32(const char32_t * input, size_ * * @param input the UTF-16LE string to convert * @param length the length of the string in 2-byte words (char16_t) - * @return the number of bytes required to encode the UTF-16LE string as UTF-32LE + * @return the number of bytes required to encode the UTF-16LE string as UTF-32 */ simdutf_warn_unused size_t utf32_length_from_utf16le(const char16_t * input, size_t length) noexcept; /** - * Compute the number of bytes that this UTF-16BE string would require in UTF-32LE format. + * Compute the number of bytes that this UTF-16BE string would require in UTF-32 format. * * This function is equivalent to count_utf16be. * @@ -1493,10 +1769,24 @@ simdutf_warn_unused size_t utf32_length_from_utf16le(const char16_t * input, siz * * @param input the UTF-16BE string to convert * @param length the length of the string in 2-byte words (char16_t) - * @return the number of bytes required to encode the UTF-16BE string as UTF-32LE + * @return the number of bytes required to encode the UTF-16BE string as UTF-32 */ simdutf_warn_unused size_t utf32_length_from_utf16be(const char16_t * input, size_t length) noexcept; +/** + * Count the number of code points (characters) in the string assuming that + * it is valid. + * + * This function assumes that the input string is valid UTF-16 (native endianness). + * + * This function is not BOM-aware. + * + * @param input the UTF-16 string to process + * @param length the length of the string in 2-byte words (char16_t) + * @return number of code points + */ +simdutf_warn_unused size_t count_utf16(const char16_t * input, size_t length) noexcept; + /** * Count the number of code points (characters) in the string assuming that * it is valid. @@ -1705,26 +1995,26 @@ class implementation { simdutf_warn_unused virtual result validate_utf16be_with_errors(const char16_t *buf, size_t len) const noexcept = 0; /** - * Validate the UTF-32LE string. + * Validate the UTF-32 string. * * Overridden by each implementation. * * This function is not BOM-aware. * - * @param buf the UTF-32LE string to validate. + * @param buf the UTF-32 string to validate. * @param len the length of the string in number of 4-byte words (char32_t). - * @return true if and only if the string is valid UTF-32LE. + * @return true if and only if the string is valid UTF-32. */ simdutf_warn_unused virtual bool validate_utf32(const char32_t *buf, size_t len) const noexcept = 0; /** - * Validate the UTF-32LE string and stop on error. + * Validate the UTF-32 string and stop on error. * * Overridden by each implementation. * * This function is not BOM-aware. * - * @param buf the UTF-32LE string to validate. + * @param buf the UTF-32 string to validate. * @param len the length of the string in number of 4-byte words (char32_t). 
* @return a result pair struct with an error code and either the position of the error if any or the number of words validated if successful. */ @@ -1783,7 +2073,7 @@ class implementation { simdutf_warn_unused virtual result convert_utf8_to_utf16be_with_errors(const char * input, size_t length, char16_t* utf16_output) const noexcept = 0; /** - * Convert possibly broken UTF-8 string into UTF-32LE string. + * Convert possibly broken UTF-8 string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1796,7 +2086,7 @@ class implementation { simdutf_warn_unused virtual size_t convert_utf8_to_utf32(const char * input, size_t length, char32_t* utf32_output) const noexcept = 0; /** - * Convert possibly broken UTF-8 string into UTF-32LE string and stop on error. + * Convert possibly broken UTF-8 string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1833,7 +2123,7 @@ class implementation { simdutf_warn_unused virtual size_t convert_valid_utf8_to_utf16be(const char * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert valid UTF-8 string into UTF-32LE string. + * Convert valid UTF-8 string into UTF-32 string. * * This function assumes that the input string is valid UTF-8. * @@ -1856,7 +2146,7 @@ class implementation { simdutf_warn_unused virtual size_t utf16_length_from_utf8(const char * input, size_t length) const noexcept = 0; /** - * Compute the number of 4-byte words that this UTF-8 string would require in UTF-32LE format. + * Compute the number of 4-byte words that this UTF-8 string would require in UTF-32 format. * * This function is equivalent to count_utf8. * @@ -1864,7 +2154,7 @@ class implementation { * * @param input the UTF-8 string to process * @param length the length of the string in bytes - * @return the number of char32_t words required to encode the UTF-8 string as UTF-32LE + * @return the number of char32_t words required to encode the UTF-8 string as UTF-32 */ simdutf_warn_unused virtual size_t utf32_length_from_utf8(const char * input, size_t length) const noexcept = 0; @@ -1957,7 +2247,7 @@ class implementation { simdutf_warn_unused virtual size_t convert_valid_utf16be_to_utf8(const char16_t * input, size_t length, char* utf8_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-16LE string into UTF-32LE string. + * Convert possibly broken UTF-16LE string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1972,7 +2262,7 @@ class implementation { simdutf_warn_unused virtual size_t convert_utf16le_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-16BE string into UTF-32LE string. + * Convert possibly broken UTF-16BE string into UTF-32 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -1987,7 +2277,7 @@ class implementation { simdutf_warn_unused virtual size_t convert_utf16be_to_utf32(const char16_t * input, size_t length, char32_t* utf32_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-16LE string into UTF-32LE string and stop on error. 
+ * Convert possibly broken UTF-16LE string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -2002,7 +2292,7 @@ class implementation { simdutf_warn_unused virtual result convert_utf16le_to_utf32_with_errors(const char16_t * input, size_t length, char32_t* utf32_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-16BE string into UTF-32LE string and stop on error. + * Convert possibly broken UTF-16BE string into UTF-32 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. @@ -2017,7 +2307,7 @@ class implementation { simdutf_warn_unused virtual result convert_utf16be_to_utf32_with_errors(const char16_t * input, size_t length, char32_t* utf32_buffer) const noexcept = 0; /** - * Convert valid UTF-16LE string into UTF-32LE string. + * Convert valid UTF-16LE string into UTF-32 string. * * This function assumes that the input string is valid UTF-16LE. * @@ -2071,29 +2361,29 @@ class implementation { simdutf_warn_unused virtual size_t utf8_length_from_utf16be(const char16_t * input, size_t length) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-8 string. + * Convert possibly broken UTF-32 string into UTF-8 string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused virtual size_t convert_utf32_to_utf8(const char32_t * input, size_t length, char* utf8_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-8 string and stop on error. + * Convert possibly broken UTF-32 string into UTF-8 string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char written if successful. @@ -2101,13 +2391,13 @@ class implementation { simdutf_warn_unused virtual result convert_utf32_to_utf8_with_errors(const char32_t * input, size_t length, char* utf8_buffer) const noexcept = 0; /** - * Convert valid UTF-32LE string into UTF-8 string. + * Convert valid UTF-32 string into UTF-8 string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. 
* - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf8_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -2115,44 +2405,44 @@ class implementation { simdutf_warn_unused virtual size_t convert_valid_utf32_to_utf8(const char32_t * input, size_t length, char* utf8_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-16LE string. + * Convert possibly broken UTF-32 string into UTF-16LE string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused virtual size_t convert_utf32_to_utf16le(const char32_t * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-16BE string. + * Convert possibly broken UTF-32 string into UTF-16BE string. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result - * @return number of written words; 0 if input is not a valid UTF-32LE string + * @return number of written words; 0 if input is not a valid UTF-32 string */ simdutf_warn_unused virtual size_t convert_utf32_to_utf16be(const char32_t * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-16LE string and stop on error. + * Convert possibly broken UTF-32 string into UTF-16LE string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. @@ -2160,14 +2450,14 @@ class implementation { simdutf_warn_unused virtual result convert_utf32_to_utf16le_with_errors(const char32_t * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert possibly broken UTF-32LE string into UTF-16BE string and stop on error. + * Convert possibly broken UTF-32 string into UTF-16BE string and stop on error. * * During the conversion also validation of the input string is done. * This function is suitable to work with inputs from untrusted sources. * * This function is not BOM-aware. 
* - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold conversion result * @return a result pair struct with an error code and either the position of the error if any or the number of char16_t written if successful. @@ -2175,13 +2465,13 @@ class implementation { simdutf_warn_unused virtual result convert_utf32_to_utf16be_with_errors(const char32_t * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert valid UTF-32LE string into UTF-16LE string. + * Convert valid UTF-32 string into UTF-16LE string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -2189,13 +2479,13 @@ class implementation { simdutf_warn_unused virtual size_t convert_valid_utf32_to_utf16le(const char32_t * input, size_t length, char16_t* utf16_buffer) const noexcept = 0; /** - * Convert valid UTF-32LE string into UTF-16BE string. + * Convert valid UTF-32 string into UTF-16BE string. * - * This function assumes that the input string is valid UTF-32LE. + * This function assumes that the input string is valid UTF-32. * * This function is not BOM-aware. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) * @param utf16_buffer the pointer to buffer that can hold the conversion result * @return number of written words; 0 if conversion is not possible @@ -2217,29 +2507,29 @@ class implementation { virtual void change_endianness_utf16(const char16_t * input, size_t length, char16_t * output) const noexcept = 0; /** - * Compute the number of bytes that this UTF-32LE string would require in UTF-8 format. + * Compute the number of bytes that this UTF-32 string would require in UTF-8 format. * * This function does not validate the input. * - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) - * @return the number of bytes required to encode the UTF-32LE string as UTF-8 + * @return the number of bytes required to encode the UTF-32 string as UTF-8 */ simdutf_warn_unused virtual size_t utf8_length_from_utf32(const char32_t * input, size_t length) const noexcept = 0; /** - * Compute the number of two-byte words that this UTF-32LE string would require in UTF-16 format. + * Compute the number of two-byte words that this UTF-32 string would require in UTF-16 format. * * This function does not validate the input. 
* - * @param input the UTF-32LE string to convert + * @param input the UTF-32 string to convert * @param length the length of the string in 4-byte words (char32_t) - * @return the number of bytes required to encode the UTF-32LE string as UTF-16 + * @return the number of bytes required to encode the UTF-32 string as UTF-16 */ simdutf_warn_unused virtual size_t utf16_length_from_utf32(const char32_t * input, size_t length) const noexcept = 0; /* - * Compute the number of bytes that this UTF-16LE string would require in UTF-32LE format. + * Compute the number of bytes that this UTF-16LE string would require in UTF-32 format. * * This function is equivalent to count_utf16le. * @@ -2249,12 +2539,12 @@ class implementation { * * @param input the UTF-16LE string to convert * @param length the length of the string in 2-byte words (char16_t) - * @return the number of bytes required to encode the UTF-16LE string as UTF-32LE + * @return the number of bytes required to encode the UTF-16LE string as UTF-32 */ simdutf_warn_unused virtual size_t utf32_length_from_utf16le(const char16_t * input, size_t length) const noexcept = 0; /* - * Compute the number of bytes that this UTF-16BE string would require in UTF-32LE format. + * Compute the number of bytes that this UTF-16BE string would require in UTF-32 format. * * This function is equivalent to count_utf16be. * @@ -2264,7 +2554,7 @@ class implementation { * * @param input the UTF-16BE string to convert * @param length the length of the string in 2-byte words (char16_t) - * @return the number of bytes required to encode the UTF-16BE string as UTF-32LE + * @return the number of bytes required to encode the UTF-16BE string as UTF-32 */ simdutf_warn_unused virtual size_t utf32_length_from_utf16be(const char16_t * input, size_t length) const noexcept = 0; @@ -2429,19 +2719,22 @@ class atomic_ptr { #endif }; +class detect_best_supported_implementation_on_first_use; + } // namespace internal /** - * The list of available implementations compiled into simdutf. + * The list of available implementations compiled into simdjson. */ -extern SIMDUTF_DLLIMPORTEXPORT const internal::available_implementation_list available_implementations; +extern SIMDUTF_DLLIMPORTEXPORT const internal::available_implementation_list& get_available_implementations(); /** * The active implementation. * * Automatically initialized on first use to the most advanced implementation supported by this hardware. 
 */
-extern SIMDUTF_DLLIMPORTEXPORT internal::atomic_ptr<const implementation> active_implementation;
+extern SIMDUTF_DLLIMPORTEXPORT internal::atomic_ptr<const implementation>& get_active_implementation();
+
 } // namespace simdutf
 

From 646cadccd0f2606ae1a4b523138adf0619ecd9ba Mon Sep 17 00:00:00 2001
From: Yagiz Nizipli
Date: Wed, 18 Jan 2023 15:59:20 -0500
Subject: [PATCH 183/191] src: fix endianness of simdutf

PR-URL: https://github.com/nodejs/node/pull/46257
Reviewed-By: Anna Henningsen
Reviewed-By: Joyee Cheung
Reviewed-By: Yagiz Nizipli
Reviewed-By: Stephen Belanger
Reviewed-By: Rich Trott
---
 src/node_builtins.cc     |  8 ++++----
 test/cctest/test_util.cc | 14 --------------
 2 files changed, 4 insertions(+), 18 deletions(-)

diff --git a/src/node_builtins.cc b/src/node_builtins.cc
index fc671eb88a856f..91c008be921294 100644
--- a/src/node_builtins.cc
+++ b/src/node_builtins.cc
@@ -253,10 +253,10 @@ bool BuiltinLoader::Add(const char* id, std::string_view utf8source) {
   size_t expected_u16_length =
       simdutf::utf16_length_from_utf8(utf8source.data(), utf8source.length());
   auto out = std::make_shared<std::vector<uint16_t>>(expected_u16_length);
-  size_t u16_length = simdutf::convert_utf8_to_utf16le(
-      utf8source.data(),
-      utf8source.length(),
-      reinterpret_cast<char16_t*>(out->data()));
+  size_t u16_length =
+      simdutf::convert_utf8_to_utf16(utf8source.data(),
+                                     utf8source.length(),
+                                     reinterpret_cast<char16_t*>(out->data()));
   out->resize(u16_length);
   return Add(id, UnionBytes(out));
 }
diff --git a/test/cctest/test_util.cc b/test/cctest/test_util.cc
index 443a03117c09fc..5b83e07db3b7c2 100644
--- a/test/cctest/test_util.cc
+++ b/test/cctest/test_util.cc
@@ -299,17 +299,3 @@ TEST(UtilTest, SPrintF) {
   const std::string with_zero = std::string("a") + '\0' + 'b';
   EXPECT_EQ(SPrintF("%s", with_zero), with_zero);
 }
-
-TEST(UtilTest, SimdutfEndiannessDoesNotMeanEndianness) {
-  // In simdutf, "LE" does *not* refer to Little Endian, it refers
-  // to 16-byte code units that are stored using *host* endianness.
-  // This is weird and confusing naming, and so we add this assertion
-  // here to verify that this is actually the case (so that CI tells
-  // us if it changed, because for most people Little Endian is
-  // host endianness, so locally everything would work fine).
-  const char utf8source[] = "\xe7\x8c\xab";
-  char16_t u16output;
-  size_t u16len = simdutf::convert_utf8_to_utf16le(utf8source, 3, &u16output);
-  EXPECT_EQ(u16len, 1u);
-  EXPECT_EQ(u16output, 0x732B);
-}

From a3056f4125d3607f7d6592ddc9934ecc78c1adf6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C3=ABl=20Zasso?=
Date: Thu, 19 Jan 2023 01:14:07 +0100
Subject: [PATCH 184/191] test: refactor to avoid mutation of global by a
 loader

This makes the test compatible with off-thread loaders.
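For context on the endianness fix in patch 183 above: `convert_utf8_to_utf16le` always produces little-endian code units, whereas `convert_utf8_to_utf16` uses the machine's native byte order (the "native endianness" wording in the simdutf doc comments earlier in this series), and the decoded buffer here is ultimately consumed as host-endian `char16_t`. A minimal JavaScript sketch of the distinction, reusing U+732B ('猫') from the removed cctest; this is an illustration only, not part of any patch:

```js
// Two bytes of U+732B ('猫') laid out in little-endian order, i.e. what a
// "...utf16le" conversion would write into the buffer.
const buf = new ArrayBuffer(2);
new Uint8Array(buf).set([0x2b, 0x73]);

// A DataView with an explicit little-endian flag decodes the same everywhere.
console.log(new DataView(buf).getUint16(0, true).toString(16)); // '732b'

// A Uint16Array always uses *host* byte order: '732b' on little-endian
// machines but '2b73' on big-endian ones -- the mismatch fixed by switching
// to the native-endian conversion.
console.log(new Uint16Array(buf)[0].toString(16));
```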
Co-Authored-By: Geoffrey Booth PR-URL: https://github.com/nodejs/node/pull/46220 Reviewed-By: Antoine du Hamel Reviewed-By: Geoffrey Booth --- .../test-esm-loader-resolve-type.mjs | 32 ++++++++++--------- .../es-module-loaders/hook-resolve-type.mjs | 25 ++++++++++++++- 2 files changed, 41 insertions(+), 16 deletions(-) diff --git a/test/es-module/test-esm-loader-resolve-type.mjs b/test/es-module/test-esm-loader-resolve-type.mjs index 722cf5404d25de..482320c664c5d8 100644 --- a/test/es-module/test-esm-loader-resolve-type.mjs +++ b/test/es-module/test-esm-loader-resolve-type.mjs @@ -6,6 +6,9 @@ import * as fs from 'fs'; allowGlobals(global.getModuleTypeStats); +const { importedESM: importedESMBefore, + importedCJS: importedCJSBefore } = await global.getModuleTypeStats(); + const basePath = new URL('./node_modules/', import.meta.url); @@ -17,25 +20,24 @@ const createDir = (path) => { }; const moduleName = 'module-counter-by-type'; - const moduleDir = rel(`${moduleName}`); -createDir(basePath); -createDir(moduleDir); -fs.cpSync( - fixtures.path('es-modules', moduleName), - moduleDir, - { recursive: true } -); - -const { importedESM: importedESMBefore, - importedCJS: importedCJSBefore } = global.getModuleTypeStats(); - -await import(`${moduleName}`).finally(() => { +try { + createDir(basePath); + createDir(moduleDir); + fs.cpSync( + fixtures.path('es-modules', moduleName), + moduleDir, + { recursive: true } + ); + + + await import(`${moduleName}`); +} finally { fs.rmSync(basePath, { recursive: true, force: true }); -}); +} const { importedESM: importedESMAfter, - importedCJS: importedCJSAfter } = global.getModuleTypeStats(); + importedCJS: importedCJSAfter } = await global.getModuleTypeStats(); // Dynamic import above should increment ESM counter but not CJS counter assert.strictEqual(importedESMBefore + 1, importedESMAfter); diff --git a/test/fixtures/es-module-loaders/hook-resolve-type.mjs b/test/fixtures/es-module-loaders/hook-resolve-type.mjs index b1f5606c0e4a0a..a4d87938ad843f 100644 --- a/test/fixtures/es-module-loaders/hook-resolve-type.mjs +++ b/test/fixtures/es-module-loaders/hook-resolve-type.mjs @@ -1,6 +1,29 @@ let importedESM = 0; let importedCJS = 0; -global.getModuleTypeStats = () => { return {importedESM, importedCJS} }; + +export function globalPreload({ port }) { + port.on('message', (int32) => { + port.postMessage({ importedESM, importedCJS }); + Atomics.store(int32, 0, 1); + Atomics.notify(int32, 0); + }); + port.unref(); + return ` + const { receiveMessageOnPort } = getBuiltin('worker_threads'); + global.getModuleTypeStats = async function getModuleTypeStats() { + const sab = new SharedArrayBuffer(4); + const int32 = new Int32Array(sab); + port.postMessage(int32); + // Artificial timeout to keep the event loop alive. + // https://bugs.chromium.org/p/v8/issues/detail?id=13238 + // TODO(targos) Remove when V8 issue is resolved. + const timeout = setTimeout(() => { throw new Error('timeout'); }, 1_000); + await Atomics.waitAsync(int32, 0, 0).value; + clearTimeout(timeout); + return receiveMessageOnPort(port).message; + }; + `; +} export async function load(url, context, next) { return next(url); From 297773c6d10c091ffb2a980615c21f742babe7f8 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Thu, 19 Jan 2023 16:10:25 +0900 Subject: [PATCH 185/191] events: change status of `event.returnvalue` to legacy `event.returnvalue` is described as legacy in spec. Plus, add missed '#'(private member) of defaultPrevented in implementation. 
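A minimal sketch (illustration only, not part of the patch) of the legacy semantics being documented: `returnValue` mirrors `!defaultPrevented`, and `preventDefault()` only takes effect when the event was constructed as cancelable, which is what the `!this.#cancelable || !this.#defaultPrevented` expression in the diff below encodes.

```js
const cancelable = new Event('demo', { cancelable: true });
cancelable.preventDefault();
console.log(cancelable.defaultPrevented, cancelable.returnValue); // true false

// preventDefault() is ignored for non-cancelable events, so returnValue
// stays true.
const plain = new Event('demo'); // cancelable defaults to false
plain.preventDefault();
console.log(plain.defaultPrevented, plain.returnValue); // false true
```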
Refs: https://dom.spec.whatwg.org/#interface-event Refs: https://developer.mozilla.org/en-US/docs/Web/API/Event/returnValue PR-URL: https://github.com/nodejs/node/pull/46175 Reviewed-By: James M Snell Reviewed-By: Antoine du Hamel --- doc/api/events.md | 4 ++++ lib/internal/event_target.js | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/api/events.md b/doc/api/events.md index 0b61c1add6f20e..104f2e22f2d541 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -2111,8 +2111,11 @@ Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. added: v14.5.0 --> +> Stability: 3 - Legacy: Use [`event.defaultPrevented`][] instead. + * Type: {boolean} True if the event has not been canceled. +The value of `event.returnValue` is always the opposite of `event.defaultPrevented`. This is not used in Node.js and is provided purely for completeness. #### `event.srcElement` @@ -2429,6 +2432,7 @@ to the `EventTarget`. [`emitter.listenerCount()`]: #emitterlistenercounteventname [`emitter.removeListener()`]: #emitterremovelistenereventname-listener [`emitter.setMaxListeners(n)`]: #emittersetmaxlistenersn +[`event.defaultPrevented`]: #eventdefaultprevented [`event.stopPropagation()`]: #eventstoppropagation [`event.target`]: #eventtarget [`events.defaultMaxListeners`]: #eventsdefaultmaxlisteners diff --git a/lib/internal/event_target.js b/lib/internal/event_target.js index 733636b5d7ae07..e75c09c49a7096 100644 --- a/lib/internal/event_target.js +++ b/lib/internal/event_target.js @@ -236,7 +236,7 @@ class Event { get returnValue() { if (!isEvent(this)) throw new ERR_INVALID_THIS('Event'); - return !this.defaultPrevented; + return !this.#cancelable || !this.#defaultPrevented; } /** From 0defe4effa82824ea340158d45e53cb57f940cbd Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Thu, 19 Jan 2023 18:08:38 +0900 Subject: [PATCH 186/191] trace_events: refactor to use `validateStringArray` `options.categories` is string[]. 
So used `validateStringArray` Refs: https://nodejs.org/dist/latest-v19.x/docs/api/tracing.html#trace_eventscreatetracingoptions PR-URL: https://github.com/nodejs/node/pull/46012 Reviewed-By: Luigi Pinca --- lib/trace_events.js | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/lib/trace_events.js b/lib/trace_events.js index 5211f8b0b1fc74..672095cec41a30 100644 --- a/lib/trace_events.js +++ b/lib/trace_events.js @@ -1,7 +1,6 @@ 'use strict'; const { - ArrayIsArray, ArrayPrototypeJoin, SafeSet, Symbol, @@ -17,7 +16,6 @@ const kMaxTracingCount = 10; const { ERR_TRACE_EVENTS_CATEGORY_REQUIRED, ERR_TRACE_EVENTS_UNAVAILABLE, - ERR_INVALID_ARG_TYPE } = require('internal/errors').codes; const { ownsProcessState } = require('internal/worker'); @@ -29,6 +27,7 @@ const { customInspectSymbol } = require('internal/util'); const { format } = require('internal/util/inspect'); const { validateObject, + validateStringArray, } = require('internal/validators'); const enabledTracingObjects = new SafeSet(); @@ -84,11 +83,7 @@ class Tracing { function createTracing(options) { validateObject(options, 'options'); - - if (!ArrayIsArray(options.categories)) { - throw new ERR_INVALID_ARG_TYPE('options.categories', 'string[]', - options.categories); - } + validateStringArray(options.categories, 'options.categories'); if (options.categories.length <= 0) throw new ERR_TRACE_EVENTS_CATEGORY_REQUIRED(); From a87963de6b852066dbb8fc3952cacab283b00c9d Mon Sep 17 00:00:00 2001 From: Debadree Chatterjee Date: Thu, 19 Jan 2023 14:38:50 +0530 Subject: [PATCH 187/191] stream: fix pipeline calling end on destination more than once Fixes: https://github.com/nodejs/node/issues/42866 PR-URL: https://github.com/nodejs/node/pull/46226 Reviewed-By: Luigi Pinca Reviewed-By: Robert Nagy Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Minwoo Jung --- lib/internal/streams/pipeline.js | 2 +- test/parallel/test-stream-pipeline.js | 35 +++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 77520a14d50a6f..b8a756330536c5 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -353,7 +353,7 @@ function pipe(src, dst, finish, { end }) { } }); - src.pipe(dst, { end }); + src.pipe(dst, { end: false }); // If end is true we already will have a listener to end dst. if (end) { // Compat. 
Before node v10.12.0 stdio used to throw an error so diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index 529b18386e25a6..65ef5164c14b4c 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -1556,3 +1556,38 @@ const tsp = require('timers/promises'); }) ); } + +{ + class CustomReadable extends Readable { + _read() { + this.push('asd'); + this.push(null); + } + } + + class CustomWritable extends Writable { + constructor() { + super(); + this.endCount = 0; + this.str = ''; + } + + _write(chunk, enc, cb) { + this.str += chunk; + cb(); + } + + end() { + this.endCount += 1; + super.end(); + } + } + + const readable = new CustomReadable(); + const writable = new CustomWritable(); + + pipeline(readable, writable, common.mustSucceed(() => { + assert.strictEqual(writable.str, 'asd'); + assert.strictEqual(writable.endCount, 1); + })); +} From de2b7a964027a02a99a9ccef99a1c1bddd07c673 Mon Sep 17 00:00:00 2001 From: Deokjin Kim Date: Thu, 19 Jan 2023 18:41:09 +0900 Subject: [PATCH 188/191] doc: fix mismatched arguments of `NodeEventTarget` Arguments of some APIs are mismatched and 2 APIs are not as described. PR-URL: https://github.com/nodejs/node/pull/45678 Reviewed-By: Benjamin Gruenbaum Reviewed-By: James M Snell --- doc/api/events.md | 51 +++++++++++++++++++++++++----------- lib/internal/event_target.js | 2 +- 2 files changed, 36 insertions(+), 17 deletions(-) diff --git a/doc/api/events.md b/doc/api/events.md index 104f2e22f2d541..7fe87d037e76ea 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1911,9 +1911,8 @@ and cannot be used in place of an `EventEmitter` in most cases. ignored. 2. The `NodeEventTarget` does not emulate the full `EventEmitter` API. Specifically the `prependListener()`, `prependOnceListener()`, - `rawListeners()`, `setMaxListeners()`, `getMaxListeners()`, and - `errorMonitor` APIs are not emulated. The `'newListener'` and - `'removeListener'` events will also not be emitted. + `rawListeners()`, and `errorMonitor` APIs are not emulated. + The `'newListener'` and `'removeListener'` events will also not be emitted. 3. The `NodeEventTarget` does not implement any special default behavior for events with type `'error'`. 4. The `NodeEventTarget` supports `EventListener` objects as well as @@ -2298,7 +2297,7 @@ added: v14.5.0 The `NodeEventTarget` is a Node.js-specific extension to `EventTarget` that emulates a subset of the `EventEmitter` API. -#### `nodeEventTarget.addListener(type, listener[, options])` +#### `nodeEventTarget.addListener(type, listener)` + +* `n` {number} + +Node.js-specific extension to the `EventTarget` class that sets the number +of max event listeners as `n`. + +#### `nodeEventTarget.getMaxListeners()` + + + +* Returns: {number} + +Node.js-specific extension to the `EventTarget` class that returns the number +of max event listeners. + +#### `nodeEventTarget.off(type, listener[, options])` * input {Buffer | ArrayBuffer | TypedArray} The input to validate. diff --git a/doc/api/http.md b/doc/api/http.md index bbefe1e49a8199..3d66acccbb97a1 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -1503,7 +1503,7 @@ added: - v11.3.0 - v10.14.0 changes: - - version: REPLACEME + - version: v18.14.0 pr-url: https://github.com/nodejs/node/pull/45778 description: The default is now set to the minimum between 60000 (60 seconds) or `requestTimeout`. --> @@ -2413,7 +2413,7 @@ as an argument to any listeners on the event. 
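The `nodeEventTarget.setMaxListeners(n)` and `nodeEventTarget.getMaxListeners()` sections added in the events.md hunk above document methods on the internal `NodeEventTarget` class. A minimal sketch of the equivalent public knob; it uses the standalone `events.setMaxListeners()` helper (which accepts any `EventTarget`) rather than the internal class, purely for illustration:

```js
const { setMaxListeners } = require('node:events');

// AbortSignal is an EventTarget; raise its listener-count warning
// threshold from the default of 10 for this one instance.
const controller = new AbortController();
setMaxListeners(20, controller.signal);
```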
diff --git a/doc/api/os.md b/doc/api/os.md
index e89d1bf23267e0..820c6992ae8e4f 100644
--- a/doc/api/os.md
+++ b/doc/api/os.md
@@ -29,7 +29,7 @@ The operating system-specific end-of-line marker.
 ## `os.availableParallelism()`
 
 <!-- YAML
-added: REPLACEME
+added: v18.14.0
 -->
 
 * Returns: {integer}
diff --git a/doc/changelogs/CHANGELOG_V18.md b/doc/changelogs/CHANGELOG_V18.md
index fb6d4c80d6ec9a..2a6b06c50ae84c 100644
--- a/doc/changelogs/CHANGELOG_V18.md
+++ b/doc/changelogs/CHANGELOG_V18.md
@@ -9,6 +9,7 @@
+<a href="#18.14.0">18.14.0</a><br/>
 <a href="#18.13.0">18.13.0</a><br/>
 <a href="#18.12.1">18.12.1</a><br/>
 <a href="#18.12.0">18.12.0</a><br/>
@@ -51,6 +52,292 @@
 * [io.js](CHANGELOG_IOJS.md)
 * [Archive](CHANGELOG_ARCHIVE.md)
 
+<a id="18.14.0"></a>
+
+## 2023-02-02, Version 18.14.0 'Hydrogen' (LTS), @BethGriggs prepared by @juanarbol
+
+### Notable changes
+
+#### Updated npm to 9.3.1
+
+Based on the [list of guidelines we've established on integrating `npm` and `node`](https://github.com/npm/cli/wiki/Integrating-with-node),
+here is a grouped list of the breaking changes with the reasoning as to why they
+fit within the guidelines linked above. Note that all the breaking changes were
+made in [9.0.0](https://github.com/npm/cli/releases/tag/v9.0.0).
+All subsequent minor and patch releases after `npm@9.0.0` do not contain any
+breaking changes.
+
+##### Engines
+
+> Explanation: the node engines supported by `npm@9` make it safe to allow `npm@9` as the default in any LTS version of `14` or `16`, as well as anything later than or including `18.0.0`
+
+* `npm` is now compatible with the following semver range for node: `^14.17.0 || ^16.13.0 || >=18.0.0`
+
+##### Filesystem
+
+> Explanation: when run as root previous versions of npm attempted to manage file ownership automatically on the user's behalf. This behavior was problematic in many cases and has been removed in favor of allowing users to manage their own filesystem permissions
+
+* `npm` will no longer attempt to modify ownership of files it creates.
+
+##### Auth
+
+> Explanation: any errors thrown from users having unsupported auth configurations will show `npm config fix` in the remediation instructions, which will allow the user to automatically have their auth config fixed.
+
+* The presence of auth related settings that are not scoped to a specific
+  registry found in a config file is no longer supported and will throw errors.
+
+##### Login
+
+> Explanation: the default `auth-type` has changed and users can opt back into the old behavior with `npm config set auth-type=legacy`. `login` and `adduser` have also been separated, making each command more closely match its name instead of being aliases for each other.
+
+* Legacy auth types `sso`, `saml` & `legacy` have been consolidated into `"legacy"`.
+* `auth-type` defaults to `"web"`
+* `login` and `adduser` are now separate commands that send different data to the registry.
+* `auth-type` config values `web` and `legacy` only try their respective methods,
+  npm no longer tries them all and waits to see which one doesn't fail.
+
+##### Tarball Packing
+
+> Explanation: previously using multiple ignore/allow lists when packing was an undefined behavior, and now the order of operations is strictly defined when packing a tarball, making it easier to follow, and should only affect users relying on the previously undefined behavior.
+
+* `npm pack` now follows a strict order of operations when applying ignore rules.
+  If a `files` array is present in the `package.json`, then rules in `.gitignore`
+  and `.npmignore` files from the root will be ignored.
+
+##### Display/Debug/Timing Info
+
+> Explanation: these changes center around the display of information to the terminal including timing and debug log info. We do not anticipate these changes breaking any existing workflows.
+
+* Links generated from git URLs will now use `HEAD` instead of `master` as the default ref.
+* `timing` has been removed as a value for `--loglevel`.
+* `--timing` will show timing information regardless of `--loglevel`, except when `--silent`.
+* When run with the `--timing` flag, `npm` now writes timing data to a file
+  alongside the debug log data, respecting the `logs-dir` option and falling
+  back to `<CACHE>/_logs/` dir, instead of directly inside the cache directory.
+* The timing file data is no longer newline delimited JSON, and instead each run
+  will create a uniquely named `<ID>-timing.json` file, with the `<ID>` portion
+  being the same as the debug log.
+* `npm` now outputs some json errors on stdout. Previously `npm` would output
+  all json formatted errors on stderr, making it difficult to parse as the
+  stderr stream usually has logs already written to it.
+
+##### Config/Command Deprecations or Removals
+
+> Explanation: `install-links` is the only config or command in the list that has an effect on package installs. We fixed a number of issues that came up during prereleases with this change. It will also only be applied to new package trees created without a package-lock.json file. Any install with an existing lock file will not be changed.
+
+* Deprecate boolean install flags in favor of `--install-strategy`.
+* `npm config set` will no longer accept deprecated or invalid config options.
+* `install-links` config defaults to `"true"`.
+* `node-version` config has been removed.
+* `npm-version` config has been removed.
+* `npm access` subcommands have been renamed.
+* `npm birthday` has been removed.
+* `npm set-script` has been removed.
+* `npm bin` has been removed (use `npx` or `npm exec` to execute binaries).
+
+#### Other notable changes
+
+* **doc**:
+  * add parallelism note to os.cpus() (Colin Ihrig) [#45895](https://github.com/nodejs/node/pull/45895)
+* **http**:
+  * join authorization headers (Marco Ippolito) [#45982](https://github.com/nodejs/node/pull/45982)
+  * improved timeout defaults handling (Paolo Insogna) [#45778](https://github.com/nodejs/node/pull/45778)
+* **stream**:
+  * implement finished() for ReadableStream and WritableStream (Debadree Chatterjee) [#46205](https://github.com/nodejs/node/pull/46205)
+
+### Commits
+
+* \[[`1352f08778`](https://github.com/nodejs/node/commit/1352f08778)] - **assert**: remove `assert.snapshot` (Moshe Atlow) [#46112](https://github.com/nodejs/node/pull/46112)
+* \[[`4ee3238643`](https://github.com/nodejs/node/commit/4ee3238643)] - **async\_hooks**: refactor to use `validateObject` (Deokjin Kim) [#46004](https://github.com/nodejs/node/pull/46004)
+* \[[`79e0bf9b64`](https://github.com/nodejs/node/commit/79e0bf9b64)] - **benchmark**: include webstreams benchmark (Rafael Gonzaga) [#45876](https://github.com/nodejs/node/pull/45876)
+* \[[`ed1ac82469`](https://github.com/nodejs/node/commit/ed1ac82469)] - **benchmark,tools**: use os.availableParallelism() (Deokjin Kim) [#46003](https://github.com/nodejs/node/pull/46003)
+* \[[`16ee02f2eb`](https://github.com/nodejs/node/commit/16ee02f2eb)] - **(SEMVER-MINOR)** **buffer**: add buffer.isUtf8 for utf8 validation (Yagiz Nizipli) [#45947](https://github.com/nodejs/node/pull/45947)
+* \[[`3bf2371a57`](https://github.com/nodejs/node/commit/3bf2371a57)] - **build**: add extra semi check (Jiawen Geng) [#46194](https://github.com/nodejs/node/pull/46194)
+* \[[`560ee24157`](https://github.com/nodejs/node/commit/560ee24157)] - **build**: fix arm64 cross-compile from powershell (Stefan Stojanovic) [#45890](https://github.com/nodejs/node/pull/45890)
+* 
\[[`48e3ad3aca`](https://github.com/nodejs/node/commit/48e3ad3aca)] - **build**: add option to disable shared readonly heap (Anna Henningsen) [#45887](https://github.com/nodejs/node/pull/45887) +* \[[`52a7887b94`](https://github.com/nodejs/node/commit/52a7887b94)] - **(SEMVER-MINOR)** **crypto**: add CryptoKey Symbol.toStringTag (Filip Skokan) [#46042](https://github.com/nodejs/node/pull/46042) +* \[[`a558774a40`](https://github.com/nodejs/node/commit/a558774a40)] - **crypto**: add cipher update/final methods encoding validation (vitpavlenko) [#45990](https://github.com/nodejs/node/pull/45990) +* \[[`599d1dc841`](https://github.com/nodejs/node/commit/599d1dc841)] - **crypto**: ensure auth tag set for chacha20-poly1305 (Ben Noordhuis) [#46185](https://github.com/nodejs/node/pull/46185) +* \[[`24a101698c`](https://github.com/nodejs/node/commit/24a101698c)] - **crypto**: return correct bit length in KeyObject's asymmetricKeyDetails (Filip Skokan) [#46106](https://github.com/nodejs/node/pull/46106) +* \[[`2de50fef84`](https://github.com/nodejs/node/commit/2de50fef84)] - **(SEMVER-MINOR)** **crypto**: add KeyObject Symbol.toStringTag (Filip Skokan) [#46043](https://github.com/nodejs/node/pull/46043) +* \[[`782b6f6f9f`](https://github.com/nodejs/node/commit/782b6f6f9f)] - **crypto**: ensure exported webcrypto EC keys use uncompressed point format (Ben Noordhuis) [#46021](https://github.com/nodejs/node/pull/46021) +* \[[`7a97f3f43b`](https://github.com/nodejs/node/commit/7a97f3f43b)] - **crypto**: fix CryptoKey prototype WPT (Filip Skokan) [#45857](https://github.com/nodejs/node/pull/45857) +* \[[`1a8aa50aa2`](https://github.com/nodejs/node/commit/1a8aa50aa2)] - **crypto**: fix CryptoKey WebIDL conformance (Filip Skokan) [#45855](https://github.com/nodejs/node/pull/45855) +* \[[`c6436450ee`](https://github.com/nodejs/node/commit/c6436450ee)] - **crypto**: fix error when getRandomValues is called without arguments (Filip Skokan) [#45854](https://github.com/nodejs/node/pull/45854) +* \[[`4cdf0002c5`](https://github.com/nodejs/node/commit/4cdf0002c5)] - **debugger**: refactor console in lib/internal/debugger/inspect.js (Debadree Chatterjee) [#45847](https://github.com/nodejs/node/pull/45847) +* \[[`b7fe8c70fa`](https://github.com/nodejs/node/commit/b7fe8c70fa)] - **deps**: update simdutf to 3.1.0 (Node.js GitHub Bot) [#46257](https://github.com/nodejs/node/pull/46257) +* \[[`eaeb870cd7`](https://github.com/nodejs/node/commit/eaeb870cd7)] - **deps**: upgrade npm to 9.3.1 (npm team) [#46242](https://github.com/nodejs/node/pull/46242) +* \[[`7c03a3d676`](https://github.com/nodejs/node/commit/7c03a3d676)] - **deps**: upgrade npm to 9.3.0 (npm team) [#46193](https://github.com/nodejs/node/pull/46193) +* \[[`340d76accb`](https://github.com/nodejs/node/commit/340d76accb)] - **deps**: cherrypick simdutf patch (Jiawen Geng) [#46194](https://github.com/nodejs/node/pull/46194) +* \[[`cce2af4306`](https://github.com/nodejs/node/commit/cce2af4306)] - **deps**: bump googletest to 2023.01.13 (Jiawen Geng) [#46198](https://github.com/nodejs/node/pull/46198) +* \[[`d251a66bed`](https://github.com/nodejs/node/commit/d251a66bed)] - **deps**: add /deps/\*\*/.github/ to .gitignore (Luigi Pinca) [#46091](https://github.com/nodejs/node/pull/46091) +* 
\[[`874054f469`](https://github.com/nodejs/node/commit/874054f469)] - **deps**: add simdutf version to metadata (Mike Roth) [#46145](https://github.com/nodejs/node/pull/46145) +* \[[`2497702b82`](https://github.com/nodejs/node/commit/2497702b82)] - **deps**: update simdutf to 2.1.0 (Node.js GitHub Bot) [#46128](https://github.com/nodejs/node/pull/46128) +* \[[`c8492b7f4c`](https://github.com/nodejs/node/commit/c8492b7f4c)] - **deps**: update corepack to 0.15.3 (Node.js GitHub Bot) [#46037](https://github.com/nodejs/node/pull/46037) +* \[[`d148f357fd`](https://github.com/nodejs/node/commit/d148f357fd)] - **deps**: update simdutf to 2.0.9 (Node.js GitHub Bot) [#45975](https://github.com/nodejs/node/pull/45975) +* \[[`422a98199c`](https://github.com/nodejs/node/commit/422a98199c)] - **deps**: update to uvwasi 0.0.14 (Colin Ihrig) [#45970](https://github.com/nodejs/node/pull/45970) +* \[[`7812752db0`](https://github.com/nodejs/node/commit/7812752db0)] - **deps**: fix updater github workflow job (Yagiz Nizipli) [#45972](https://github.com/nodejs/node/pull/45972) +* \[[`4063cdcef6`](https://github.com/nodejs/node/commit/4063cdcef6)] - _**Revert**_ "**deps**: disable avx512 for simutf on benchmark ci" (Yagiz Nizipli) [#45948](https://github.com/nodejs/node/pull/45948) +* \[[`64d3e3f3ba`](https://github.com/nodejs/node/commit/64d3e3f3ba)] - **deps**: disable avx512 for simutf on benchmark ci (Yagiz Nizipli) [#45803](https://github.com/nodejs/node/pull/45803) +* \[[`c9845fc334`](https://github.com/nodejs/node/commit/c9845fc334)] - **deps**: add simdutf dependency (Yagiz Nizipli) [#45803](https://github.com/nodejs/node/pull/45803) +* \[[`6963c96547`](https://github.com/nodejs/node/commit/6963c96547)] - **deps**: update timezone to 2022g (Node.js GitHub Bot) [#45731](https://github.com/nodejs/node/pull/45731) +* \[[`874f6c340b`](https://github.com/nodejs/node/commit/874f6c340b)] - **deps**: update undici to 5.14.0 (Node.js GitHub Bot) [#45812](https://github.com/nodejs/node/pull/45812) +* \[[`7599b913d5`](https://github.com/nodejs/node/commit/7599b913d5)] - **deps**: upgrade npm to 9.2.0 (npm team) [#45780](https://github.com/nodejs/node/pull/45780) +* \[[`4844935ff3`](https://github.com/nodejs/node/commit/4844935ff3)] - **deps**: upgrade npm to 9.1.3 (npm team) [#45693](https://github.com/nodejs/node/pull/45693) +* \[[`8dce62c7fe`](https://github.com/nodejs/node/commit/8dce62c7fe)] - **deps**: V8: cherry-pick 5fe919f78321 (Richard Lau) [#45587](https://github.com/nodejs/node/pull/45587) +* \[[`8de642517e`](https://github.com/nodejs/node/commit/8de642517e)] - **dgram**: sync the old handle state to new handle (theanarkh) [#46041](https://github.com/nodejs/node/pull/46041) +* \[[`de2b7a9640`](https://github.com/nodejs/node/commit/de2b7a9640)] - **doc**: fix mismatched arguments of `NodeEventTarget` (Deokjin Kim) [#45678](https://github.com/nodejs/node/pull/45678) +* \[[`6317502d10`](https://github.com/nodejs/node/commit/6317502d10)] - **doc**: update events API example to have runnable code (Deokjin Kim) [#45760](https://github.com/nodejs/node/pull/45760) +* \[[`a9db45eee1`](https://github.com/nodejs/node/commit/a9db45eee1)] - **doc**: add note to tls docs about secureContext availability (Tim Gerk) 
[#46224](https://github.com/nodejs/node/pull/46224) +* \[[`5294371063`](https://github.com/nodejs/node/commit/5294371063)] - **doc**: add text around collaborative expectations (Michael Dawson) [#46121](https://github.com/nodejs/node/pull/46121) +* \[[`be85d5a6eb`](https://github.com/nodejs/node/commit/be85d5a6eb)] - **doc**: update to match changed `--dns-result-order` default (Mordy Tikotzky) [#46148](https://github.com/nodejs/node/pull/46148) +* \[[`4f2d9ea6da`](https://github.com/nodejs/node/commit/4f2d9ea6da)] - **doc**: add Node-API media link (Kevin Eady) [#46189](https://github.com/nodejs/node/pull/46189) +* \[[`9bfd40466f`](https://github.com/nodejs/node/commit/9bfd40466f)] - **doc**: update http.setMaxIdleHTTPParsers arguments (Debadree Chatterjee) [#46168](https://github.com/nodejs/node/pull/46168) +* \[[`d7a8c076e1`](https://github.com/nodejs/node/commit/d7a8c076e1)] - **doc**: use "file system" instead of "filesystem" (Rich Trott) [#46178](https://github.com/nodejs/node/pull/46178) +* \[[`e54483cd2b`](https://github.com/nodejs/node/commit/e54483cd2b)] - **doc**: https update default request timeout (Marco Ippolito) [#46184](https://github.com/nodejs/node/pull/46184) +* \[[`335110b0fb`](https://github.com/nodejs/node/commit/335110b0fb)] - **doc**: make options of readableStream.pipeTo as optional (Deokjin Kim) [#46180](https://github.com/nodejs/node/pull/46180) +* \[[`ec34cad712`](https://github.com/nodejs/node/commit/ec34cad712)] - **doc**: add PerformanceObserver.supportedEntryTypes to doc (theanarkh) [#45962](https://github.com/nodejs/node/pull/45962) +* \[[`d0f905bd6f`](https://github.com/nodejs/node/commit/d0f905bd6f)] - **doc**: duplex and readable from uncaught execption warning (Marco Ippolito) [#46135](https://github.com/nodejs/node/pull/46135) +* \[[`512feaafa4`](https://github.com/nodejs/node/commit/512feaafa4)] - **doc**: remove outdated sections from `maintaining-v8` (Antoine du Hamel) [#46137](https://github.com/nodejs/node/pull/46137) +* \[[`849a3e2ce7`](https://github.com/nodejs/node/commit/849a3e2ce7)] - **doc**: fix (EC)DHE remark in TLS docs (Tobias Nießen) [#46114](https://github.com/nodejs/node/pull/46114) +* \[[`a3c9c1b4e6`](https://github.com/nodejs/node/commit/a3c9c1b4e6)] - **doc**: fix ERR\_TLS\_RENEGOTIATION\_DISABLED text (Tobias Nießen) [#46122](https://github.com/nodejs/node/pull/46122) +* \[[`1834e94ebb`](https://github.com/nodejs/node/commit/1834e94ebb)] - **doc**: fix spelling in SECURITY.md (Vaishno Chaitanya) [#46124](https://github.com/nodejs/node/pull/46124) +* \[[`3968698af5`](https://github.com/nodejs/node/commit/3968698af5)] - **doc**: abort controller emits error in child process (Debadree Chatterjee) [#46072](https://github.com/nodejs/node/pull/46072) +* \[[`1ec14c2c61`](https://github.com/nodejs/node/commit/1ec14c2c61)] - **doc**: fix `event.cancelBubble` documentation (Deokjin Kim) [#45986](https://github.com/nodejs/node/pull/45986) +* \[[`5539977f80`](https://github.com/nodejs/node/commit/5539977f80)] - **doc**: add personal pronouns option (Filip Skokan) [#46118](https://github.com/nodejs/node/pull/46118) +* \[[`1fabef3a81`](https://github.com/nodejs/node/commit/1fabef3a81)] - **doc**: mention how to run ncu-ci citgm (Rafael Gonzaga) 
[#46090](https://github.com/nodejs/node/pull/46090) +* \[[`84dc65ab87`](https://github.com/nodejs/node/commit/84dc65ab87)] - **doc**: include updating release optional step (Rafael Gonzaga) [#46089](https://github.com/nodejs/node/pull/46089) +* \[[`76c7ea1e74`](https://github.com/nodejs/node/commit/76c7ea1e74)] - **doc**: describe argument of `Symbol.for` (Deokjin Kim) [#46019](https://github.com/nodejs/node/pull/46019) +* \[[`2307a74990`](https://github.com/nodejs/node/commit/2307a74990)] - **doc**: update isUtf8 description (Yagiz Nizipli) [#45973](https://github.com/nodejs/node/pull/45973) +* \[[`fa5b65ea24`](https://github.com/nodejs/node/commit/fa5b65ea24)] - **doc**: use console.error for error case in timers and tls (Deokjin Kim) [#46002](https://github.com/nodejs/node/pull/46002) +* \[[`29d509c100`](https://github.com/nodejs/node/commit/29d509c100)] - **doc**: fix wrong output of example in `url.protocol` (Deokjin Kim) [#45954](https://github.com/nodejs/node/pull/45954) +* \[[`61dbca2690`](https://github.com/nodejs/node/commit/61dbca2690)] - **doc**: use `os.availableParallelism()` in async\_context and cluster (Deokjin Kim) [#45979](https://github.com/nodejs/node/pull/45979) +* \[[`86b2c8cea2`](https://github.com/nodejs/node/commit/86b2c8cea2)] - **doc**: make EventEmitterAsyncResource's `options` as optional (Deokjin Kim) [#45985](https://github.com/nodejs/node/pull/45985) +* \[[`335acf7748`](https://github.com/nodejs/node/commit/335acf7748)] - **doc**: replace single executable champion in strategic initiatives doc (Darshan Sen) [#45956](https://github.com/nodejs/node/pull/45956) +* \[[`aab35a9388`](https://github.com/nodejs/node/commit/aab35a9388)] - **doc**: update error message of example in repl (Deokjin Kim) [#45920](https://github.com/nodejs/node/pull/45920) +* \[[`53a94a95ff`](https://github.com/nodejs/node/commit/53a94a95ff)] - **doc**: fix typos in packages.md (Eric Mutta) [#45957](https://github.com/nodejs/node/pull/45957) +* \[[`83875f46cf`](https://github.com/nodejs/node/commit/83875f46cf)] - **doc**: remove port from example in `url.hostname` (Deokjin Kim) [#45927](https://github.com/nodejs/node/pull/45927) +* \[[`162d3a94e3`](https://github.com/nodejs/node/commit/162d3a94e3)] - **doc**: show output of example in http (Deokjin Kim) [#45915](https://github.com/nodejs/node/pull/45915) +* \[[`53684e4506`](https://github.com/nodejs/node/commit/53684e4506)] - **(SEMVER-MINOR)** **doc**: add parallelism note to os.cpus() (Colin Ihrig) [#45895](https://github.com/nodejs/node/pull/45895) +* \[[`546e083d36`](https://github.com/nodejs/node/commit/546e083d36)] - **doc**: fix wrong output of example in `url.password` (Deokjin Kim) [#45928](https://github.com/nodejs/node/pull/45928) +* \[[`14c95ecd23`](https://github.com/nodejs/node/commit/14c95ecd23)] - **doc**: fix some history entries in `deprecations.md` (Antoine du Hamel) [#45891](https://github.com/nodejs/node/pull/45891) +* \[[`d94dba973b`](https://github.com/nodejs/node/commit/d94dba973b)] - **doc**: add tip for NODE\_MODULE (theanarkh) [#45797](https://github.com/nodejs/node/pull/45797) +* \[[`662f574c5b`](https://github.com/nodejs/node/commit/662f574c5b)] - **doc**: reduce likelihood of mismerges during release (Richard Lau) 
[#45864](https://github.com/nodejs/node/pull/45864)
+* \[[`48ea28aa30`](https://github.com/nodejs/node/commit/48ea28aa30)] - **doc**: add backticks to webcrypto rsaOaepParams (Filip Skokan) [#45883](https://github.com/nodejs/node/pull/45883)
+* \[[`726b285163`](https://github.com/nodejs/node/commit/726b285163)] - **doc**: remove release cleanup step (Michaël Zasso) [#45858](https://github.com/nodejs/node/pull/45858)
+* \[[`5eb93f1de9`](https://github.com/nodejs/node/commit/5eb93f1de9)] - **doc**: add stream/promises pipeline and finished to doc (Marco Ippolito) [#45832](https://github.com/nodejs/node/pull/45832)
+* \[[`f874d0ba74`](https://github.com/nodejs/node/commit/f874d0ba74)] - **doc**: remove Juan Jose keys (Rafael Gonzaga) [#45827](https://github.com/nodejs/node/pull/45827)
+* \[[`67efe2a55e`](https://github.com/nodejs/node/commit/67efe2a55e)] - **doc**: fix wrong output of example in util (Deokjin Kim) [#45825](https://github.com/nodejs/node/pull/45825)
+* \[[`b709af31e0`](https://github.com/nodejs/node/commit/b709af31e0)] - **doc**: sort http.createServer() options alphabetically (Luigi Pinca) [#45680](https://github.com/nodejs/node/pull/45680)
+* \[[`ebe292113a`](https://github.com/nodejs/node/commit/ebe292113a)] - **doc,crypto**: fix WebCryptoAPI import keyData and export return (Filip Skokan) [#46076](https://github.com/nodejs/node/pull/46076)
+* \[[`204757719c`](https://github.com/nodejs/node/commit/204757719c)] - **errors**: refactor to use a method that formats a list string (Daeyeon Jeong) [#45793](https://github.com/nodejs/node/pull/45793)
+* \[[`463bb9602e`](https://github.com/nodejs/node/commit/463bb9602e)] - **esm**: mark `importAssertions` as required (Antoine du Hamel) [#46164](https://github.com/nodejs/node/pull/46164)
+* \[[`0bdf2db079`](https://github.com/nodejs/node/commit/0bdf2db079)] - **esm**: rewrite loader hooks test (Geoffrey Booth) [#46016](https://github.com/nodejs/node/pull/46016)
+* \[[`297773c6d1`](https://github.com/nodejs/node/commit/297773c6d1)] - **events**: change status of `event.returnvalue` to legacy (Deokjin Kim) [#46175](https://github.com/nodejs/node/pull/46175)
+* \[[`d088d6e5c3`](https://github.com/nodejs/node/commit/d088d6e5c3)] - **events**: change status of `event.cancelBubble` to legacy (Deokjin Kim) [#46146](https://github.com/nodejs/node/pull/46146)
+* \[[`36be0c4ee2`](https://github.com/nodejs/node/commit/36be0c4ee2)] - **events**: change status of `event.srcElement` to legacy (Deokjin Kim) [#46085](https://github.com/nodejs/node/pull/46085)
+* \[[`b239f0684a`](https://github.com/nodejs/node/commit/b239f0684a)] - **events**: fix violation of symbol naming convention (Deokjin Kim) [#45978](https://github.com/nodejs/node/pull/45978)
+* \[[`aec340b312`](https://github.com/nodejs/node/commit/aec340b312)] - **fs**: refactor to use `validateInteger` (Deokjin Kim) [#46008](https://github.com/nodejs/node/pull/46008)
+* \[[`e620de6444`](https://github.com/nodejs/node/commit/e620de6444)] - **http**: refactor to use `validateHeaderName` (Deokjin Kim) [#46143](https://github.com/nodejs/node/pull/46143)
+* \[[`3e70b7d863`](https://github.com/nodejs/node/commit/3e70b7d863)] - **http**: writeHead if statusmessage is undefined dont override headers (Marco Ippolito) [#46173](https://github.com/nodejs/node/pull/46173)
+* \[[`3d1dd96c4f`](https://github.com/nodejs/node/commit/3d1dd96c4f)] - **http**: refactor to use min of validateNumber for maxTotalSockets (Deokjin Kim) [#46115](https://github.com/nodejs/node/pull/46115)
+* \[[`4df1fcc9db`](https://github.com/nodejs/node/commit/4df1fcc9db)] - **(SEMVER-MINOR)** **http**: join authorization headers (Marco Ippolito) [#45982](https://github.com/nodejs/node/pull/45982)
+* \[[`8c06e2f645`](https://github.com/nodejs/node/commit/8c06e2f645)] - **http**: replace `var` with `const` on code of comment (Deokjin Kim) [#45951](https://github.com/nodejs/node/pull/45951)
+* \[[`3c0c5e0567`](https://github.com/nodejs/node/commit/3c0c5e0567)] - **(SEMVER-MINOR)** **http**: improved timeout defaults handling (Paolo Insogna) [#45778](https://github.com/nodejs/node/pull/45778)
+* \[[`edcd4fc576`](https://github.com/nodejs/node/commit/edcd4fc576)] - **lib**: use kEmptyObject and update JSDoc in webstreams (Deokjin Kim) [#46183](https://github.com/nodejs/node/pull/46183)
+* \[[`d6fc855b8a`](https://github.com/nodejs/node/commit/d6fc855b8a)] - **lib**: refactor to use validate function (Deokjin Kim) [#46101](https://github.com/nodejs/node/pull/46101)
+* \[[`bc17f37b98`](https://github.com/nodejs/node/commit/bc17f37b98)] - **lib**: reuse invalid state errors on webstreams (Rafael Gonzaga) [#46086](https://github.com/nodejs/node/pull/46086)
+* \[[`86554bf27c`](https://github.com/nodejs/node/commit/86554bf27c)] - **lib**: fix incorrect use of console intrinsic (Colin Ihrig) [#46044](https://github.com/nodejs/node/pull/46044)
+* \[[`7fc7b19124`](https://github.com/nodejs/node/commit/7fc7b19124)] - **lib**: update JSDoc of `getOwnPropertyValueOrDefault` (Deokjin Kim) [#46010](https://github.com/nodejs/node/pull/46010)
+* \[[`c1cc1f9e12`](https://github.com/nodejs/node/commit/c1cc1f9e12)] - **lib**: use `kEmptyObject` as default value for options (Deokjin Kim) [#46011](https://github.com/nodejs/node/pull/46011)
+* \[[`db617222da`](https://github.com/nodejs/node/commit/db617222da)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#46215](https://github.com/nodejs/node/pull/46215)
+* \[[`10afecd0d8`](https://github.com/nodejs/node/commit/10afecd0d8)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#46130](https://github.com/nodejs/node/pull/46130)
+* \[[`d8ce990ce6`](https://github.com/nodejs/node/commit/d8ce990ce6)] - **meta**: update comment in `CODEOWNERS` to better reflect current policy (Antoine du Hamel) [#45944](https://github.com/nodejs/node/pull/45944)
+* \[[`e3f0194168`](https://github.com/nodejs/node/commit/e3f0194168)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#46040](https://github.com/nodejs/node/pull/46040)
+* \[[`d31c478929`](https://github.com/nodejs/node/commit/d31c478929)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#45968](https://github.com/nodejs/node/pull/45968)
+* \[[`10a276a3e0`](https://github.com/nodejs/node/commit/10a276a3e0)] - **meta**: add `nodejs/loaders` to CODEOWNERS (Geoffrey Booth) [#45940](https://github.com/nodejs/node/pull/45940)
+* \[[`56247d7c87`](https://github.com/nodejs/node/commit/56247d7c87)] - **meta**: add `nodejs/test_runner` to CODEOWNERS (Antoine du Hamel) [#45935](https://github.com/nodejs/node/pull/45935)
+* \[[`3bef8bc743`](https://github.com/nodejs/node/commit/3bef8bc743)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#45899](https://github.com/nodejs/node/pull/45899)
+* \[[`baf30ee935`](https://github.com/nodejs/node/commit/baf30ee935)] - **module**: fix unintended mutation (Antoine du Hamel) [#46108](https://github.com/nodejs/node/pull/46108)
+* \[[`3ad584c357`](https://github.com/nodejs/node/commit/3ad584c357)] - **net**: handle socket.write(cb) edge case (Santiago Gimeno) [#45922](https://github.com/nodejs/node/pull/45922)
+* \[[`2ab35cf0cc`](https://github.com/nodejs/node/commit/2ab35cf0cc)] - **node-api**: disambiguate napi\_add\_finalizer (Chengzhong Wu) [#45401](https://github.com/nodejs/node/pull/45401)
+* \[[`6e9676e986`](https://github.com/nodejs/node/commit/6e9676e986)] - **node-api**: generalize finalizer second pass callback (Chengzhong Wu) [#44141](https://github.com/nodejs/node/pull/44141)
+* \[[`b2faceff0a`](https://github.com/nodejs/node/commit/b2faceff0a)] - **(SEMVER-MINOR)** **os**: add availableParallelism() (Colin Ihrig) [#45895](https://github.com/nodejs/node/pull/45895)
+* \[[`8fac4c5684`](https://github.com/nodejs/node/commit/8fac4c5684)] - **perf\_hooks**: fix checking range of `options.figures` in createHistogram (Deokjin Kim) [#45999](https://github.com/nodejs/node/pull/45999)
+* \[[`ea73702847`](https://github.com/nodejs/node/commit/ea73702847)] - **process,worker**: ensure code after exit() effectless (ywave620) [#45620](https://github.com/nodejs/node/pull/45620)
+* \[[`784ed594ea`](https://github.com/nodejs/node/commit/784ed594ea)] - **repl**: improve robustness wrt to prototype pollution (Antoine du Hamel) [#45604](https://github.com/nodejs/node/pull/45604)
+* \[[`fcfde3412e`](https://github.com/nodejs/node/commit/fcfde3412e)] - **src**: rename internal module declaration as internal bindings (Chengzhong Wu) [#45551](https://github.com/nodejs/node/pull/45551)
+* \[[`646cadccd0`](https://github.com/nodejs/node/commit/646cadccd0)] - **src**: fix endianness of simdutf (Yagiz Nizipli) [#46257](https://github.com/nodejs/node/pull/46257)
+* \[[`94605b1665`](https://github.com/nodejs/node/commit/94605b1665)] - **src**: replace unreachable code with static\_assert (Tobias Nießen) [#46209](https://github.com/nodejs/node/pull/46209)
+* \[[`3ce39bbcb7`](https://github.com/nodejs/node/commit/3ce39bbcb7)] - **src**: hide kMaxDigestMultiplier outside HKDF impl (Tobias Nießen) [#46206](https://github.com/nodejs/node/pull/46206)
+* \[[`9648b06e09`](https://github.com/nodejs/node/commit/9648b06e09)] - **src**: distinguish env stopping flags (Chengzhong Wu) [#45907](https://github.com/nodejs/node/pull/45907)
+* \[[`53ecd20bbd`](https://github.com/nodejs/node/commit/53ecd20bbd)] - **src**: remove return after abort (Shelley Vohr) [#46172](https://github.com/nodejs/node/pull/46172)
+* \[[`c4c8931b9d`](https://github.com/nodejs/node/commit/c4c8931b9d)] - **src**: remove unnecessary semicolons (Shelley Vohr) [#46171](https://github.com/nodejs/node/pull/46171)
+* \[[`fab72b1677`](https://github.com/nodejs/node/commit/fab72b1677)] - **src**: use simdutf for converting externalized builtins to UTF-16 (Anna Henningsen) [#46119](https://github.com/nodejs/node/pull/46119)
+* \[[`67729961e7`](https://github.com/nodejs/node/commit/67729961e7)] - **src**: use constant strings for memory info names (Chengzhong Wu) [#46087](https://github.com/nodejs/node/pull/46087)
+* \[[`0ac4e5dd34`](https://github.com/nodejs/node/commit/0ac4e5dd34)] - **src**: fix typo in node\_snapshotable.cc (Vadim) [#46103](https://github.com/nodejs/node/pull/46103)
+* \[[`b454a7665d`](https://github.com/nodejs/node/commit/b454a7665d)] - **src**: keep PipeWrap::Open function consistent with TCPWrap (theanarkh) [#46064](https://github.com/nodejs/node/pull/46064)
+* \[[`41f5a29cca`](https://github.com/nodejs/node/commit/41f5a29cca)] - **src**: speed up process.getActiveResourcesInfo() (Darshan Sen) [#46014](https://github.com/nodejs/node/pull/46014)
+* \[[`02a61dd6bd`](https://github.com/nodejs/node/commit/02a61dd6bd)] - **src**: fix typo in `node_file.cc` (Vadim) [#45998](https://github.com/nodejs/node/pull/45998)
+* \[[`99c033ed98`](https://github.com/nodejs/node/commit/99c033ed98)] - **src**: fix crash on OnStreamRead on Windows (Santiago Gimeno) [#45878](https://github.com/nodejs/node/pull/45878)
+* \[[`27d6a8b2b1`](https://github.com/nodejs/node/commit/27d6a8b2b1)] - **src**: fix creating `Isolate`s from addons (Anna Henningsen) [#45885](https://github.com/nodejs/node/pull/45885)
+* \[[`9ca31cdba3`](https://github.com/nodejs/node/commit/9ca31cdba3)] - **src**: use string\_view for FastStringKey implementation (Anna Henningsen) [#45914](https://github.com/nodejs/node/pull/45914)
+* \[[`e4fc3abfd5`](https://github.com/nodejs/node/commit/e4fc3abfd5)] - **src**: fix UB in overflow checks (Ben Noordhuis) [#45882](https://github.com/nodejs/node/pull/45882)
+* \[[`574afac26a`](https://github.com/nodejs/node/commit/574afac26a)] - **src**: check size of args before using for exec\_path (A. Wilcox) [#45902](https://github.com/nodejs/node/pull/45902)
+* \[[`f0692468cd`](https://github.com/nodejs/node/commit/f0692468cd)] - **src**: fix tls certificate root store data race (Ben Noordhuis) [#45767](https://github.com/nodejs/node/pull/45767)
+* \[[`a749ceda2e`](https://github.com/nodejs/node/commit/a749ceda2e)] - **src**: add undici and acorn to `process.versions` (Debadree Chatterjee) [#45621](https://github.com/nodejs/node/pull/45621)
+* \[[`08a6a61575`](https://github.com/nodejs/node/commit/08a6a61575)] - **src,lib**: the handle keeps loop alive in cluster rr mode (theanarkh) [#46161](https://github.com/nodejs/node/pull/46161)
+* \[[`a87963de6b`](https://github.com/nodejs/node/commit/a87963de6b)] - **stream**: fix pipeline calling end on destination more than once (Debadree Chatterjee) [#46226](https://github.com/nodejs/node/pull/46226)
+* \[[`cde59606cd`](https://github.com/nodejs/node/commit/cde59606cd)] - **(SEMVER-MINOR)** **stream**: implement finished() for ReadableStream and WritableStream (Debadree Chatterjee) [#46205](https://github.com/nodejs/node/pull/46205)
+* \[[`441d9de33e`](https://github.com/nodejs/node/commit/441d9de33e)] - **stream**: refactor to use `validateFunction` (Deokjin Kim) [#46007](https://github.com/nodejs/node/pull/46007)
+* \[[`325fc08d48`](https://github.com/nodejs/node/commit/325fc08d48)] - **stream**: fix typo in JSDoc (Deokjin Kim) [#45991](https://github.com/nodejs/node/pull/45991)
+* \[[`536322fa1c`](https://github.com/nodejs/node/commit/536322fa1c)] - **test**: update postject to 1.0.0-alpha.4 (Node.js GitHub Bot) [#46212](https://github.com/nodejs/node/pull/46212)
+* \[[`a3056f4125`](https://github.com/nodejs/node/commit/a3056f4125)] - **test**: refactor to avoid mutation of global by a loader (Michaël Zasso) [#46220](https://github.com/nodejs/node/pull/46220)
+* \[[`1790569518`](https://github.com/nodejs/node/commit/1790569518)] - **test**: improve test coverage for WHATWG `TextDecoder` (Juan José) [#45241](https://github.com/nodejs/node/pull/45241)
+* \[[`896027c006`](https://github.com/nodejs/node/commit/896027c006)] - **test**: add fix so that test exits if port 42 is unprivileged (Suyash Nayan) [#45904](https://github.com/nodejs/node/pull/45904)
+* \[[`257224da0e`](https://github.com/nodejs/node/commit/257224da0e)] - **test**: use `os.availableParallelism()` (Deokjin Kim) [#46003](https://github.com/nodejs/node/pull/46003)
+* \[[`7e1462dd02`](https://github.com/nodejs/node/commit/7e1462dd02)] - **test**: update Web Events WPT (Deokjin Kim) [#46051](https://github.com/nodejs/node/pull/46051)
+* \[[`40d52fbc5f`](https://github.com/nodejs/node/commit/40d52fbc5f)] - **test**: add test to once() in event lib (Jonathan Diaz) [#46126](https://github.com/nodejs/node/pull/46126)
+* \[[`f3518f3337`](https://github.com/nodejs/node/commit/f3518f3337)] - **test**: use `process.hrtime.bigint` instead of `process.hrtime` (Deokjin Kim) [#45877](https://github.com/nodejs/node/pull/45877)
+* \[[`4d6dd10464`](https://github.com/nodejs/node/commit/4d6dd10464)] - **test**: print failed JS/parallel tests (Geoffrey Booth) [#45960](https://github.com/nodejs/node/pull/45960)
+* \[[`7cb6fef6d6`](https://github.com/nodejs/node/commit/7cb6fef6d6)] - **test**: fix test broken under --node-builtin-modules-path (Geoffrey Booth) [#45894](https://github.com/nodejs/node/pull/45894)
+* \[[`55e4140c34`](https://github.com/nodejs/node/commit/55e4140c34)] - **test**: fix mock.method to support class instances (Erick Wendel) [#45608](https://github.com/nodejs/node/pull/45608)
+* \[[`286acaa6fe`](https://github.com/nodejs/node/commit/286acaa6fe)] - **test**: update encoding wpt to latest (Yagiz Nizipli) [#45850](https://github.com/nodejs/node/pull/45850)
+* \[[`22c1e918ce`](https://github.com/nodejs/node/commit/22c1e918ce)] - **test**: update url wpt to latest (Yagiz Nizipli) [#45852](https://github.com/nodejs/node/pull/45852)
+* \[[`5fa6a70bbd`](https://github.com/nodejs/node/commit/5fa6a70bbd)] - **test**: add CryptoKey transferring tests (Filip Skokan) [#45811](https://github.com/nodejs/node/pull/45811)
+* \[[`4aaec07266`](https://github.com/nodejs/node/commit/4aaec07266)] - **test**: add postject to fixtures (Darshan Sen) [#45298](https://github.com/nodejs/node/pull/45298)
+* \[[`da78f9cbb8`](https://github.com/nodejs/node/commit/da78f9cbb8)] - **test,crypto**: update WebCryptoAPI WPT (Filip Skokan) [#45860](https://github.com/nodejs/node/pull/45860)
+* \[[`3269423032`](https://github.com/nodejs/node/commit/3269423032)] - **test,esm**: validate more edge cases for dynamic imports (Antoine du Hamel) [#46059](https://github.com/nodejs/node/pull/46059)
+* \[[`cade2fccf4`](https://github.com/nodejs/node/commit/cade2fccf4)] - **test\_runner**: run t.after() if test body throws (Colin Ihrig) [#45870](https://github.com/nodejs/node/pull/45870)
+* \[[`87a0e86604`](https://github.com/nodejs/node/commit/87a0e86604)] - **test\_runner**: parse yaml (Moshe Atlow) [#45815](https://github.com/nodejs/node/pull/45815)
+* \[[`757a022443`](https://github.com/nodejs/node/commit/757a022443)] - **tls**: don't treat fatal TLS alerts as EOF (David Benjamin) [#44563](https://github.com/nodejs/node/pull/44563)
+* \[[`c6457cbf8d`](https://github.com/nodejs/node/commit/c6457cbf8d)] - **tls**: fix re-entrancy issue with TLS close\_notify (David Benjamin) [#44563](https://github.com/nodejs/node/pull/44563)
+* \[[`fcca2d5ea6`](https://github.com/nodejs/node/commit/fcca2d5ea6)] - **tools**: update lint-md-dependencies (Node.js GitHub Bot) [#46214](https://github.com/nodejs/node/pull/46214)
+* \[[`09adb86c19`](https://github.com/nodejs/node/commit/09adb86c19)] - **tools**: fix macro name in update-undici (Almeida) [#46217](https://github.com/nodejs/node/pull/46217)
+* \[[`1b0cc79785`](https://github.com/nodejs/node/commit/1b0cc79785)] - **tools**: add automation for updating postject dependency (Darshan Sen) [#46157](https://github.com/nodejs/node/pull/46157)
+* \[[`38df662119`](https://github.com/nodejs/node/commit/38df662119)] - **tools**: update create-or-update-pull-request-action (Michaël Zasso) [#46169](https://github.com/nodejs/node/pull/46169)
+* \[[`3f4c0c0de1`](https://github.com/nodejs/node/commit/3f4c0c0de1)] - **tools**: update eslint to 8.31.0 (Node.js GitHub Bot) [#46131](https://github.com/nodejs/node/pull/46131)
+* \[[`f3dc4329e6`](https://github.com/nodejs/node/commit/f3dc4329e6)] - **tools**: update lint-md-dependencies to rollup\@3.9.1 (Node.js GitHub Bot) [#46129](https://github.com/nodejs/node/pull/46129)
+* \[[`fafbd1ca72`](https://github.com/nodejs/node/commit/fafbd1ca72)] - **tools**: move update-eslint.sh to dep\_updaters/ (Luigi Pinca) [#46088](https://github.com/nodejs/node/pull/46088)
+* \[[`609df01fa9`](https://github.com/nodejs/node/commit/609df01fa9)] - **tools**: make update-eslint.sh work with npm\@9 (Luigi Pinca) [#46088](https://github.com/nodejs/node/pull/46088)
+* \[[`31b8cf1a4d`](https://github.com/nodejs/node/commit/31b8cf1a4d)] - **tools**: fix lint rule recommendation (Colin Ihrig) [#46044](https://github.com/nodejs/node/pull/46044)
+* \[[`0a80cbdcb1`](https://github.com/nodejs/node/commit/0a80cbdcb1)] - **tools**: update lint-md-dependencies to rollup\@3.9.0 (Node.js GitHub Bot) [#46039](https://github.com/nodejs/node/pull/46039)
+* \[[`18503fa7ba`](https://github.com/nodejs/node/commit/18503fa7ba)] - **tools**: update doc to unist-util-select\@4.0.2 (Node.js GitHub Bot) [#46038](https://github.com/nodejs/node/pull/46038)
+* \[[`b48e82ec1d`](https://github.com/nodejs/node/commit/b48e82ec1d)] - **tools**: add release host var to promotion script (Ruy Adorno) [#45913](https://github.com/nodejs/node/pull/45913)
+* \[[`3b93b0c1f5`](https://github.com/nodejs/node/commit/3b93b0c1f5)] - **tools**: add url to `AUTHORS` update automation (Antoine du Hamel) [#45971](https://github.com/nodejs/node/pull/45971)
+* \[[`623b0eba81`](https://github.com/nodejs/node/commit/623b0eba81)] - **tools**: update lint-md-dependencies to rollup\@3.8.1 (Node.js GitHub Bot) [#45967](https://github.com/nodejs/node/pull/45967)
+* \[[`b0e88377fe`](https://github.com/nodejs/node/commit/b0e88377fe)] - **tools**: update GitHub workflow action (Mohammed Keyvanzadeh) [#45937](https://github.com/nodejs/node/pull/45937)
+* \[[`974442e69d`](https://github.com/nodejs/node/commit/974442e69d)] - **tools**: update lint-md dependencies (Node.js GitHub Bot) [#45813](https://github.com/nodejs/node/pull/45813)
+* \[[`5aaa8c3bbf`](https://github.com/nodejs/node/commit/5aaa8c3bbf)] - **tools**: enforce use of trailing commas in `tools/` (Antoine du Hamel) [#45889](https://github.com/nodejs/node/pull/45889)
+* \[[`1e32520f72`](https://github.com/nodejs/node/commit/1e32520f72)] - **tools**: add `ArrayPrototypeConcat` to the list of primordials to avoid (Antoine du Hamel) [#44445](https://github.com/nodejs/node/pull/44445)
+* \[[`e0cda56204`](https://github.com/nodejs/node/commit/e0cda56204)] - **tools**: fix incorrect version history order (Fabien Michel) [#45728](https://github.com/nodejs/node/pull/45728)
+* \[[`7438ff175a`](https://github.com/nodejs/node/commit/7438ff175a)] - **tools**: update eslint to 8.29.0 (Node.js GitHub Bot) [#45733](https://github.com/nodejs/node/pull/45733)
+* \[[`1e11247b91`](https://github.com/nodejs/node/commit/1e11247b91)] - _**Revert**_ "**tools**: update V8 gypfiles for RISC-V" (Lu Yahan) [#46156](https://github.com/nodejs/node/pull/46156)
+* \[[`0defe4effa`](https://github.com/nodejs/node/commit/0defe4effa)] - **trace\_events**: refactor to use `validateStringArray` (Deokjin Kim) [#46012](https://github.com/nodejs/node/pull/46012)
+* \[[`f1dcbe7652`](https://github.com/nodejs/node/commit/f1dcbe7652)] - **util**: add fast path for text-decoder fatal flag (Yagiz Nizipli) [#45803](https://github.com/nodejs/node/pull/45803)
+* \[[`277d9da876`](https://github.com/nodejs/node/commit/277d9da876)] - **vm**: refactor to use validate function (Deokjin Kim) [#46176](https://github.com/nodejs/node/pull/46176)
+* \[[`96f1b2e731`](https://github.com/nodejs/node/commit/96f1b2e731)] - **vm**: refactor to use `validateStringArray` (Deokjin Kim) [#46020](https://github.com/nodejs/node/pull/46020)
+
 ## 2023-01-05, Version 18.13.0 'Hydrogen' (LTS), @danielleadams

diff --git a/src/node_version.h b/src/node_version.h
index 7d09542f931de3..c42b070a336728 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -23,13 +23,13 @@
 #define SRC_NODE_VERSION_H_

 #define NODE_MAJOR_VERSION 18
-#define NODE_MINOR_VERSION 13
-#define NODE_PATCH_VERSION 1
+#define NODE_MINOR_VERSION 14
+#define NODE_PATCH_VERSION 0

 #define NODE_VERSION_IS_LTS 1
 #define NODE_VERSION_LTS_CODENAME "Hydrogen"

-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1

 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)
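The headline SEMVER-MINOR entry in the changelog above is `os.availableParallelism()` ([#45895]). As a minimal illustrative sketch (not part of the patch itself), it can replace the common `os.cpus().length` idiom when sizing a worker pool:

```js
'use strict';
// Sketch of the os.availableParallelism() API added by #45895.
// It returns an estimate of the amount of parallelism a program should
// use, which may differ from the raw CPU count (e.g. under cgroup limits).
const os = require('node:os');

const poolSize = os.availableParallelism(); // e.g. 8 on an 8-thread machine
console.log(`spawning ${poolSize} workers`);
```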