diff --git a/.babelrc b/.babelrc index 6bb6dad639..d1c5b6cd49 100644 --- a/.babelrc +++ b/.babelrc @@ -52,6 +52,7 @@ "stage-0" ], "plugins": [ + ["babel-plugin-inline-import", { "extensions": [ ".tpl.js" ] }], ["transform-inline-imports-commonjs"], ["transform-runtime", { "polyfill": false, "regenerator": true }] ] diff --git a/.gitignore b/.gitignore index f91f6d103d..921789e0c1 100644 --- a/.gitignore +++ b/.gitignore @@ -21,5 +21,7 @@ test/fixtures/**/.fbkpm /__tests__/fixtures/request-cache/GET/localhost/.bin .idea .yarn-meta +.pnp.js +.pnp /packages/lockfile/index.js .vscode/ diff --git a/__tests__/commands/install/integration.js b/__tests__/commands/install/integration.js index 4ef36f0614..cbc63a1efa 100644 --- a/__tests__/commands/install/integration.js +++ b/__tests__/commands/install/integration.js @@ -236,7 +236,7 @@ test('changes the cache path when bumping the cache version', () => await mockConstants(config, {CACHE_VERSION: 42}, async config => { await cache(config, reporter, {}, ['dir']); - expect((JSON.parse(String(inOut.read())): any).data).toMatch(/[\\\/]v42[\\\/]?$/); + expect((JSON.parse(String(inOut.read())): any).data).toMatch(/[\\\/]v42([\\\/].*)?$/); }); })); diff --git a/__tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-empty/.yarn-metadata.json b/__tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-empty/node_modules/corrupted-meta-empty/.yarn-metadata.json similarity index 100% rename from __tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-empty/.yarn-metadata.json rename to __tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-empty/node_modules/corrupted-meta-empty/.yarn-metadata.json diff --git a/__tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-not-existing/.gitkeep b/__tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-not-existing/node_modules/corrupted-meta-not-existing/.gitkeep similarity index 100% rename from 
__tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-not-existing/.gitkeep rename to __tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-not-existing/node_modules/corrupted-meta-not-existing/.gitkeep diff --git a/__tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-typo/.yarn-metadata.json b/__tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-typo/node_modules/corrupted-meta-typo/.yarn-metadata.json similarity index 100% rename from __tests__/fixtures/cache/corrupted/.yarn-cache/v2/corrupted-meta-typo/.yarn-metadata.json rename to __tests__/fixtures/cache/corrupted/.yarn-cache/v3/corrupted-meta-typo/node_modules/corrupted-meta-typo/.yarn-metadata.json diff --git a/__tests__/fixtures/cache/corrupted/.yarn-cache/v2/good/.yarn-metadata.json b/__tests__/fixtures/cache/corrupted/.yarn-cache/v3/good/node_modules/good/.yarn-metadata.json similarity index 100% rename from __tests__/fixtures/cache/corrupted/.yarn-cache/v2/good/.yarn-metadata.json rename to __tests__/fixtures/cache/corrupted/.yarn-cache/v3/good/node_modules/good/.yarn-metadata.json diff --git a/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/angular/core/-/core-2.4.10.tgz.bin b/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/angular/core/-/core-2.4.10.tgz.bin index 46e6ab0e34..9378b1cea2 100644 Binary files a/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/angular/core/-/core-2.4.10.tgz.bin and b/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/angular/core/-/core-2.4.10.tgz.bin differ diff --git a/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/types/node/-/node-10.3.2.tgz.bin b/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/types/node/-/node-10.3.2.tgz.bin new file mode 100644 index 0000000000..d4e53d14e4 Binary files /dev/null and b/__tests__/fixtures/request-cache/GET/registry.yarnpkg.com/types/node/-/node-10.3.2.tgz.bin differ diff --git a/__tests__/integration.js b/__tests__/integration.js 
index c1b753e6ed..2703d371ad 100644 --- a/__tests__/integration.js +++ b/__tests__/integration.js @@ -506,6 +506,7 @@ test('relative cache folder', async () => { const [stdoutOutput, _] = await runYarn(['cache', 'dir'], {cwd: `${base}/sub`}); + // The dirname is to remove the "v2" part expect(await fs.realpath(path.dirname(stdoutOutput.toString()))).toEqual(await fs.realpath(`${base}/foo`)); }); diff --git a/__tests__/lifecycle-scripts.js b/__tests__/lifecycle-scripts.js index c9a8c22982..31cdfd25fa 100644 --- a/__tests__/lifecycle-scripts.js +++ b/__tests__/lifecycle-scripts.js @@ -48,31 +48,31 @@ async function execCommand(cmd: string, packageName: string, env = process.env): test.concurrent('should add the global yarnrc arguments to the command line', async () => { const stdout = await execCommand('cache dir', 'yarnrc-cli'); - expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+\n$/); + expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+(\/.*)?\n$/); }); test.concurrent( 'should add the command-specific yarnrc arguments to the command line if the command name matches', async () => { const stdout = await execCommand('cache dir', 'yarnrc-cli-command-specific-ok'); - expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+\n$/); + expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+(\/.*)?\n$/); }, ); test.concurrent("should not add the command-specific yarnrc arguments if the command name doesn't match", async () => { const stdout = await execCommand('cache dir', 'yarnrc-cli-command-specific-ko'); - expect(stdout.replace(/\\/g, '/')).not.toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+\n$/); + expect(stdout.replace(/\\/g, '/')).not.toMatch(/^(C:)?\/tmp\/foobar\/v[0-9]+(\/.*)?\n$/); }); test.concurrent('should allow overriding the yarnrc values from the command line', async () => { const stdout = await execCommand('cache dir --cache-folder /tmp/toto', 'yarnrc-cli'); - expect(stdout.replace(/\\/g, 
'/')).toMatch(/^(C:)?\/tmp\/toto\/v[0-9]+\n$/); + expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?\/tmp\/toto\/v[0-9]+(\/.*)?\n$/); }); // Test disabled for now, cf rc.js test.concurrent('should resolve the yarnrc values relative to where the file lives', async () => { const stdout = await execCommand('cache dir', 'yarnrc-cli-relative'); - expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?(\/[^\/]+)+\/foobar\/hello\/world\/v[0-9]+\n$/); + expect(stdout.replace(/\\/g, '/')).toMatch(/^(C:)?(\/[^\/]+)+\/foobar\/hello\/world\/v[0-9]+(\/.*)?\n$/); }); test.concurrent( diff --git a/__tests__/package-resolver.js b/__tests__/package-resolver.js index 87279c91cb..269c1b614d 100644 --- a/__tests__/package-resolver.js +++ b/__tests__/package-resolver.js @@ -13,8 +13,8 @@ jasmine.DEFAULT_TIMEOUT_INTERVAL = 90000; const path = require('path'); -// regexp which verifies that cache path contains semver + hash -const cachePathRe = /-\d+\.\d+\.\d+-[\dabcdef]{40}$/; +// regexp which verifies that the cache path contains a path component ending with semver + hash +const cachePathRe = /-\d+\.\d+\.\d+-[\dabcdef]{40}[\\\/]/; async function createEnv(configOptions): Object { const lockfile = new Lockfile(); @@ -82,7 +82,7 @@ addTest( '@foo/bar@1.2.3', 'npm', async cacheFolder => { - const folder = path.join(cacheFolder, 'npm-@foo', 'bar'); + const folder = path.join(cacheFolder, 'npm-@foo-bar', 'node_modules', '@foo', 'bar'); await fs.mkdirp(folder); await fs.writeFile( path.join(folder, constants.METADATA_FILENAME), diff --git a/package.json b/package.json index 136c4da4f7..520153562c 100644 --- a/package.json +++ b/package.json @@ -57,6 +57,7 @@ "babel-eslint": "^7.2.3", "babel-loader": "^6.2.5", "babel-plugin-array-includes": "^2.0.3", + "babel-plugin-inline-import": "^2.0.6", "babel-plugin-transform-builtin-extend": "^1.1.2", "babel-plugin-transform-inline-imports-commonjs": "^1.0.0", "babel-plugin-transform-runtime": "^6.4.3", diff --git a/packages/pkg-tests/package.json 
b/packages/pkg-tests/package.json index 8d25170b69..f3bc9a3682 100644 --- a/packages/pkg-tests/package.json +++ b/packages/pkg-tests/package.json @@ -10,7 +10,7 @@ "babel-preset-env": "^1.6.1", "babel-preset-flow": "^6.23.0", "flow-bin": "^0.66.0", - "jest": "^22.3.0", + "jest": "^23.0.0", "prettier": "^1.10.2" }, "scripts": { diff --git a/packages/pkg-tests/pkg-tests-core/sources/utils/fs.js b/packages/pkg-tests/pkg-tests-core/sources/utils/fs.js index 5ba29aa737..a39f4a9058 100644 --- a/packages/pkg-tests/pkg-tests-core/sources/utils/fs.js +++ b/packages/pkg-tests/pkg-tests-core/sources/utils/fs.js @@ -174,8 +174,12 @@ exports.readJson = async function readJson(source: string): Promise { } }; -exports.chmod = function chmod(target: string, mod: number): Promise { - return fs.chmod(target, mod); +exports.chmod = async function chmod(target: string, mod: number): Promise { + await fs.chmod(target, mod); +}; + +exports.realpath = function realpath(source: string): Promise { + return fs.realpath(source); }; exports.makeFakeBinary = async function( diff --git a/packages/pkg-tests/pkg-tests-core/sources/utils/tests.js b/packages/pkg-tests/pkg-tests-core/sources/utils/tests.js index df06acf79a..f1a9e20132 100644 --- a/packages/pkg-tests/pkg-tests-core/sources/utils/tests.js +++ b/packages/pkg-tests/pkg-tests-core/sources/utils/tests.js @@ -22,6 +22,17 @@ export type PackageRunDriver = ( export type PackageDriver = any; +let whitelist = new Map(); + +exports.setPackageWhitelist = async function whitelistPackages( + packages: Map>, + fn: () => Promise, +) { + whitelist = packages; + await fn(); + whitelist = new Map(); +}; + exports.getPackageRegistry = function getPackageRegistry(): Promise { if (getPackageRegistry.promise) { return getPackageRegistry.promise; @@ -182,7 +193,12 @@ exports.startPackageServer = function startPackageServer(): Promise { return processError(res, 404, `Package not found: ${name}`); } - const versions = Array.from(packageEntry.keys()); + let 
versions = Array.from(packageEntry.keys()); + + const whitelistedVersions = whitelist.get(name); + if (whitelistedVersions) { + versions = versions.filter(version => whitelistedVersions.has(version)); + } const data = JSON.stringify({ name, @@ -300,7 +316,7 @@ exports.generatePkgDriver = function generatePkgDriver({runDriver}: {|runDriver: } return async function(): Promise { - const path = await fsUtils.createTemporaryFolder(); + const path = await fsUtils.realpath(await fsUtils.createTemporaryFolder()); const registryUrl = await exports.startPackageServer(); @@ -308,21 +324,34 @@ exports.generatePkgDriver = function generatePkgDriver({runDriver}: {|runDriver: await fsUtils.writeJson(`${path}/package.json`, await deepResolve(packageJson)); const run = (...args) => { + let callDefinition = {}; + + if (args.length > 0 && typeof args[args.length - 1] === 'object') { + callDefinition = args.pop(); + } + return runDriver(path, args, { registryUrl, + ...definition, ...subDefinition, + ...callDefinition, }); }; const source = async script => { - return JSON.parse((await run('node', '-p', `JSON.stringify(${script})`)).stdout.toString()); + return JSON.parse((await run('node', '-p', `JSON.stringify((() => ${script})())`)).stdout.toString()); }; - await fn({ - path, - run, - source, - }); + try { + await fn({ + path, + run, + source, + }); + } catch (error) { + error.message = `Temporary fixture folder: ${path}\n\n` + error.message; + throw error; + } }; }; diff --git a/packages/pkg-tests/pkg-tests-fixtures/default-index.js b/packages/pkg-tests/pkg-tests-fixtures/default-index.js index 1aa13242b9..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/default-index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/default-index.js @@ -2,7 +2,9 @@ module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - // $FlowFixMe The whole point of this file is to be dynamic - module.exports.dependencies[key] = require(key); 
+for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-entry-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-entry-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-entry-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-entry-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-exit-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-exit-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-exit-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dep-loop-exit-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dev-deps-1.0.0/index.js 
b/packages/pkg-tests/pkg-tests-fixtures/packages/dev-deps-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dev-deps-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dev-deps-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-a-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-a-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-a-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-a-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key 
of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-2.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-2.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-2.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-b-2.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-c-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-c-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-c-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-c-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } 
diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-d-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-d-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-d-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-d-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-e-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-e-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-e-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/dragon-test-1-e-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-get-pwd.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-get-pwd.js new file mode 100644 index 0000000000..5e4016f4d1 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-get-pwd.js @@ -0,0 +1,3 @@ 
+#!/usr/bin/env node + +console.log(process.cwd()); diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-relative-require.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-relative-require.js new file mode 100644 index 0000000000..3edc61061a --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-relative-require.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +const secret = require('./secret'); + +console.log(secret); diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-require.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-require.js new file mode 100755 index 0000000000..0bee57337a --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin-with-require.js @@ -0,0 +1,6 @@ +#!/usr/bin/env node + +const noDeps = require('no-deps'); + +console.log(noDeps.name); +console.log(noDeps.version); diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin.js new file mode 100755 index 0000000000..7e9cf01609 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/bin.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +for (let t = 2; t < process.argv.length; ++t) { + console.log(process.argv[t]); +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // 
$FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/package.json new file mode 100644 index 0000000000..56d66a579a --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/package.json @@ -0,0 +1,13 @@ +{ + "name": "has-bin-entries", + "version": "1.0.0", + "bin": { + "has-bin-entries": "./bin.js", + "has-bin-entries-with-require": "./bin-with-require.js", + "has-bin-entries-with-relative-require": "./bin-with-relative-require.js", + "has-bin-entries-get-pwd": "./bin-get-pwd.js" + }, + "dependencies": { + "no-deps": "1.0.0" + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/secret.js b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/secret.js new file mode 100644 index 0000000000..888cae37af --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/has-bin-entries-1.0.0/secret.js @@ -0,0 +1 @@ +module.exports = 42; diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-child-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-child-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-child-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-child-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = 
require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-parent-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-parent-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-parent-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/hoisting-peer-check-parent-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.1/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.1/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.1/index.js +++ 
b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.0.1/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.1.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.1.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.1.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-1.1.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-2.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-2.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-2.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-2.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // 
$FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-2.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-2.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-2.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-bins-2.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-checked-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-checked-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- 
a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-checked-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-checked-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-failing-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-failing-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-failing-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-failing-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = 
require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/log.js b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/log.js new file mode 100644 index 0000000000..e0a30c5dfa --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/log.js @@ -0,0 +1 @@ +module.exports = []; diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/package.json index 327c56811d..294c8b4d37 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/package.json +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/no-deps-scripted-1.0.0/package.json @@ -2,8 +2,8 @@ "name": "no-deps-scripted", "version": "1.0.0", "scripts": { - "preinstall": "echo preinstall >> log", - "install": "echo install >> log", - "postinstall": "echo postinstall >> log" + "preinstall": "echo 'module.exports.push(100);' >> log.js", + "install": "echo 'module.exports.push(200);' >> log.js", + "postinstall": "echo 'module.exports.push(300);' >> log.js; echo 'module.exports = '\"$(node -p 'Math.floor(Math.random() * 512000)')\"';' > rnd.js" } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep1-dep-bins/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep1-dep-bins/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep1-dep-bins/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep1-dep-bins/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - 
module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep2-dep-bins-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep2-dep-bins-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep2-dep-bins-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-deep2-dep-bins-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git 
a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-0.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-0.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-0.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-0.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-2.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-2.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-2.0.0/index.js +++ 
b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-bins-2.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-checked-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-checked-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-checked-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-checked-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-scripted-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-scripted-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-scripted-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-fixed-dep-scripted-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of 
[`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/one-range-dep-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/one-range-dep-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/one-range-dep-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/one-range-dep-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/package.json new file mode 100644 index 0000000000..89fa70330a --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/peer-deps-1.0.0/package.json @@ -0,0 
+1,7 @@ +{ + "name": "peer-deps", + "version": "1.0.0", + "peerDependencies": { + "no-deps": "*" + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/package.json new file mode 100644 index 0000000000..dad66deee3 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-1-0-0-1.0.0/package.json @@ -0,0 +1,8 @@ +{ + "name": "provides-peer-deps-1-0-0", + "version": "1.0.0", + "dependencies": { + "peer-deps": "1.0.0", + "no-deps": "1.0.0" + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git 
a/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/package.json new file mode 100644 index 0000000000..194a121d1a --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/provides-peer-deps-2-0-0-1.0.0/package.json @@ -0,0 +1,8 @@ +{ + "name": "provides-peer-deps-2-0-0", + "version": "1.0.0", + "dependencies": { + "peer-deps": "1.0.0", + "no-deps": "2.0.0" + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/package.json new file mode 100644 index 0000000000..b80acfc0e1 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/package.json @@ -0,0 +1,7 @@ +{ + "name": "self-require-trap", + "version": "1.0.0", + "dependencies": { + "self-require-trap": "2.0.0" + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/self.js b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/self.js new file mode 100644 index 0000000000..4b240ff44b --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-1.0.0/self.js @@ -0,0 +1 @@ +module.exports = 
require('self-require-trap'); diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/index.js new file mode 100644 index 0000000000..a6bf8f5865 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/index.js @@ -0,0 +1,10 @@ +/* @flow */ + +module.exports = require(`./package.json`); + +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/package.json b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/package.json new file mode 100644 index 0000000000..62b31dfeff --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/self-require-trap-2.0.0/package.json @@ -0,0 +1,4 @@ +{ + "name": "self-require-trap", + "version": "2.0.0" +} diff --git a/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/index.js b/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/index.js index b375424a4e..a6bf8f5865 100644 --- a/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/index.js +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/index.js @@ -1,5 +1,10 @@ +/* @flow */ + module.exports = require(`./package.json`); -for (const key of Object.keys(module.exports.dependencies || {})) { - module.exports.dependencies[key] = require(key); +for (const key of [`dependencies`, `devDependencies`, `peerDependencies`]) { + for (const dep of Object.keys(module.exports[key] || {})) { + // $FlowFixMe The whole point of this file is to be dynamic + module.exports[key][dep] = require(dep); + } } diff --git 
a/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/self.js b/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/self.js new file mode 100644 index 0000000000..c9733c6ca3 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-fixtures/packages/various-requires-1.0.0/self.js @@ -0,0 +1 @@ +module.exports = require('various-requires'); diff --git a/packages/pkg-tests/pkg-tests-specs/package.json b/packages/pkg-tests/pkg-tests-specs/package.json index 1967b62856..5ef9b6ab96 100644 --- a/packages/pkg-tests/pkg-tests-specs/package.json +++ b/packages/pkg-tests/pkg-tests-specs/package.json @@ -3,6 +3,7 @@ "version": "1.0.0", "main": "./sources/index.js", "dependencies": { + "fs-extra": "^7.0.0", "pkg-tests-core": "1.0.0" } } diff --git a/packages/pkg-tests/pkg-tests-specs/sources/basic.js b/packages/pkg-tests/pkg-tests-specs/sources/basic.js index 21eda5dea8..c958a72176 100644 --- a/packages/pkg-tests/pkg-tests-specs/sources/basic.js +++ b/packages/pkg-tests/pkg-tests-specs/sources/basic.js @@ -2,7 +2,10 @@ import type {PackageDriver} from 'pkg-tests-core'; -const {tests: {getPackageArchivePath, getPackageHttpArchivePath, getPackageDirectoryPath}} = require('pkg-tests-core'); +const { + fs: {createTemporaryFolder, writeFile, writeJson}, + tests: {getPackageArchivePath, getPackageHttpArchivePath, getPackageDirectoryPath}, +} = require('pkg-tests-core'); module.exports = (makeTemporaryEnv: PackageDriver) => { describe(`Basic tests`, () => { @@ -224,5 +227,113 @@ module.exports = (makeTemporaryEnv: PackageDriver) => { }, ), ); + + test( + `it should install in such a way that peer dependencies can be resolved (from top-level)`, + makeTemporaryEnv( + { + dependencies: {[`peer-deps`]: `1.0.0`, [`no-deps`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('peer-deps')`)).resolves.toMatchObject({ + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: 
`no-deps`, + version: `1.0.0`, + }, + }, + }); + }, + ), + ); + + test( + `it should install in such a way that peer dependencies can be resolved (from within a dependency)`, + makeTemporaryEnv( + { + dependencies: {[`provides-peer-deps-1-0-0`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('provides-peer-deps-1-0-0')`)).resolves.toMatchObject({ + name: `provides-peer-deps-1-0-0`, + version: `1.0.0`, + dependencies: { + [`peer-deps`]: { + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }, + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }); + }, + ), + ); + + test( + `it should cache the loaded modules`, + makeTemporaryEnv( + { + dependencies: {[`no-deps`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source( + `{ let before = require('no-deps/package.json'); let after = require('no-deps/package.json'); return before === after }`, + ), + ).resolves.toEqual(true); + }, + ), + ); + + test( + `it should expose the cached modules into require.cache`, + makeTemporaryEnv( + { + dependencies: {[`no-deps`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source(`require('no-deps') === require.cache[require.resolve('no-deps')].exports`), + ).resolves.toEqual(true); + }, + ), + ); + + test( + `it should allow resetting a loaded module by deleting its entry from require.cache`, + makeTemporaryEnv( + { + dependencies: {[`no-deps`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source( + `{ let before = require('no-deps/package.json'); delete require.cache[require.resolve('no-deps/package.json')]; let after = require('no-deps/package.json'); return before === after }`, + ), + ).resolves.toEqual(false); + }, + ), + ); }); }; diff --git 
a/packages/pkg-tests/pkg-tests-specs/sources/dragon.js b/packages/pkg-tests/pkg-tests-specs/sources/dragon.js index 8c67b07dae..4d897dd677 100644 --- a/packages/pkg-tests/pkg-tests-specs/sources/dragon.js +++ b/packages/pkg-tests/pkg-tests-specs/sources/dragon.js @@ -2,6 +2,8 @@ import type {PackageDriver} from 'pkg-tests-core'; +const {fs: {writeFile, writeJson}} = require('pkg-tests-core'); + // Here be dragons. The biggest and baddest tests, that just can't be described in a single line of summary. Because // of this, they each must be clearly documented and explained. // @@ -19,7 +21,7 @@ module.exports = (makeTemporaryEnv: PackageDriver) => { [`dragon-test-1-e`]: `1.0.0`, }, }, - async ({path, run}) => { + async ({path, run, source}) => { // This test assumes the following: // // . -> D@1.0.0 -> C@1.0.0 -> B@1.0.0 -> A@1.0.0 @@ -54,5 +56,69 @@ module.exports = (makeTemporaryEnv: PackageDriver) => { }, ), ); + + test( + `it should pass the dragon test 2`, + makeTemporaryEnv( + { + private: true, + workspaces: [`dragon-test-2-a`, `dragon-test-2-b`], + dependencies: { + [`dragon-test-2-a`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + // This test assumes the following: + // + // . -> A@workspace -> B@workspace -> no-deps@* (peer dep) + // -> no-deps@1.0.0 + // + // In this situation, the implementation might register the workspaces one by + // one, going through all their dependencies before moving to the next one. + // Because the workspace B is also a dependency of the workspace A, it will be + // traversed a first time as a dependency of A, and then a second time as a + // workspace. + // + // A problem is when B also has peer dependencies, like in the setup described + // above. In this case, the Yarn implementation of PnP needs to generate a virtual + // package for B (in order to deambiguate the dependencies), and register it while + // processing A. 
Then later, when iterating over B, it is possible that the + // workspace registration overwrites the previously registered virtual dependency, + // making it unavailable whilst still being referenced in the dependencies of A. + // + // This test ensures that A can always require B. + + await writeJson(`${path}/dragon-test-2-a/package.json`, { + name: `dragon-test-2-a`, + version: `1.0.0`, + dependencies: { + [`dragon-test-2-b`]: `1.0.0`, + [`no-deps`]: `1.0.0`, + }, + }); + + await writeJson(`${path}/dragon-test-2-b/package.json`, { + name: `dragon-test-2-b`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: `*`, + }, + }); + + await writeFile(`${path}/dragon-test-2-a/index.js`, `module.exports = require('dragon-test-2-b')`); + await writeFile(`${path}/dragon-test-2-b/index.js`, `module.exports = require('no-deps')`); + + await run(`install`); + + await expect(source(`require("dragon-test-2-a")`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); }); }; diff --git a/packages/pkg-tests/pkg-tests-specs/sources/index.js b/packages/pkg-tests/pkg-tests-specs/sources/index.js index ad02e73284..c2ac091cba 100644 --- a/packages/pkg-tests/pkg-tests-specs/sources/index.js +++ b/packages/pkg-tests/pkg-tests-specs/sources/index.js @@ -2,4 +2,7 @@ exports.basic = require('./basic'); exports.dragon = require('./dragon'); +exports.lock = require('./lock'); +exports.pnp = require('./pnp'); exports.script = require('./script'); +exports.workspace = require('./workspace'); diff --git a/packages/pkg-tests/pkg-tests-specs/sources/lock.js b/packages/pkg-tests/pkg-tests-specs/sources/lock.js new file mode 100644 index 0000000000..2c6ffc1355 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-specs/sources/lock.js @@ -0,0 +1,30 @@ +/* @flow */ + +import type {PackageDriver} from 'pkg-tests-core'; + +const {fs: {writeFile, writeJson}, tests: {setPackageWhitelist}} = require('pkg-tests-core'); + +module.exports = (makeTemporaryEnv: PackageDriver) => 
{ + describe(`Lock tests`, () => { + test( + `it should correctly lock dependencies`, + makeTemporaryEnv( + { + dependencies: {[`no-deps`]: `^1.0.0`}, + }, + async ({path, run, source}) => { + await setPackageWhitelist(new Map([[`no-deps`, new Set([`1.0.0`])]]), async () => { + await run(`install`); + }); + await setPackageWhitelist(new Map([[`no-deps`, new Set([`1.0.0`, `1.1.0`])]]), async () => { + await run(`install`, `-f`); + }); + await expect(source(`require('no-deps')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + }); +}; diff --git a/packages/pkg-tests/pkg-tests-specs/sources/pnp.js b/packages/pkg-tests/pkg-tests-specs/sources/pnp.js new file mode 100644 index 0000000000..7ef06f1c84 --- /dev/null +++ b/packages/pkg-tests/pkg-tests-specs/sources/pnp.js @@ -0,0 +1,1226 @@ +const cp = require('child_process'); +const {existsSync, statSync, stat, rename, readdir, remove} = require('fs-extra'); +const {relative, isAbsolute} = require('path'); + +const { + fs: {createTemporaryFolder, readFile, readJson, writeFile, writeJson}, + tests: {getPackageDirectoryPath}, +} = require('pkg-tests-core'); + +module.exports = makeTemporaryEnv => { + const { + basic: basicSpecs, + lock: lockSpecs, + script: scriptSpecs, + workspace: workspaceSpecs, + } = require('pkg-tests-specs'); + + describe(`Plug'n'Play`, () => { + basicSpecs( + makeTemporaryEnv.withConfig({ + plugNPlay: true, + }), + ); + + lockSpecs( + makeTemporaryEnv.withConfig({ + plugNPlay: true, + }), + ); + + scriptSpecs( + makeTemporaryEnv.withConfig({ + plugNPlay: true, + }), + ); + + workspaceSpecs( + makeTemporaryEnv.withConfig({ + plugNPlay: true, + }), + ); + + test( + `it should not use pnp when setting the override to false`, + makeTemporaryEnv({}, {plugNPlay: false}, async ({path, run, source}) => { + await run(`install`); + + expect(existsSync(`${path}/.pnp.js`)).toEqual(false); + }), + ); + + test( + `it should not touch the .pnp.js file when it already exists 
and is up-to-date`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + const beforeTime = (await stat(`${path}/.pnp.js`)).mtimeMs; + + // Need to wait two seconds to be sure that the mtime will change + await new Promise(resolve => setTimeout(resolve, 2000)); + + await run(`install`); + + const afterTime = (await stat(`${path}/.pnp.js`)).mtimeMs; + + expect(afterTime).toEqual(beforeTime); + }, + ), + ); + + test( + `it should update the .pnp.js file when it already exists but isn't up-to-date`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + const beforeTime = (await stat(`${path}/.pnp.js`)).mtimeMs; + + await writeJson(`${path}/package.json`, { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }); + + // Need to wait two seconds to be sure that the mtime will change + await new Promise(resolve => setTimeout(resolve, 2000)); + + await run(`install`); + + const afterTime = (await stat(`${path}/.pnp.js`)).mtimeMs; + + expect(afterTime).not.toEqual(beforeTime); + }, + ), + ); + + test( + `it should resolve two identical packages with the same object (easy)`, + makeTemporaryEnv( + { + dependencies: { + [`one-fixed-dep-1`]: getPackageDirectoryPath(`one-fixed-dep`, `1.0.0`), + [`one-fixed-dep-2`]: getPackageDirectoryPath(`one-fixed-dep`, `1.0.0`), + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source(`require('one-fixed-dep-1').dependencies['no-deps'] === require('no-deps')`), + ).resolves.toEqual(true); + await expect( + source(`require('one-fixed-dep-2').dependencies['no-deps'] === require('no-deps')`), + ).resolves.toEqual(true); + }, + ), + ); + + test( + `it should resolve two identical packages with the same object (complex)`, + makeTemporaryEnv( + { + dependencies: { + [`one-fixed-dep-1`]: getPackageDirectoryPath(`one-fixed-dep`, `1.0.0`), 
+ [`one-fixed-dep-2`]: getPackageDirectoryPath(`one-fixed-dep`, `1.0.0`), + [`no-deps`]: `2.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source( + `require('one-fixed-dep-1').dependencies['no-deps'] === require('one-fixed-dep-2').dependencies['no-deps']`, + ), + ).resolves.toEqual(true); + + await expect( + source(`require('one-fixed-dep-1').dependencies['no-deps'] !== require('no-deps')`), + ).resolves.toEqual(true); + await expect( + source(`require('one-fixed-dep-2').dependencies['no-deps'] !== require('no-deps')`), + ).resolves.toEqual(true); + }, + ), + ); + + test( + `it should correctly resolve native Node modules`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('fs') ? true : false`)).resolves.toEqual(true); + }, + ), + ); + + test( + `it should correctly resolve relative imports`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await writeFile(`${path}/foo.js`, `module.exports = 42;\n`); + + await run(`install`); + + await expect(source(`require('./foo.js')`)).resolves.toEqual(42); + }, + ), + ); + + test( + `it should correctly resolve deep imports`, + makeTemporaryEnv( + { + dependencies: {[`various-requires`]: `1.0.0`}, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('various-requires/alternative-index')`)).resolves.toEqual(42); + }, + ), + ); + + test( + `it should correctly resolve relative imports from within dependencies`, + makeTemporaryEnv( + { + dependencies: { + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('various-requires/relative-require')`)).resolves.toEqual(42); + }, + ), + ); + + test( + `it should correctly resolve an absolute path 
even when the issuer doesn't exist`, + makeTemporaryEnv({}, {plugNPlay: true}, async ({path, run, source}) => { + await run(`install`); + + const api = require(`${path}/.pnp.js`); + api.resolveToUnqualified(`${path}/.pnp.js`, `${path}/some/path/that/doesnt/exists/please/`); + }), + ); + + test( + `it should fallback to the top-level dependencies when it cannot require a transitive dependency require`, + makeTemporaryEnv( + {dependencies: {[`various-requires`]: `1.0.0`, [`no-deps`]: `1.0.0`}}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('various-requires/invalid-require')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should throw an exception if a dependency tries to require something it doesn't own`, + makeTemporaryEnv( + {dependencies: {[`various-requires`]: `1.0.0`}}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('various-requires/invalid-require')`)).rejects.toBeTruthy(); + }, + ), + ); + + test( + `it should allow packages to require themselves`, + makeTemporaryEnv( + { + dependencies: {[`various-requires`]: `1.0.0`}, + }, + {plugNPlay: true}, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('various-requires/self') === require('various-requires')`)).resolves.toEqual( + true, + ); + }, + ), + ); + + test( + `it should not add the implicit self dependency if an explicit one already exists`, + makeTemporaryEnv( + { + dependencies: {[`self-require-trap`]: `1.0.0`}, + }, + {plugNPlay: true}, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('self-require-trap/self') !== require('self-require-trap')`)).resolves.toEqual( + true, + ); + }, + ), + ); + + test( + `it should run scripts using a Node version that auto-injects the hook`, + makeTemporaryEnv( + { + dependencies: 
{[`no-deps`]: `1.0.0`}, + scripts: {myScript: `node -p 'require("no-deps/package.json").version'`}, + }, + { + plugNPlay: true, + }, + async ({path, run}) => { + await run(`install`); + + await expect(run(`myScript`)).resolves.toMatchObject({ + stdout: `1.0.0\n`, + }); + }, + ), + ); + + test( + `it should install in such a way that two identical packages with different peer dependencies are different instances`, + makeTemporaryEnv( + { + dependencies: {[`provides-peer-deps-1-0-0`]: `1.0.0`, [`provides-peer-deps-2-0-0`]: `1.0.0`}, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect( + source(`require('provides-peer-deps-1-0-0') !== require('provides-peer-deps-2-0-0')`), + ).resolves.toEqual(true); + + await expect(source(`require('provides-peer-deps-1-0-0')`)).resolves.toMatchObject({ + name: `provides-peer-deps-1-0-0`, + version: `1.0.0`, + dependencies: { + [`peer-deps`]: { + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }, + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }); + + await expect(source(`require('provides-peer-deps-2-0-0')`)).resolves.toMatchObject({ + name: `provides-peer-deps-2-0-0`, + version: `1.0.0`, + dependencies: { + [`peer-deps`]: { + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: `no-deps`, + version: `2.0.0`, + }, + }, + }, + [`no-deps`]: { + name: `no-deps`, + version: `2.0.0`, + }, + }, + }); + }, + ), + ); + + test( + `it should support the use case of using the result of require.resolve(...) 
to load a package`, + makeTemporaryEnv( + { + dependencies: {[`custom-dep-a`]: `file:./custom-dep-a`}, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await writeFile( + `${path}/custom-dep-a/index.js`, + `module.exports = require('custom-dep-b')(require.resolve('no-deps'))`, + ); + await writeJson(`${path}/custom-dep-a/package.json`, { + name: `custom-dep-a`, + version: `1.0.0`, + dependencies: {[`custom-dep-b`]: `file:../custom-dep-b`, [`no-deps`]: `1.0.0`}, + }); + + await writeFile(`${path}/custom-dep-b/index.js`, `module.exports = path => require(path)`); + await writeJson(`${path}/custom-dep-b/package.json`, {name: `custom-dep-b`, version: `1.0.0`}); + + await run(`install`); + + await expect(source(`require('custom-dep-a')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should not break the tree path when loading through the result of require.resolve(...)`, + makeTemporaryEnv( + { + dependencies: {[`custom-dep-a`]: `file:./custom-dep-a`}, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await writeFile( + `${path}/custom-dep-a/index.js`, + `module.exports = require('custom-dep-b')(require.resolve('custom-dep-c'))`, + ); + await writeJson(`${path}/custom-dep-a/package.json`, { + name: `custom-dep-a`, + version: `1.0.0`, + dependencies: {[`custom-dep-b`]: `file:../custom-dep-b`, [`custom-dep-c`]: `file:../custom-dep-c`}, + }); + + await writeFile(`${path}/custom-dep-b/index.js`, `module.exports = path => require(path)`); + await writeJson(`${path}/custom-dep-b/package.json`, {name: `custom-dep-b`, version: `1.0.0`}); + + await writeFile(`${path}/custom-dep-c/index.js`, `module.exports = require('no-deps')`); + await writeJson(`${path}/custom-dep-c/package.json`, { + name: `custom-dep-c`, + version: `1.0.0`, + dependencies: {[`no-deps`]: `1.0.0`}, + }); + + await run(`install`); + + await expect(source(`require('custom-dep-a')`)).resolves.toMatchObject({ + name: 
`no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should load the index.js file when loading from a folder`, + makeTemporaryEnv({}, {plugNPlay: true}, async ({path, run, source}) => { + await run(`install`); + + const tmp = await createTemporaryFolder(); + + await writeFile(`${tmp}/folder/index.js`, `module.exports = 42;`); + + await expect(source(`require("${tmp}/folder")`)).resolves.toEqual(42); + }), + ); + + test( + `it should resolve the .js extension`, + makeTemporaryEnv({}, {plugNPlay: true}, async ({path, run, source}) => { + await run(`install`); + + const tmp = await createTemporaryFolder(); + + await writeFile(`${tmp}/file.js`, `module.exports = 42;`); + + await expect(source(`require("${tmp}/file")`)).resolves.toEqual(42); + }), + ); + + test( + `it should use the regular Node resolution when requiring files outside of the pnp install tree`, + makeTemporaryEnv({}, {plugNPlay: true}, async ({path, run, source}) => { + await run(`install`); + + const tmp = await createTemporaryFolder(); + + await writeFile(`${tmp}/node_modules/dep/index.js`, `module.exports = 42;`); + await writeFile(`${tmp}/index.js`, `require('dep')`); + + await source(`require("${tmp}/index.js")`); + }), + ); + + test( + `it should allow scripts outside of the dependency tree to require files within the dependency tree`, + makeTemporaryEnv( + {dependencies: {[`no-deps`]: `1.0.0`}}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + const tmp = await createTemporaryFolder(); + + await writeFile(`${tmp}/index.js`, `require(process.argv[2])`); + await writeFile(`${path}/index.js`, `require('no-deps')`); + + await run(`node`, `${tmp}/index.js`, `${path}/index.js`); + }, + ), + ); + + test( + `it should export the PnP API through the 'pnpapi' name`, + makeTemporaryEnv( + {dependencies: {[`no-deps`]: `1.0.0`}}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(source(`typeof 
require('pnpapi').VERSIONS.std`)).resolves.toEqual(`number`); + }, + ), + ); + + test( + `it should not update the installConfig.pnp field of the package.json when installing with an environment override`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(readJson(`${path}/package.json`)).resolves.not.toMatchObject({ + installConfig: {pnp: true}, + }); + }, + ), + ); + + test( + `it should update the installConfig.pnp field of the package.json when installing with --enable-pnp`, + makeTemporaryEnv({}, async ({path, run, source}) => { + await run(`install`, `--enable-pnp`); + + await expect(readJson(`${path}/package.json`)).resolves.toMatchObject({ + installConfig: {pnp: true}, + }); + }), + ); + + test( + `it should install dependencies using pnp when the installConfig.pnp field is set to true`, + makeTemporaryEnv( + { + dependencies: {[`no-deps`]: `1.0.0`}, + installConfig: {pnp: true}, + }, + async ({path, run, source}) => { + await run(`install`); + + expect(existsSync(`${path}/.pnp.js`)).toEqual(true); + }, + ), + ); + + test( + `it should update the installConfig.pnp field of the package.json when installing with --disable-pnp`, + makeTemporaryEnv( + { + installConfig: {pnp: true}, + }, + async ({path, run, source}) => { + await run(`install`, `--disable-pnp`); + + await expect(readJson(`${path}/package.json`)).resolves.not.toHaveProperty('installConfig.pnp'); + }, + ), + ); + + test( + `it should not remove other fields than installConfig.pnp when using --disable-pnp`, + makeTemporaryEnv( + { + installConfig: {pnp: true, foo: true}, + }, + async ({path, run, source}) => { + await run(`install`, `--disable-pnp`); + + await expect(readJson(`${path}/package.json`)).resolves.toHaveProperty('installConfig.foo', true); + }, + ), + ); + + test( + `it should generate a file that can be used as an executable to resolve a request (valid request)`, + makeTemporaryEnv( + { + dependencies: { + 
[`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + expect(statSync(`${path}/.pnp.js`).mode & 0o111).toEqual(0o111); + + const result = JSON.parse(cp.execFileSync(`${path}/.pnp.js`, [`no-deps`, `${path}/`], {encoding: `utf-8`})); + + expect(result[0]).toEqual(null); + expect(typeof result[1]).toEqual(`string`); + + expect(require(result[1])).toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should generate a file that can be used as an executable to resolve a request (builtin request)`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + expect(statSync(`${path}/.pnp.js`).mode & 0o111).toEqual(0o111); + + const result = JSON.parse(cp.execFileSync(`${path}/.pnp.js`, [`fs`, `${path}/`], {encoding: `utf-8`})); + + expect(result[0]).toEqual(null); + expect(result[1]).toEqual(null); + }, + ), + ); + + test( + `it should generate a file that can be used as an executable to resolve a request (invalid request)`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + expect(statSync(`${path}/.pnp.js`).mode & 0o111).toEqual(0o111); + + const result = JSON.parse( + cp.execFileSync(`${path}/.pnp.js`, [`doesnt-exists`, `${path}/`], {encoding: `utf-8`}), + ); + + expect(typeof result[0].code).toEqual(`string`); + expect(typeof result[0].message).toEqual(`string`); + + expect(result[1]).toEqual(null); + }, + ), + ); + + test( + `it should generate a file with a custom shebang if configured as such`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + plugnplayShebang: `foo`, + }, + async ({path, run, source}) => { + await run(`install`); + + expect(await readFile(`${path}/.pnp.js`, `utf-8`)).toMatch(/^#!foo\n/); + }, + ), + ); + + it( + `it should 
not be enabled for paths matching the specified regex`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + plugnplayBlacklist: `/foo/`, + }, + async ({path, run, source}) => { + await writeFile(`${path}/foo/shouldwork.js`, `module.exports = require('bad-dep');\n`); + await writeFile(`${path}/doesntwork.js`, `module.exports = require('bad-dep');\n`); + + await run(`install`); + + // Force it to exist so that the two scripts would succeed if using the node resolution + await writeFile(`${path}/node_modules/bad-dep/index.js`, `module.exports = 42;\n`); + + await expect(source(`require('./doesntwork')`)).rejects.toBeTruthy(); + await expect(source(`require('./foo/shouldwork')`)).resolves.toBeTruthy(); + }, + ), + ); + + it( + `it should not break relative requires for files within a blacklist`, + makeTemporaryEnv( + {}, + { + plugNPlay: true, + plugnplayBlacklist: `/foo/`, + }, + async ({path, run, source}) => { + await writeFile(`${path}/foo/filea.js`, `module.exports = require('./fileb');\n`); + await writeFile(`${path}/foo/fileb.js`, `module.exports = 42;\n`); + + await run(`install`); + + await expect(source(`require('./foo/filea')`)).resolves.toEqual(42); + }, + ), + ); + + test( + `it should install the packages within a node_modules directory (even if within the cache)`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + // This is to allow a maximal compatibility with packages that expect to + // be located inside a node_modules directory. Various tools (such as + // transpilers) also use regexps in their configuration that it would be + // nice not to break. 
+ + await run(`install`); + + expect(await source(`require.resolve('no-deps')`)).toMatch(/[\\\/]node_modules[\\\/]no-deps[\\\/]/); + }, + ), + ); + + test( + `it should install packages with peer dependencies within a node_modules directory (even if within the .pnp folder)`, + makeTemporaryEnv( + { + dependencies: { + [`peer-deps`]: `1.0.0`, + [`no-deps`]: `2.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + // This is to allow a maximal compatibility with packages that expect to + // be located inside a node_modules directory. Various tools (such as + // transpilers) also use regexps in their configuration that it would be + // nice not to break. + + await run(`install`); + + expect(await source(`require.resolve('peer-deps')`)).toMatch(/[\\\/]node_modules[\\\/]peer-deps[\\\/]/); + }, + ), + ); + + test( + `it should make it possible to copy the pnp file and cache from one place to another`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await makeTemporaryEnv( + { + [`no-deps`]: `1.0.0`, + }, + { + plugNPlay: true, + }, + async ({path: path2, run: run2, source: source2}) => { + // Move the install artifacts into a new location + // If the .pnp.js file references absolute paths, they will stop working + await rename(`${path}/.cache`, `${path2}/.cache`); + await rename(`${path}/.pnp.js`, `${path2}/.pnp.js`); + + await expect(source2(`require('no-deps')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + )(); + }, + ), + ); + + test( + `it should generate the same hooks for two projects with the same configuration`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + + await makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + 
async ({path: path2, run: run2, source: source2}) => { + expect(path2).not.toEqual(path); + + await run2(`install`); + + expect(readFile(`${path2}/.pnp.js`, 'utf8')).resolves.toEqual(await readFile(`${path}/.pnp.js`, 'utf8')); + }, + )(); + }, + ), + ); + + test( + `it should allow unplugging packages from a pnp installation`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + await run(`unplug`, `various-requires`); + + const listing = await readdir(`${path}/.pnp/unplugged`); + expect(listing).toHaveLength(1); + + await writeFile( + `${path}/.pnp/unplugged/${listing[0]}/node_modules/various-requires/alternative-index.js`, + `module.exports = "unplugged";\n`, + ); + + await expect(source(`require('various-requires/relative-require')`)).resolves.toMatch('unplugged'); + await expect(source(`require('no-deps/package.json')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should allow unplugging packages from a still uninstalled pnp installation`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`unplug`, `various-requires`); + + const listing = await readdir(`${path}/.pnp/unplugged`); + expect(listing).toHaveLength(1); + + await writeFile( + `${path}/.pnp/unplugged/${listing[0]}/node_modules/various-requires/alternative-index.js`, + `module.exports = "unplugged";\n`, + ); + + await expect(source(`require('various-requires/relative-require')`)).resolves.toMatch('unplugged'); + await expect(source(`require('no-deps/package.json')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should produce an error if unplugging with pnp disabled`, + makeTemporaryEnv( + { + dependencies: { + 
[`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: false, + }, + async ({path, run, source}) => { + await expect(run(`unplug`, `various-requires`)).rejects.toBeTruthy(); + }, + ), + ); + + test( + `it should allow unplugging multiple (deep) packages from a pnp installation`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `2.0.0`, + [`one-fixed-dep`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + await run(`unplug`, `various-requires`, `no-deps`); + + await expect(readdir(`${path}/.pnp/unplugged`)).resolves.toHaveLength(3); + }, + ), + ); + + test( + 'it should allow unplugging package (semver) ranges from a pnp installation', + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `2.0.0`, + [`one-fixed-dep`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + await run(`unplug`, `various-requires`, `no-deps@^1.0.0`); + + await expect(readdir(`${path}/.pnp/unplugged`)).resolves.toHaveLength(2); + }, + ), + ); + + test( + 'it should properly unplug a package with peer dependencies', + makeTemporaryEnv( + { + dependencies: {[`provides-peer-deps-1-0-0`]: `1.0.0`, [`provides-peer-deps-2-0-0`]: `1.0.0`}, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`unplug`, `no-deps`, `peer-deps`); + + await expect( + source(`require('provides-peer-deps-1-0-0') !== require('provides-peer-deps-2-0-0')`), + ).resolves.toEqual(true); + + await expect(source(`require('provides-peer-deps-1-0-0')`)).resolves.toMatchObject({ + name: `provides-peer-deps-1-0-0`, + version: `1.0.0`, + dependencies: { + [`peer-deps`]: { + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }, + [`no-deps`]: { + name: `no-deps`, + version: `1.0.0`, + }, + }, + }); + + await 
expect(source(`require('provides-peer-deps-2-0-0')`)).resolves.toMatchObject({ + name: `provides-peer-deps-2-0-0`, + version: `1.0.0`, + dependencies: { + [`peer-deps`]: { + name: `peer-deps`, + version: `1.0.0`, + peerDependencies: { + [`no-deps`]: { + name: `no-deps`, + version: `2.0.0`, + }, + }, + }, + [`no-deps`]: { + name: `no-deps`, + version: `2.0.0`, + }, + }, + }); + }, + ), + ); + + test( + `it shouldn't clear the unplugged folder when running an install`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`unplug`, `various-requires`); + await run(`install`); + + await expect(readdir(`${path}/.pnp/unplugged`)).resolves.toHaveLength(1); + }, + ), + ); + + test( + `it shouldn't clear the unplugged folder when unplugging new packages`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`unplug`, `various-requires`); + await run(`unplug`, `no-deps`); + + await expect(readdir(`${path}/.pnp/unplugged`)).resolves.toHaveLength(2); + }, + ), + ); + + test( + `it should clear the specified packages when using --clear`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`unplug`, `various-requires`); + + await expect(readdir(`${path}/.pnp/unplugged`)).resolves.toHaveLength(1); + + await run(`unplug`, `various-requires`, `--clear`); + + expect(existsSync(`${path}/.pnp/unplugged`)).toEqual(false); + }, + ), + ); + + test( + `it should clear the whole unplugged folder when using unplug --clear-all`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await 
run(`unplug`, `various-requires`); + await run(`unplug`, `--clear-all`); + + expect(existsSync(`${path}/.pnp/unplugged`)).toEqual(false); + }, + ), + ); + + test( + `it should not override an already unplugged package`, + makeTemporaryEnv( + { + dependencies: { + [`no-deps`]: `1.0.0`, + [`various-requires`]: `1.0.0`, + }, + }, + { + plugNPlay: true, + }, + async ({path, run, source}) => { + await run(`install`); + await run(`unplug`, `various-requires`); + + const listing = await readdir(`${path}/.pnp/unplugged`); + expect(listing).toHaveLength(1); + + await writeFile( + `${path}/.pnp/unplugged/${listing[0]}/node_modules/various-requires/alternative-index.js`, + `module.exports = "unplugged";\n`, + ); + + await run(`unplug`, `various-requires`, `no-deps`); + + await expect(source(`require('various-requires/relative-require')`)).resolves.toMatch('unplugged'); + await expect(source(`require('no-deps/package.json')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `1.0.0`, + }); + }, + ), + ); + + test( + `it should automatically unplug packages with postinstall scripts`, + makeTemporaryEnv( + { + dependencies: {[`no-deps-scripted`]: `1.0.0`}, + }, + {plugNPlay: true}, + async ({path, run, source}) => { + await run(`install`); + + const resolution = await source(`require.resolve('no-deps-scripted')`); + const cacheRelativeResolution = relative(`${path}/.cache`, resolution); + + expect( + cacheRelativeResolution && + !cacheRelativeResolution.startsWith(`..${path.sep}`) && + !isAbsolute(cacheRelativeResolution), + ); + }, + ), + ); + + test( + `it should not cache the postinstall artifacts`, + makeTemporaryEnv( + { + dependencies: {[`no-deps-scripted`]: `1.0.0`}, + }, + {plugNPlay: true}, + async ({path, run, source}) => { + await run(`install`); + + const rndBefore = await source(`require('no-deps-scripted/rnd.js')`); + + await remove(`${path}/.pnp`); + await remove(`${path}/.pnp.js`); + + await run(`install`); + + const rndAfter = await 
source(`require('no-deps-scripted/rnd.js')`); + + // It might fail once every blue moon, when the two random numbers are equal + expect(rndAfter).not.toEqual(rndBefore); + }, + ), + ); + }); +}; diff --git a/packages/pkg-tests/pkg-tests-specs/sources/script.js b/packages/pkg-tests/pkg-tests-specs/sources/script.js index e019cce870..999d6c452c 100644 --- a/packages/pkg-tests/pkg-tests-specs/sources/script.js +++ b/packages/pkg-tests/pkg-tests-specs/sources/script.js @@ -2,7 +2,10 @@ import type {PackageDriver} from 'pkg-tests-core'; -const {fs: {makeFakeBinary}} = require(`pkg-tests-core`); +const {existsSync, mkdirp} = require('fs-extra'); +const {isAbsolute, resolve} = require('path'); + +const {fs: {createTemporaryFolder, makeFakeBinary}} = require(`pkg-tests-core`); module.exports = (makeTemporaryEnv: PackageDriver) => { describe(`Scripts tests`, () => { @@ -16,5 +19,189 @@ module.exports = (makeTemporaryEnv: PackageDriver) => { }); }), ); + + test( + `it should run scripts using the same package manager than the one running the scripts`, + makeTemporaryEnv({scripts: {myScript: `yarn --version`}}, async ({path, run, source}) => { + await makeFakeBinary(`${path}/bin/yarn`); + + await expect(run(`run`, `myScript`)).resolves.toMatchObject({ + stdout: (await run(`--version`)).stdout, + }); + }), + ); + + test( + `it should run declared scripts`, + makeTemporaryEnv( + { + scripts: { + [`foobar`]: `echo test successful`, + }, + }, + async ({path, run, source}) => { + await expect(run(`run`, `foobar`)).resolves.toMatchObject({ + stdout: `test successful\n`, + }); + }, + ), + ); + + test( + `it should allow to execute the dependencies binaries`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(run(`run`, `has-bin-entries`, `success`)).resolves.toMatchObject({ + stdout: `success\n`, + }); + }, + ), + ); + + test( + `it should allow to execute the dependencies 
binaries even from a different cwd than the project root`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + await mkdirp(`${path}/foo/bar`); + + await expect( + run(`run`, `has-bin-entries`, `success`, { + cwd: `${path}/foo/bar`, + }), + ).resolves.toMatchObject({ + stdout: `success\n`, + }); + }, + ), + ); + + test( + `it should allow to retrieve the path to a dependency binary by its name`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + const {stdout} = await run(`bin`, `has-bin-entries`); + + expect(stdout.trim()).not.toEqual(``); + expect(existsSync(resolve(path, stdout.trim()))).toEqual(true); + }, + ), + ); + + test( + `it should return an absolute path when retrieving the path to a dependency binary`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + const {stdout} = await run(`bin`, `has-bin-entries`); + + expect(isAbsolute(stdout.trim())).toEqual(true); + }, + ), + ); + + test( + `it should allow to retrieve the path to a dependency binary, even when running from outside the project`, + makeTemporaryEnv( + { + dependencies: {[`has-bin-entries`]: `1.0.0`}, + }, + async ({path, run, source}) => { + await run(`install`); + + const tmp = await createTemporaryFolder(); + + const {stdout} = await run(`bin`, `has-bin-entries`, { + projectFolder: path, + cwd: tmp, + }); + + expect(stdout.trim()).not.toEqual(``); + expect(existsSync(resolve(tmp, stdout.trim()))).toEqual(true); + }, + ), + ); + + test( + `it shouldn't require the "--" flag to stop interpreting options after "run" commands`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(run(`run`, `has-bin-entries`, 
`--hello`)).resolves.toMatchObject({ + stdout: `--hello\n`, + }); + }, + ), + ); + + test( + `it should allow dependency binaries to require their own dependencies`, + makeTemporaryEnv( + { + dependencies: { + [`has-bin-entries`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await run(`install`); + + await expect(run(`run`, `has-bin-entries-with-require`)).resolves.toMatchObject({ + stdout: `no-deps\n1.0.0\n`, + }); + }, + ), + ); + + test( + `it should allow dependency binaries to require relative paths`, + makeTemporaryEnv({dependencies: {[`has-bin-entries`]: `1.0.0`}}, async ({path, run, source}) => { + await run(`install`); + + await expect(run(`run`, `has-bin-entries-with-relative-require`)).resolves.toMatchObject({ + stdout: `42\n`, + }); + }), + ); + + test( + `it should run install scripts during the install`, + makeTemporaryEnv({dependencies: {[`no-deps-scripted`]: `1.0.0`}}, async ({path, run, source}) => { + await run(`install`); + + await expect(source(`require('no-deps-scripted/log.js')`)).resolves.toEqual([100, 200, 300]); + }), + ); }); }; diff --git a/packages/pkg-tests/pkg-tests-specs/sources/workspace.js b/packages/pkg-tests/pkg-tests-specs/sources/workspace.js new file mode 100644 index 0000000000..c356c0154d --- /dev/null +++ b/packages/pkg-tests/pkg-tests-specs/sources/workspace.js @@ -0,0 +1,132 @@ +/* @flow */ + +import type {PackageDriver} from 'pkg-tests-core'; + +const {fs: {writeFile, writeJson}} = require('pkg-tests-core'); + +module.exports = (makeTemporaryEnv: PackageDriver) => { + describe(`Workspaces tests`, () => { + test( + `it should implicitely make workspaces require-able from the top-level`, + makeTemporaryEnv( + { + private: true, + workspaces: [`packages/*`], + }, + async ({path, run, source}) => { + await writeJson(`${path}/packages/workspace-a/package.json`, { + name: `workspace-a`, + version: `1.0.0`, + }); + + await writeFile( + `${path}/packages/workspace-a/index.js`, + ` + module.exports = 42; + `, + ); + + 
await run(`install`); + + await expect(source(`require('workspace-a')`)).resolves.toEqual(42); + }, + ), + ); + + test( + `it should allow workspaces to require each others`, + makeTemporaryEnv( + { + private: true, + workspaces: [`packages/*`], + dependencies: { + [`workspace-a`]: `1.0.0`, + [`workspace-b`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await writeJson(`${path}/packages/workspace-a/package.json`, { + name: `workspace-a`, + version: `1.0.0`, + dependencies: { + [`workspace-a`]: `1.0.0`, + }, + }); + + await writeFile( + `${path}/packages/workspace-a/index.js`, + ` + module.exports = require('workspace-b/package.json'); + `, + ); + + await writeJson(`${path}/packages/workspace-b/package.json`, { + name: `workspace-b`, + version: `1.0.0`, + dependencies: { + [`workspace-b`]: `1.0.0`, + }, + }); + + await writeFile( + `${path}/packages/workspace-b/index.js`, + ` + module.exports = require('workspace-a/package.json'); + `, + ); + + await run(`install`); + + await expect(source(`require('workspace-a')`)).resolves.toMatchObject({ + name: `workspace-b`, + }); + + await expect(source(`require('workspace-b')`)).resolves.toMatchObject({ + name: `workspace-a`, + }); + }, + ), + ); + + test( + `it should resolve workspaces as regular packages if the versions don't match`, + makeTemporaryEnv( + { + private: true, + workspaces: [`packages/*`], + dependencies: { + [`workspace`]: `1.0.0`, + }, + }, + async ({path, run, source}) => { + await writeJson(`${path}/packages/workspace/package.json`, { + name: `workspace`, + version: `1.0.0`, + dependencies: { + [`no-deps`]: `2.0.0`, + }, + }); + + await writeFile( + `${path}/packages/workspace/index.js`, + ` + module.exports = require('no-deps/package.json'); + `, + ); + + await writeJson(`${path}/packages/no-deps/package.json`, { + name: `no-deps`, + version: `1.0.0`, + }); + + await run(`install`); + + await expect(source(`require('workspace')`)).resolves.toMatchObject({ + name: `no-deps`, + version: `2.0.0`, 
+ }); + }, + ), + ); + }); +}; diff --git a/packages/pkg-tests/yarn.lock b/packages/pkg-tests/yarn.lock index a0fd840cc3..8adeb97797 100644 --- a/packages/pkg-tests/yarn.lock +++ b/packages/pkg-tests/yarn.lock @@ -91,11 +91,11 @@ anymatch@^1.3.0: micromatch "^2.1.5" normalize-path "^2.0.0" -append-transform@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" +append-transform@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-1.0.0.tgz#046a52ae582a228bd72f58acfbe2967c678759ab" dependencies: - default-require-extensions "^1.0.0" + default-require-extensions "^2.0.0" aproba@^1.0.3: version "1.2.0" @@ -120,10 +120,18 @@ arr-diff@^2.0.0: dependencies: arr-flatten "^1.0.1" -arr-flatten@^1.0.1: +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + +arr-flatten@^1.0.1, arr-flatten@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + array-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" @@ -140,6 +148,10 @@ array-unique@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + arrify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" @@ -156,6 +168,10 @@ 
assert-plus@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + astral-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" @@ -178,6 +194,10 @@ asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" +atob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.1.tgz#ae2d5a729477f289d60dd7f96a6314a22dd6c22a" + aws-sign2@~0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" @@ -336,12 +356,12 @@ babel-helpers@^6.24.1: babel-runtime "^6.22.0" babel-template "^6.24.1" -babel-jest@^22.2.2: - version "22.2.2" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-22.2.2.tgz#eda38dca284e32cc5257f96a9b51351975de4e04" +babel-jest@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-23.0.1.tgz#bbad3bf523fb202da05ed0a6540b48c84eed13a6" dependencies: - babel-plugin-istanbul "^4.1.5" - babel-preset-jest "^22.2.0" + babel-plugin-istanbul "^4.1.6" + babel-preset-jest "^23.0.1" babel-messages@^6.23.0: version "6.23.0" @@ -355,17 +375,18 @@ babel-plugin-check-es2015-constants@^6.22.0: dependencies: babel-runtime "^6.22.0" -babel-plugin-istanbul@^4.1.5: - version "4.1.5" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.5.tgz#6760cdd977f411d3e175bb064f2bc327d99b2b6e" +babel-plugin-istanbul@^4.1.6: + version "4.1.6" + resolved 
"https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz#36c59b2192efce81c5b378321b74175add1c9a45" dependencies: + babel-plugin-syntax-object-rest-spread "^6.13.0" find-up "^2.1.0" - istanbul-lib-instrument "^1.7.5" - test-exclude "^4.1.1" + istanbul-lib-instrument "^1.10.1" + test-exclude "^4.2.1" -babel-plugin-jest-hoist@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.2.0.tgz#bd34f39d652406669713b8c89e23ef25c890b993" +babel-plugin-jest-hoist@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-23.0.1.tgz#eaa11c964563aea9c21becef2bdf7853f7f3c148" babel-plugin-syntax-async-functions@^6.8.0: version "6.13.0" @@ -632,11 +653,11 @@ babel-preset-flow@^6.23.0: dependencies: babel-plugin-transform-flow-strip-types "^6.22.0" -babel-preset-jest@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-22.2.0.tgz#f77b43f06ef4d8547214b2e206cc76a25c3ba0e2" +babel-preset-jest@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-23.0.1.tgz#631cc545c6cf021943013bcaf22f45d87fe62198" dependencies: - babel-plugin-jest-hoist "^22.2.0" + babel-plugin-jest-hoist "^23.0.1" babel-plugin-syntax-object-rest-spread "^6.13.0" babel-register@^6.26.0: @@ -699,6 +720,18 @@ balanced-match@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + bcrypt-pbkdf@^1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" @@ -750,6 +783,21 @@ braces@^1.8.2: preserve "^0.2.0" repeat-element "^1.1.2" +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + browser-process-hrtime@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-0.1.2.tgz#425d68a58d3447f02a04aa894187fce8af8b7b8e" @@ -773,10 +821,28 @@ bser@^2.0.0: dependencies: node-int64 "^0.4.0" +buffer-from@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.0.tgz#87fcaa3a298358e0ade6e442cfce840740d1ad04" + builtin-modules@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + callsites@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" @@ -830,6 +896,15 @@ ci-info@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.1.2.tgz#03561259db48d0474c8bdc90f5b47b068b6bbfb4" +class-utils@^0.3.5: + version "0.3.6" + resolved 
"https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + cliui@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" @@ -854,6 +929,13 @@ code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + color-convert@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed" @@ -870,6 +952,14 @@ combined-stream@1.0.6, combined-stream@^1.0.5, combined-stream@~1.0.5: dependencies: delayed-stream "~1.0.0" +compare-versions@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-3.3.0.tgz#af93ea705a96943f622ab309578b9b90586f39c3" + +component-emitter@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" + concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" @@ -886,6 +976,10 @@ convert-source-map@^1.4.0, convert-source-map@^1.5.0: version "1.5.1" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5" +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + core-js@^2.4.0, core-js@^2.5.0: version 
"2.5.3" resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.3.tgz#8acc38345824f16d8365b7c9b4259168e8ed603e" @@ -930,7 +1024,7 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" -debug@^2.2.0, debug@^2.6.8: +debug@^2.2.0, debug@^2.3.3, debug@^2.6.8: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" dependencies: @@ -946,6 +1040,10 @@ decamelize@^1.0.0, decamelize@^1.1.1: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + deep-extend@~0.4.0: version "0.4.2" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" @@ -954,11 +1052,11 @@ deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" -default-require-extensions@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8" +default-require-extensions@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-2.0.0.tgz#f5f8fbb18a7d6d50b21f641f649ebb522cfe24f7" dependencies: - strip-bom "^2.0.0" + strip-bom "^3.0.0" define-properties@^1.1.2: version "1.1.2" @@ -967,6 +1065,25 @@ define-properties@^1.1.2: foreach "^2.0.5" object-keys "^1.0.8" +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" @@ -1098,22 +1215,47 @@ expand-brackets@^0.1.4: dependencies: is-posix-bracket "^0.1.0" +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + expand-range@^1.8.1: version "1.8.2" resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" dependencies: fill-range "^2.1.0" -expect@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-22.3.0.tgz#b1cb7db27a951ab6055f43937277152a9f668028" +expect@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/expect/-/expect-23.1.0.tgz#bfdfd57a2a20170d875999ee9787cc71f01c205f" dependencies: ansi-styles "^3.2.0" - jest-diff "^22.1.0" + jest-diff "^23.0.1" jest-get-type "^22.1.0" - jest-matcher-utils "^22.2.0" - jest-message-util "^22.2.0" - jest-regex-util "^22.1.0" + jest-matcher-utils "^23.0.1" + jest-message-util "^23.1.0" + jest-regex-util "^23.0.0" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, 
extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" extend@~3.0.0, extend@~3.0.1: version "3.0.1" @@ -1125,6 +1267,19 @@ extglob@^0.3.1: dependencies: is-extglob "^1.0.0" +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" @@ -1172,6 +1327,15 @@ fill-range@^2.1.0: repeat-element "^1.1.2" repeat-string "^1.5.2" +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + find-up@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" @@ -1189,7 +1353,7 @@ flow-bin@^0.66.0: version "0.66.0" resolved "https://registry.yarnpkg.com/flow-bin/-/flow-bin-0.66.0.tgz#a96dde7015dc3343fd552a7b4963c02be705ca26" -for-in@^1.0.1: +for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -1223,9 +1387,15 @@ form-data@~2.3.1: combined-stream "1.0.6" mime-types "^2.1.12" -fs-extra@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-6.0.0.tgz#0f0afb290bb3deb87978da816fcd3c7797f3a817" +fragment-cache@^0.2.1: + version "0.2.1" + resolved 
"https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + dependencies: + map-cache "^0.2.2" + +fs-extra@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.0.tgz#8cc3f47ce07ef7b3593a11b9fb245f7e34c041d6" dependencies: graceful-fs "^4.1.2" jsonfile "^4.0.0" @@ -1284,6 +1454,10 @@ get-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -1376,6 +1550,33 @@ has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + has@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" @@ -1481,6 
+1682,18 @@ invert-kv@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + dependencies: + kind-of "^6.0.0" + is-array@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-array/-/is-array-1.0.1.tgz#e9850cc2cc860c3bc0977e84ccf0dd464584279a" @@ -1509,10 +1722,38 @@ is-ci@^1.0.10: dependencies: ci-info "^1.0.0" +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + dependencies: + kind-of "^6.0.0" + is-date-object@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + is-dotfile@^1.0.0: 
version "1.0.3" resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" @@ -1523,10 +1764,16 @@ is-equal-shallow@^0.1.3: dependencies: is-primitive "^2.0.0" -is-extendable@^0.1.1: +is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + dependencies: + is-plain-object "^2.0.4" + is-extglob@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" @@ -1569,6 +1816,22 @@ is-number@^3.0.0: dependencies: kind-of "^3.0.2" +is-number@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" + +is-odd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-2.0.0.tgz#7646624671fd7ea558ccd9a2795182f2958f1b24" + dependencies: + is-number "^4.0.0" + +is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + dependencies: + isobject "^3.0.1" + is-posix-bracket@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" @@ -1599,6 +1862,10 @@ is-utf8@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -1613,82 +1880,87 @@ isobject@^2.0.0: dependencies: isarray "1.0.0" +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" -istanbul-api@^1.1.14: - version "1.2.2" - resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.2.2.tgz#e17cd519dd5ec4141197f246fdf380b75487f3b1" +istanbul-api@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.3.1.tgz#4c3b05d18c0016d1022e079b98dc82c40f488954" dependencies: async "^2.1.4" + compare-versions "^3.1.0" fileset "^2.0.2" - istanbul-lib-coverage "^1.1.2" - istanbul-lib-hook "^1.1.0" - istanbul-lib-instrument "^1.9.2" - istanbul-lib-report "^1.1.3" - istanbul-lib-source-maps "^1.2.3" - istanbul-reports "^1.1.4" + istanbul-lib-coverage "^1.2.0" + istanbul-lib-hook "^1.2.0" + istanbul-lib-instrument "^1.10.1" + istanbul-lib-report "^1.1.4" + istanbul-lib-source-maps "^1.2.4" + istanbul-reports "^1.3.0" js-yaml "^3.7.0" mkdirp "^0.5.1" once "^1.4.0" -istanbul-lib-coverage@^1.1.1, istanbul-lib-coverage@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.2.tgz#4113c8ff6b7a40a1ef7350b01016331f63afde14" +istanbul-lib-coverage@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.0.tgz#f7d8f2e42b97e37fe796114cb0f9d68b5e3a4341" -istanbul-lib-hook@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.1.0.tgz#8538d970372cb3716d53e55523dd54b557a8d89b" +istanbul-lib-hook@^1.2.0: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.2.1.tgz#f614ec45287b2a8fc4f07f5660af787575601805" dependencies: - append-transform "^0.4.0" + append-transform "^1.0.0" -istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0, istanbul-lib-instrument@^1.9.2: - version "1.9.2" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.9.2.tgz#84905bf47f7e0b401d6b840da7bad67086b4aab6" +istanbul-lib-instrument@^1.10.1: + version "1.10.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.1.tgz#724b4b6caceba8692d3f1f9d0727e279c401af7b" dependencies: babel-generator "^6.18.0" babel-template "^6.16.0" babel-traverse "^6.18.0" babel-types "^6.18.0" babylon "^6.18.0" - istanbul-lib-coverage "^1.1.2" + istanbul-lib-coverage "^1.2.0" semver "^5.3.0" -istanbul-lib-report@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.3.tgz#2df12188c0fa77990c0d2176d2d0ba3394188259" +istanbul-lib-report@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.4.tgz#e886cdf505c4ebbd8e099e4396a90d0a28e2acb5" dependencies: - istanbul-lib-coverage "^1.1.2" + istanbul-lib-coverage "^1.2.0" mkdirp "^0.5.1" path-parse "^1.0.5" supports-color "^3.1.2" -istanbul-lib-source-maps@^1.2.1, istanbul-lib-source-maps@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.3.tgz#20fb54b14e14b3fb6edb6aca3571fd2143db44e6" +istanbul-lib-source-maps@^1.2.4: + version "1.2.5" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.5.tgz#ffe6be4e7ab86d3603e4290d54990b14506fc9b1" dependencies: debug "^3.1.0" - istanbul-lib-coverage "^1.1.2" + istanbul-lib-coverage "^1.2.0" mkdirp "^0.5.1" rimraf "^2.6.1" source-map "^0.5.3" -istanbul-reports@^1.1.4: - version "1.1.4" - resolved 
"https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.4.tgz#5ccba5e22b7b5a5d91d5e0a830f89be334bf97bd" +istanbul-reports@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.3.0.tgz#2f322e81e1d9520767597dca3c20a0cce89a3554" dependencies: handlebars "^4.0.3" -jest-changed-files@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-22.2.0.tgz#517610c4a8ca0925bdc88b0ca53bd678aa8d019e" +jest-changed-files@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-23.0.1.tgz#f79572d0720844ea5df84c2a448e862c2254f60c" dependencies: throat "^4.0.0" -jest-cli@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-22.3.0.tgz#3fd986f2674f4168c91965be56ab9917a82a45db" +jest-cli@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-23.1.0.tgz#eb8bdd4ce0d15250892e31ad9b69bc99d2a8f6bf" dependencies: ansi-escapes "^3.0.0" chalk "^2.0.1" @@ -1697,23 +1969,25 @@ jest-cli@^22.3.0: graceful-fs "^4.1.11" import-local "^1.0.0" is-ci "^1.0.10" - istanbul-api "^1.1.14" - istanbul-lib-coverage "^1.1.1" - istanbul-lib-instrument "^1.8.0" - istanbul-lib-source-maps "^1.2.1" - jest-changed-files "^22.2.0" - jest-config "^22.3.0" - jest-environment-jsdom "^22.3.0" + istanbul-api "^1.3.1" + istanbul-lib-coverage "^1.2.0" + istanbul-lib-instrument "^1.10.1" + istanbul-lib-source-maps "^1.2.4" + jest-changed-files "^23.0.1" + jest-config "^23.1.0" + jest-environment-jsdom "^23.1.0" jest-get-type "^22.1.0" - jest-haste-map "^22.3.0" - jest-message-util "^22.2.0" - jest-regex-util "^22.1.0" - jest-resolve-dependencies "^22.1.0" - jest-runner "^22.3.0" - jest-runtime "^22.3.0" - jest-snapshot "^22.2.0" - jest-util "^22.3.0" - jest-worker "^22.2.2" + jest-haste-map "^23.1.0" + jest-message-util "^23.1.0" + jest-regex-util "^23.0.0" + jest-resolve-dependencies 
"^23.0.1" + jest-runner "^23.1.0" + jest-runtime "^23.1.0" + jest-snapshot "^23.0.1" + jest-util "^23.1.0" + jest-validate "^23.0.1" + jest-watcher "^23.1.0" + jest-worker "^23.0.1" micromatch "^2.3.11" node-notifier "^5.2.1" realpath-native "^1.0.0" @@ -1722,102 +1996,112 @@ jest-cli@^22.3.0: string-length "^2.0.0" strip-ansi "^4.0.0" which "^1.2.12" - yargs "^10.0.3" + yargs "^11.0.0" -jest-config@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-22.3.0.tgz#94c7149f123933a872ee24c1719687419c4a623c" +jest-config@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-23.1.0.tgz#708ca0f431d356ee424fb4895d3308006bdd8241" dependencies: + babel-core "^6.0.0" + babel-jest "^23.0.1" chalk "^2.0.1" glob "^7.1.1" - jest-environment-jsdom "^22.3.0" - jest-environment-node "^22.3.0" + jest-environment-jsdom "^23.1.0" + jest-environment-node "^23.1.0" jest-get-type "^22.1.0" - jest-jasmine2 "^22.3.0" - jest-regex-util "^22.1.0" - jest-resolve "^22.3.0" - jest-util "^22.3.0" - jest-validate "^22.2.2" - pretty-format "^22.1.0" - -jest-diff@^22.1.0: - version "22.1.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-22.1.0.tgz#0fad9d96c87b453896bf939df3dc8aac6919ac38" + jest-jasmine2 "^23.1.0" + jest-regex-util "^23.0.0" + jest-resolve "^23.1.0" + jest-util "^23.1.0" + jest-validate "^23.0.1" + pretty-format "^23.0.1" + +jest-diff@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-23.0.1.tgz#3d49137cee12c320a4b4d2b4a6fa6e82d491a16a" dependencies: chalk "^2.0.1" diff "^3.2.0" jest-get-type "^22.1.0" - pretty-format "^22.1.0" + pretty-format "^23.0.1" -jest-docblock@^22.2.2: - version "22.2.2" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-22.2.2.tgz#617f13edb16ec64202002b3c336cd14ae36c0631" +jest-docblock@^23.0.1: + version "23.0.1" + resolved 
"https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-23.0.1.tgz#deddd18333be5dc2415260a04ef3fce9276b5725" dependencies: detect-newline "^2.1.0" -jest-environment-jsdom@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.3.0.tgz#c267a063e5dc16219fba0e07542d8aa2576a1c88" +jest-each@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-23.1.0.tgz#16146b592c354867a5ae5e13cdf15c6c65b696c6" + dependencies: + chalk "^2.0.1" + pretty-format "^23.0.1" + +jest-environment-jsdom@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-23.1.0.tgz#85929914e23bed3577dac9755f4106d0697c479c" dependencies: - jest-mock "^22.2.0" - jest-util "^22.3.0" + jest-mock "^23.1.0" + jest-util "^23.1.0" jsdom "^11.5.1" -jest-environment-node@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.3.0.tgz#97d34d9706a718d743075149d1950555c10338c0" +jest-environment-node@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-23.1.0.tgz#452c0bf949cfcbbacda1e1762eeed70bc784c7d5" dependencies: - jest-mock "^22.2.0" - jest-util "^22.3.0" + jest-mock "^23.1.0" + jest-util "^23.1.0" jest-get-type@^22.1.0: version "22.1.0" resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.1.0.tgz#4e90af298ed6181edc85d2da500dbd2753e0d5a9" -jest-haste-map@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-22.3.0.tgz#e7f048a88735bae07ca12de8785eb8bc522adeab" +jest-haste-map@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-23.1.0.tgz#18e6c7d5a8d27136f91b7d9852f85de0c7074c49" dependencies: fb-watchman "^2.0.0" graceful-fs "^4.1.11" - jest-docblock "^22.2.2" - jest-worker "^22.2.2" + jest-docblock "^23.0.1" + 
jest-serializer "^23.0.1" + jest-worker "^23.0.1" micromatch "^2.3.11" sane "^2.0.0" -jest-jasmine2@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.3.0.tgz#ea127dfbb04c6e03998ae0358225435e47520666" +jest-jasmine2@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-23.1.0.tgz#4afab31729b654ddcd2b074add849396f13b30b8" dependencies: - callsites "^2.0.0" chalk "^2.0.1" co "^4.6.0" - expect "^22.3.0" - graceful-fs "^4.1.11" + expect "^23.1.0" is-generator-fn "^1.0.0" - jest-diff "^22.1.0" - jest-matcher-utils "^22.2.0" - jest-message-util "^22.2.0" - jest-snapshot "^22.2.0" - source-map-support "^0.5.0" + jest-diff "^23.0.1" + jest-each "^23.1.0" + jest-matcher-utils "^23.0.1" + jest-message-util "^23.1.0" + jest-snapshot "^23.0.1" + jest-util "^23.1.0" + pretty-format "^23.0.1" -jest-leak-detector@^22.1.0: - version "22.1.0" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-22.1.0.tgz#08376644cee07103da069baac19adb0299b772c2" +jest-leak-detector@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-23.0.1.tgz#9dba07505ac3495c39d3ec09ac1e564599e861a0" dependencies: - pretty-format "^22.1.0" + pretty-format "^23.0.1" -jest-matcher-utils@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-22.2.0.tgz#5390f823c18c748543d463825aa8e4df0db253ca" +jest-matcher-utils@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-23.0.1.tgz#0c6c0daedf9833c2a7f36236069efecb4c3f6e5f" dependencies: chalk "^2.0.1" jest-get-type "^22.1.0" - pretty-format "^22.1.0" + pretty-format "^23.0.1" -jest-message-util@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-22.2.0.tgz#84a6bb34186d8b9af7e0732fabbef63f7355f7b2" +jest-message-util@^23.1.0: + 
version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-23.1.0.tgz#9a809ba487ecac5ce511d4e698ee3b5ee2461ea9" dependencies: "@babel/code-frame" "^7.0.0-beta.35" chalk "^2.0.1" @@ -1825,111 +2109,130 @@ jest-message-util@^22.2.0: slash "^1.0.0" stack-utils "^1.0.1" -jest-mock@^22.2.0: - version "22.2.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-22.2.0.tgz#444b3f9488a7473adae09bc8a77294afded397a7" +jest-mock@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-23.1.0.tgz#a381c31b121ab1f60c462a2dadb7b86dcccac487" -jest-regex-util@^22.1.0: - version "22.1.0" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-22.1.0.tgz#5daf2fe270074b6da63e5d85f1c9acc866768f53" +jest-regex-util@^23.0.0: + version "23.0.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-23.0.0.tgz#dd5c1fde0c46f4371314cf10f7a751a23f4e8f76" -jest-resolve-dependencies@^22.1.0: - version "22.1.0" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-22.1.0.tgz#340e4139fb13315cd43abc054e6c06136be51e31" +jest-resolve-dependencies@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-23.0.1.tgz#d01a10ddad9152c4cecdf5eac2b88571c4b6a64d" dependencies: - jest-regex-util "^22.1.0" + jest-regex-util "^23.0.0" + jest-snapshot "^23.0.1" -jest-resolve@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.3.0.tgz#648e797f708e8701071a0fa9fac652c577bb66d9" +jest-resolve@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-23.1.0.tgz#b9e316eecebd6f00bc50a3960d1527bae65792d2" dependencies: browser-resolve "^1.11.2" chalk "^2.0.1" + realpath-native "^1.0.0" -jest-runner@^22.3.0: - version "22.3.0" - resolved 
"https://registry.yarnpkg.com/jest-runner/-/jest-runner-22.3.0.tgz#70393f62770be754e2d14f5ca3d896e408aa001a" +jest-runner@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-23.1.0.tgz#fa20a933fff731a5432b3561e7f6426594fa29b5" dependencies: exit "^0.1.2" - jest-config "^22.3.0" - jest-docblock "^22.2.2" - jest-haste-map "^22.3.0" - jest-jasmine2 "^22.3.0" - jest-leak-detector "^22.1.0" - jest-message-util "^22.2.0" - jest-runtime "^22.3.0" - jest-util "^22.3.0" - jest-worker "^22.2.2" + graceful-fs "^4.1.11" + jest-config "^23.1.0" + jest-docblock "^23.0.1" + jest-haste-map "^23.1.0" + jest-jasmine2 "^23.1.0" + jest-leak-detector "^23.0.1" + jest-message-util "^23.1.0" + jest-runtime "^23.1.0" + jest-util "^23.1.0" + jest-worker "^23.0.1" + source-map-support "^0.5.6" throat "^4.0.0" -jest-runtime@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-22.3.0.tgz#1883d6a4227c1f6af276ead3ed27654257d1ef8c" +jest-runtime@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-23.1.0.tgz#b4ae0e87259ecacfd4a884b639db07cf4dd620af" dependencies: babel-core "^6.0.0" - babel-jest "^22.2.2" - babel-plugin-istanbul "^4.1.5" + babel-plugin-istanbul "^4.1.6" chalk "^2.0.1" convert-source-map "^1.4.0" exit "^0.1.2" + fast-json-stable-stringify "^2.0.0" graceful-fs "^4.1.11" - jest-config "^22.3.0" - jest-haste-map "^22.3.0" - jest-regex-util "^22.1.0" - jest-resolve "^22.3.0" - jest-util "^22.3.0" - json-stable-stringify "^1.0.1" + jest-config "^23.1.0" + jest-haste-map "^23.1.0" + jest-message-util "^23.1.0" + jest-regex-util "^23.0.0" + jest-resolve "^23.1.0" + jest-snapshot "^23.0.1" + jest-util "^23.1.0" + jest-validate "^23.0.1" micromatch "^2.3.11" realpath-native "^1.0.0" slash "^1.0.0" strip-bom "3.0.0" write-file-atomic "^2.1.0" - yargs "^10.0.3" + yargs "^11.0.0" -jest-snapshot@^22.2.0: - version "22.2.0" - resolved 
"https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-22.2.0.tgz#0c0ba152d296ef70fa198cc84977a2cc269ee4cf" +jest-serializer@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-23.0.1.tgz#a3776aeb311e90fe83fab9e533e85102bd164165" + +jest-snapshot@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-23.0.1.tgz#6674fa19b9eb69a99cabecd415bddc42d6af3e7e" dependencies: chalk "^2.0.1" - jest-diff "^22.1.0" - jest-matcher-utils "^22.2.0" + jest-diff "^23.0.1" + jest-matcher-utils "^23.0.1" mkdirp "^0.5.1" natural-compare "^1.4.0" - pretty-format "^22.1.0" + pretty-format "^23.0.1" -jest-util@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-22.3.0.tgz#d05bff567a3a86c0e9b3838d812f8290aa768097" +jest-util@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-23.1.0.tgz#c0251baf34644c6dd2fea78a962f4263ac55772d" dependencies: callsites "^2.0.0" chalk "^2.0.1" graceful-fs "^4.1.11" is-ci "^1.0.10" - jest-message-util "^22.2.0" - jest-validate "^22.2.2" + jest-message-util "^23.1.0" mkdirp "^0.5.1" + slash "^1.0.0" + source-map "^0.6.0" -jest-validate@^22.2.2: - version "22.2.2" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.2.2.tgz#9cdce422c93cc28395e907ac6bbc929158d9a6ba" +jest-validate@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-23.0.1.tgz#cd9f01a89d26bb885f12a8667715e9c865a5754f" dependencies: chalk "^2.0.1" jest-get-type "^22.1.0" leven "^2.1.0" - pretty-format "^22.1.0" + pretty-format "^23.0.1" -jest-worker@^22.2.2: - version "22.2.2" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-22.2.2.tgz#c1f5dc39976884b81f68ec50cb8532b2cbab3390" +jest-watcher@^23.1.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-23.1.0.tgz#a8d5842e38d9fb4afff823df6abb42a58ae6cdbd" 
+ dependencies: + ansi-escapes "^3.0.0" + chalk "^2.0.1" + string-length "^2.0.0" + +jest-worker@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-23.0.1.tgz#9e649dd963ff4046026f91c4017f039a6aa4a7bc" dependencies: merge-stream "^1.0.1" -jest@^22.3.0: - version "22.3.0" - resolved "https://registry.yarnpkg.com/jest/-/jest-22.3.0.tgz#07434314d2e8662ea936552d950680b7e6551b0d" +jest@^23.0.0: + version "23.1.0" + resolved "https://registry.yarnpkg.com/jest/-/jest-23.1.0.tgz#bbb7f893100a11a742dd8bd0d047a54b0968ad1a" dependencies: import-local "^1.0.0" - jest-cli "^22.3.0" + jest-cli "^23.1.0" js-tokens@^3.0.0, js-tokens@^3.0.2: version "3.0.2" @@ -2026,7 +2329,7 @@ jsprim@^1.2.2: json-schema "0.2.3" verror "1.10.0" -kind-of@^3.0.2: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" dependencies: @@ -2038,6 +2341,14 @@ kind-of@^4.0.0: dependencies: is-buffer "^1.1.5" +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" + klaw@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/klaw/-/klaw-2.1.1.tgz#42b76894701169cc910fd0d19ce677b5fb378af1" @@ -2117,6 +2428,16 @@ makeerror@1.0.x: dependencies: tmpl "1.0.x" +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + dependencies: + object-visit "^1.0.0" + mem@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" @@ -2151,6 +2472,24 @@ micromatch@^2.1.5, micromatch@^2.3.11: parse-glob "^3.0.4" regex-cache "^0.4.2" +micromatch@^3.1.8: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + mime-db@~1.33.0: version "1.33.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db" @@ -2183,6 +2522,13 @@ minimist@~0.0.1: version "0.0.10" resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" +mixin-deep@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + "mkdirp@>=0.5 0", mkdirp@^0.5.1: version "0.5.1" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" @@ -2197,6 +2543,23 @@ nan@^2.3.0: version "2.8.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.8.0.tgz#ed715f3fe9de02b57a5e6252d90a96675e1f085a" +nanomatch@^1.2.9: + version "1.2.9" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.9.tgz#879f7150cb2dab7a471259066c104eee6e0fa7c2" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-odd "^2.0.0" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + natural-compare@^1.4.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" @@ -2283,10 +2646,24 @@ object-assign@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + object-keys@^1.0.11, object-keys@^1.0.8: version "1.0.11" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + dependencies: + isobject "^3.0.0" + object.getownpropertydescriptors@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" @@ -2301,6 +2678,12 @@ object.omit@^2.0.0: for-own "^0.1.4" is-extendable "^0.1.1" +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + dependencies: + isobject "^3.0.1" + once@^1.3.0, once@^1.3.1, once@^1.3.3, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -2387,6 +2770,10 @@ parse5@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/parse5/-/parse5-4.0.0.tgz#6d78656e3da8d78b4ec0b906f7c08ef1dfe3f608" +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + path-exists@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" @@ -2449,6 +2836,10 @@ pn@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb" +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" @@ -2461,9 +2852,9 @@ prettier@^1.10.2: version "1.10.2" resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.10.2.tgz#1af8356d1842276a99a5b5529c82dd9e9ad3cc93" -pretty-format@^22.1.0: - version "22.1.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-22.1.0.tgz#2277605b40ed4529ae4db51ff62f4be817647914" +pretty-format@^23.0.1: + version "23.0.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-23.0.1.tgz#d61d065268e4c759083bccbca27a01ad7c7601f4" dependencies: ansi-regex "^3.0.0" ansi-styles "^3.2.0" @@ -2574,6 +2965,13 @@ regex-cache@^0.4.2: dependencies: is-equal-shallow "^0.1.3" +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + regexpu-core@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" @@ -2600,7 +2998,7 @@ repeat-element@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" -repeat-string@^1.5.2: +repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved 
"https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" @@ -2696,10 +3094,18 @@ resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + resolve@1.1.7: version "1.1.7" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + right-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" @@ -2716,6 +3122,12 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + dependencies: + ret "~0.1.10" + sane@^2.0.0: version "2.4.1" resolved "https://registry.yarnpkg.com/sane/-/sane-2.4.1.tgz#29f991208cf28636720efdc584293e7fd66663a5" @@ -2742,6 +3154,24 @@ set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" +set-value@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.1" + to-object-path "^0.3.0" + +set-value@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" @@ -2764,6 +3194,33 @@ slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + sntp@1.x.x: version "1.0.9" resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" @@ -2776,18 +3233,33 @@ sntp@2.x.x: dependencies: hoek "4.x.x" +source-map-resolve@^0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.2.tgz#72e2cc34095543e43b2c62b2c4c10d4a9054f259" + dependencies: + atob "^2.1.1" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + source-map-support@^0.4.15: version "0.4.18" resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" dependencies: source-map "^0.5.6" -source-map-support@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.3.tgz#2b3d5fff298cfa4d1afd7d4352d569e9a0158e76" +source-map-support@^0.5.6: + version "0.5.6" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.6.tgz#4435cee46b1aab62b8e8610ce60f788091c51c13" dependencies: + buffer-from "^1.0.0" source-map "^0.6.0" +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + source-map@^0.4.4: version "0.4.4" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" @@ -2816,6 +3288,12 @@ spdx-license-ids@^1.0.2: version "1.2.2" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + dependencies: + extend-shallow "^3.0.0" + sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -2838,6 +3316,13 @@ stack-utils@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.1.tgz#d4f33ab54e8e38778b0ca5cfd3b3afb12db68620" +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + stealthy-require@^1.1.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" @@ -2886,7 +3371,7 @@ strip-ansi@^4.0.0: dependencies: ansi-regex "^3.0.0" -strip-bom@3.0.0: +strip-bom@3.0.0, strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" @@ -2972,12 +3457,12 @@ tar@^2.2.1: fstream "^1.0.2" inherits "2" -test-exclude@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.2.0.tgz#07e3613609a362c74516a717515e13322ab45b3c" +test-exclude@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.2.1.tgz#dfa222f03480bca69207ca728b37d74b45f724fa" dependencies: arrify "^1.0.1" - micromatch "^2.3.11" + micromatch "^3.1.8" object-assign "^4.1.0" read-pkg-up "^1.0.1" require-main-filename "^1.0.1" @@ -3000,6 +3485,28 @@ to-fast-properties@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + tough-cookie@>=2.3.3, tough-cookie@^2.3.3, tough-cookie@~2.3.0, tough-cookie@~2.3.3: version "2.3.3" resolved 
"https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.3.tgz#0b618a5565b6dea90bf3425d04d55edc475a7561" @@ -3053,10 +3560,36 @@ ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" +union-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^0.4.3" + universalify@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.1.tgz#fa71badd4437af4c148841e3b3b165f9e9e590b7" +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + +use@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.0.tgz#14716bf03fdfefd03040aef58d8b4b85f3a7c544" + dependencies: + kind-of "^6.0.2" + util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -3199,15 +3732,15 @@ yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" -yargs-parser@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-8.1.0.tgz#f1376a33b6629a5d063782944da732631e966950" +yargs-parser@^9.0.2: + version "9.0.2" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-9.0.2.tgz#9ccf6a43460fe4ed40a9bb68f48d43b8a68cc077" dependencies: camelcase "^4.1.0" -yargs@^10.0.3: - version "10.1.2" - resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-10.1.2.tgz#454d074c2b16a51a43e2fb7807e4f9de69ccb5c5" +yargs@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-11.0.0.tgz#c052931006c5eee74610e5fc0354bedfd08a201b" dependencies: cliui "^4.0.0" decamelize "^1.1.1" @@ -3220,7 +3753,7 @@ yargs@^10.0.3: string-width "^2.0.0" which-module "^2.0.0" y18n "^3.2.1" - yargs-parser "^8.1.0" + yargs-parser "^9.0.2" yargs@~3.10.0: version "3.10.0" diff --git a/packages/pkg-tests/yarn.test.js b/packages/pkg-tests/yarn.test.js index 43f5cd2899..d3727b592f 100644 --- a/packages/pkg-tests/yarn.test.js +++ b/packages/pkg-tests/yarn.test.js @@ -1,19 +1,66 @@ /* @flow */ +const {delimiter} = require(`path`); + const { tests: {generatePkgDriver, startPackageServer, getPackageRegistry}, exec: {execFile}, } = require(`pkg-tests-core`); -const {basic: basicSpecs, dragon: dragonSpecs, script: scriptSpecs} = require(`pkg-tests-specs`); +const { + basic: basicSpecs, + dragon: dragonSpecs, + lock: lockSpecs, + pnp: pnpSpecs, + script: scriptSpecs, + workspace: workspaceSpecs, +} = require(`pkg-tests-specs`); const pkgDriver = generatePkgDriver({ - runDriver: (path, args, {registryUrl}) => { - const extraArgs = [`--cache-folder`, `${path}/.cache`]; - return execFile(process.execPath, [`${process.cwd()}/../../bin/yarn.js`, ...extraArgs, ...args], { - env: {[`NPM_CONFIG_REGISTRY`]: registryUrl, [`YARN_SILENT`]: `1`}, - cwd: path, - }); + async runDriver( + path, + [command, ...args], + {cwd, projectFolder, registryUrl, plugNPlay, plugnplayShebang, plugnplayBlacklist}, + ) { + let beforeArgs = []; + let middleArgs = []; + + if (projectFolder) { + beforeArgs = [...beforeArgs, `--cwd`, projectFolder]; + } + + if (command === 'install') { + middleArgs = [...middleArgs, `--cache-folder`, `${path}/.cache`]; + } + + const res = await execFile( + process.execPath, + [`${process.cwd()}/../../bin/yarn.js`, ...beforeArgs, command, ...middleArgs, ...args], + { + env: Object.assign( 
+ { + [`NPM_CONFIG_REGISTRY`]: registryUrl, + [`YARN_SILENT`]: `1`, + [`YARN_PROXY`]: ``, + [`YARN_HTTPS_PROXY`]: ``, + [`YARN_PLUGNPLAY_SHEBANG`]: plugnplayShebang || ``, + [`YARN_PLUGNPLAY_BLACKLIST`]: plugnplayBlacklist || ``, + [`PATH`]: `${path}/bin${delimiter}${process.env.PATH}`, + }, + plugNPlay ? {[`YARN_PLUGNPLAY_OVERRIDE`]: plugNPlay ? `1` : `0`} : {}, + ), + cwd: cwd || path, + }, + ); + + if (process.env.JEST_LOG_SPAWNS) { + console.log(`===== stdout:`); + console.log(res.stdout); + console.log(`===== stderr:`); + console.log(res.stderr); + } + + return res; }, }); @@ -23,5 +70,8 @@ beforeEach(async () => { }); basicSpecs(pkgDriver); -dragonSpecs(pkgDriver); +lockSpecs(pkgDriver); scriptSpecs(pkgDriver); +workspaceSpecs(pkgDriver); +pnpSpecs(pkgDriver); +dragonSpecs(pkgDriver); diff --git a/scripts/build-webpack.js b/scripts/build-webpack.js index 6a39025cd9..dc7b68ffae 100755 --- a/scripts/build-webpack.js +++ b/scripts/build-webpack.js @@ -3,6 +3,7 @@ const webpack = require('webpack'); const path = require('path'); +const resolve = require('resolve'); const util = require('util'); const fs = require('fs'); @@ -10,6 +11,52 @@ const version = require('../package.json').version; const basedir = path.join(__dirname, '../'); const babelRc = JSON.parse(fs.readFileSync(path.join(basedir, '.babelrc'), 'utf8')); +var PnpResolver = { + apply: function(resolver) { + resolver.plugin('resolve', function(request, callback) { + if (request.context.issuer === undefined) { + return callback(); + } + + let basedir; + let resolved; + + if (!request.context.issuer) { + basedir = request.path; + } else if (request.context.issuer.startsWith('/')) { + basedir = path.dirname(request.context.issuer); + } else { + throw 42; + } + + try { + resolved = resolve.sync(request.request, {basedir}); + } catch (error) { + // TODO This is not good! 
But the `debug` package tries to require `supports-color` without declaring it in its + // package.json, and Webpack accepts this because it's in a try/catch, so we need to do it as well. + resolved = false; + } + + this.doResolve(['resolved'], Object.assign({}, request, { + path: resolved, + }), '', callback); + }); + } +}; + +const pnpOptions = fs.existsSync(`${__dirname}/../.pnp.js`) ? { + resolve: { + plugins: [ + PnpResolver, + ] + }, + resolveLoader: { + plugins: [ + PnpResolver, + ] + } +} : {}; + // Use the real node __dirname and __filename in order to get Yarn's source // files on the user's system. See constants.js const nodeOptions = { @@ -31,8 +78,8 @@ const compiler = webpack({ rules: [ { test: /\.js$/, - exclude: /node_modules/, - use: 'babel-loader', + exclude: /node_modules|Caches/, + loader: require.resolve('babel-loader') }, { test: /rx\.lite\.aggregates\.js/, @@ -54,6 +101,7 @@ const compiler = webpack({ }, target: 'node', node: nodeOptions, + ... pnpOptions, }); compiler.run((err, stats) => { @@ -100,6 +148,7 @@ const compilerLegacy = webpack({ }, target: 'node', node: nodeOptions, + ... 
pnpOptions, }); compilerLegacy.run((err, stats) => { diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 78631ba8fe..347af7f4c2 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -1,9 +1,10 @@ /* @flow */ +import type {RegistryNames} from '../../registries/index.js'; import type {Reporter} from '../../reporters/index.js'; import type {InstallCwdRequest} from './install.js'; import type {DependencyRequestPatterns, Manifest} from '../../types.js'; -import type Config from '../../config.js'; +import type Config, {RootManifests} from '../../config.js'; import type {ListOptions} from './list.js'; import Lockfile from '../../lockfile'; import {normalizePattern} from '../../util/normalize-pattern.js'; @@ -180,10 +181,32 @@ export class Add extends Install { this.addedPatterns = []; const patterns = await Install.prototype.init.call(this); await this.maybeOutputSaveTree(patterns); - await this.savePackages(); return patterns; } + async applyChanges(manifests: RootManifests): Promise { + await Install.prototype.applyChanges.call(this, manifests); + + // fill rootPatternsToOrigin without `excludePatterns` + await Install.prototype.fetchRequestFromCwd.call(this); + + this._iterateAddedPackages((pattern, registry, dependencyType, pkgName, version) => { + // add it to manifest + const {object} = manifests[registry]; + + object[dependencyType] = object[dependencyType] || {}; + object[dependencyType][pkgName] = version; + if ( + SILENCE_DEPENDENCY_TYPE_WARNINGS.indexOf(this.config.commandName) === -1 && + dependencyType !== this.flagToOrigin + ) { + this.reporter.warn(this.reporter.lang('moduleAlreadyInManifest', pkgName, dependencyType, this.flagToOrigin)); + } + }); + + return true; + } + /** * Description */ @@ -234,31 +257,10 @@ export class Add extends Install { * Save added packages to manifest if any of the --save flags were used. 
*/ - async savePackages(): Promise { - // fill rootPatternsToOrigin without `excludePatterns` - await Install.prototype.fetchRequestFromCwd.call(this); - // // get all the different registry manifests in this folder - const manifests: Object = await this.config.getRootManifests(); - - this._iterateAddedPackages((pattern, registry, dependencyType, pkgName, version) => { - // add it to manifest - const {object} = manifests[registry]; - - object[dependencyType] = object[dependencyType] || {}; - object[dependencyType][pkgName] = version; - if ( - SILENCE_DEPENDENCY_TYPE_WARNINGS.indexOf(this.config.commandName) === -1 && - dependencyType !== this.flagToOrigin - ) { - this.reporter.warn(this.reporter.lang('moduleAlreadyInManifest', pkgName, dependencyType, this.flagToOrigin)); - } - }); - - await this.config.saveRootManifests(manifests); - } + async savePackages(): Promise {} _iterateAddedPackages( - f: (pattern: string, registry: string, dependencyType: string, pkgName: string, version: string) => void, + f: (pattern: string, registry: RegistryNames, dependencyType: string, pkgName: string, version: string) => void, ) { const patternOrigins = Object.keys(this.rootPatternsToOrigin); diff --git a/src/cli/commands/bin.js b/src/cli/commands/bin.js index 3f1db7d1ff..c8f419f35c 100644 --- a/src/cli/commands/bin.js +++ b/src/cli/commands/bin.js @@ -3,8 +3,8 @@ import type {Reporter} from '../../reporters/index.js'; import type Config from '../../config.js'; import RegistryYarn from '../../resolvers/registries/yarn-resolver.js'; +import {getBinEntries} from './run.js'; -const fs = require('fs'); const path = require('path'); export function hasWrapper(commander: Object): boolean { @@ -15,18 +15,20 @@ export function setFlags(commander: Object) { commander.description('Displays the location of the yarn bin folder.'); } -export function run(config: Config, reporter: Reporter, flags: Object, args: Array): Promise { +export async function run(config: Config, reporter: Reporter, 
flags: Object, args: Array): Promise { const binFolder = path.join(config.cwd, config.registries[RegistryYarn.registry].folder, '.bin'); if (args.length === 0) { reporter.log(binFolder, {force: true}); } else { + const binEntries = await getBinEntries(config); + const binName = args[0]; - const finalPath = path.normalize(`${binFolder}/${binName}`); - if (fs.existsSync(finalPath)) { - reporter.log(finalPath, {force: true}); + const binPath = binEntries.get(binName); + + if (binPath) { + reporter.log(binPath, {force: true}); } else { reporter.error(reporter.lang('packageBinaryNotFound', binName)); } } - return Promise.resolve(); } diff --git a/src/cli/commands/cache.js b/src/cli/commands/cache.js index 5a3cc2f27f..240f11a80f 100644 --- a/src/cli/commands/cache.js +++ b/src/cli/commands/cache.js @@ -5,6 +5,7 @@ import type Config from '../../config.js'; import buildSubCommands from './_build-sub-commands.js'; import * as fs from '../../util/fs.js'; +const invariant = require('invariant'); const path = require('path'); const micromatch = require('micromatch'); @@ -12,11 +13,7 @@ export function hasWrapper(flags: Object, args: Array): boolean { return args[0] !== 'dir'; } -function isScopedPackageDirectory(packagePath): boolean { - return packagePath.indexOf('@') > -1; -} - -async function getPackagesPaths(config, currentPath): Object { +export async function getCachedPackagesDirs(config: Config, currentPath: string): Object { const results = []; const stat = await fs.lstat(currentPath); @@ -29,13 +26,33 @@ async function getPackagesPaths(config, currentPath): Object { if (folder[0] === '.') { continue; } - const packagePath = path.join(currentPath, folder); - if (isScopedPackageDirectory(folder)) { - results.push(...(await getPackagesPaths(config, packagePath))); - } else { - results.push(packagePath); + const packageParentPath = path.join(currentPath, folder, 'node_modules'); + + const candidates = await fs.readdir(packageParentPath); + invariant( + candidates.length 
=== 1, + `There should only be one folder in a package cache (got ${candidates.join(',')})`, + ); + + for (const candidate of candidates) { + const candidatePath = path.join(packageParentPath, candidate); + if (candidate.charAt(0) === '@') { + const subCandidates = await fs.readdir(candidatePath); + invariant( + subCandidates.length === 1, + `There should only be one folder in a package cache (got ${subCandidates.join(',')})`, + ); + + for (const subCandidate of subCandidates) { + const subCandidatePath = path.join(candidatePath, subCandidate); + results.push(subCandidatePath); + } + } else { + results.push(candidatePath); + } } } + return results; } @@ -53,7 +70,7 @@ function _getMetadataWithPath(getMetadataFn: Function, paths: Array): Pr } async function getCachedPackages(config): Object { - const paths = await getPackagesPaths(config, config.cacheFolder); + const paths = await getCachedPackagesDirs(config, config.cacheFolder); return _getMetadataWithPath(config.readPackageMetadata.bind(config), paths).then(packages => packages.filter(p => !!p), ); @@ -90,8 +107,13 @@ async function clean(config: Config, reporter: Reporter, flags: Object, args: Ar const packagesToDelete = packages.filter(shouldDelete); for (const manifest of packagesToDelete) { - await fs.unlink(manifest._path); // save package path when retrieving + let relativePath = path.relative(config.cacheFolder, manifest._path); + while (relativePath && relativePath !== '.') { + await fs.unlink(path.resolve(config.cacheFolder, relativePath)); + relativePath = path.dirname(relativePath); + } } + activity.end(); reporter.success(reporter.lang('clearedPackageFromCache', args[0])); } else { diff --git a/src/cli/commands/check.js b/src/cli/commands/check.js index ebc5e7078d..52f5e9e99c 100644 --- a/src/cli/commands/check.js +++ b/src/cli/commands/check.js @@ -91,6 +91,11 @@ export async function verifyTreeCheck( continue; } locationsVisited.add(manifestLoc + `@${dep.version}`); + // When plugnplay is enabled, 
packages aren't copied to the node_modules folder, so this check doesn't make sense + // TODO: We ideally should check that the packages are located inside the cache instead + if (config.plugnplayEnabled) { + continue; + } if (!await fs.exists(manifestLoc)) { reportError('packageNotInstalled', `${dep.originalKey}`); continue; diff --git a/src/cli/commands/index.js b/src/cli/commands/index.js index e992381aa5..e162956ef4 100644 --- a/src/cli/commands/index.js +++ b/src/cli/commands/index.js @@ -36,6 +36,7 @@ import * as remove from './remove.js'; import * as run from './run.js'; import * as tag from './tag.js'; import * as team from './team.js'; +import * as unplug from './unplug.js'; import * as unlink from './unlink.js'; import * as upgrade from './upgrade.js'; import * as version from './version.js'; @@ -81,6 +82,7 @@ const commands = { run, tag, team, + unplug, unlink, upgrade, version, diff --git a/src/cli/commands/install.js b/src/cli/commands/install.js index 823a12c598..51baa45c89 100644 --- a/src/cli/commands/install.js +++ b/src/cli/commands/install.js @@ -4,7 +4,7 @@ import type {InstallationMethod} from '../../util/yarn-version.js'; import type {Reporter} from '../../reporters/index.js'; import type {ReporterSelectOption} from '../../reporters/types.js'; import type {Manifest, DependencyRequestPatterns} from '../../types.js'; -import type Config from '../../config.js'; +import type Config, {RootManifests} from '../../config.js'; import type {RegistryNames} from '../../registries/index.js'; import type {LockfileObject} from '../../lockfile'; import {callThroughHook} from '../../util/hooks.js'; @@ -26,6 +26,7 @@ import {normalizePattern} from '../../util/normalize-pattern.js'; import * as fs from '../../util/fs.js'; import map from '../../util/map.js'; import {version as YARN_VERSION, getInstallationMethod} from '../../util/yarn-version.js'; +import {generatePnpMap} from '../../util/generate-pnp-map.js'; import WorkspaceLayout from 
'../../workspace-layout.js'; import ResolutionMap from '../../resolution-map.js'; import guessName from '../../util/guess-name'; @@ -378,6 +379,21 @@ export class Install { stripExcluded(cwdIsRoot ? virtualDependencyManifest : workspaces[projectManifestJson.name].manifest); pushDeps('workspaces', {workspaces: virtualDep}, {hint: 'workspaces', optional: false}, true); + + const implicitWorkspaceDependencies = {...workspaceDependencies}; + + for (const type of constants.OWNED_DEPENDENCY_TYPES) { + for (const dependencyName of Object.keys(projectManifestJson[type] || {})) { + delete implicitWorkspaceDependencies[dependencyName]; + } + } + + pushDeps( + 'dependencies', + {dependencies: implicitWorkspaceDependencies}, + {hint: 'workspaces', optional: false}, + true, + ); } break; @@ -414,6 +430,10 @@ export class Install { } async bailout(patterns: Array, workspaceLayout: ?WorkspaceLayout): Promise { + // PNP is so fast that the integrity check isn't pertinent + if (this.config.plugnplayEnabled) { + return false; + } if (this.flags.skipIntegrityCheck || this.flags.force) { return false; } @@ -586,6 +606,31 @@ export class Install { }), ); + if (this.config.plugnplayEnabled) { + steps.push((curr: number, total: number) => + callThroughHook('pnpStep', async () => { + const pnpPath = `${this.config.lockfileFolder}/${constants.PNP_FILENAME}`; + + const code = await generatePnpMap(this.config, flattenedTopLevelPatterns, { + resolver: this.resolver, + reporter: this.reporter, + targetPath: pnpPath, + workspaceLayout, + }); + + try { + const file = await fs.readFile(pnpPath); + if (file === code) { + return; + } + } catch (error) {} + + await fs.writeFile(pnpPath, code); + await fs.chmod(pnpPath, 0o755); + }), + ); + } + steps.push((curr: number, total: number) => callThroughHook('buildStep', async () => { this.reporter.step( @@ -635,11 +680,47 @@ export class Install { // fin! 
await this.saveLockfileAndIntegrity(topLevelPatterns, workspaceLayout); + await this.persistChanges(); this.maybeOutputUpdate(); this.config.requestManager.clearCache(); return flattenedTopLevelPatterns; } + async persistChanges(): Promise { + // get all the different registry manifests in this folder + const manifests = await this.config.getRootManifests(); + + if (await this.applyChanges(manifests)) { + await this.config.saveRootManifests(manifests); + } + } + + applyChanges(manifests: RootManifests): Promise { + let hasChanged = false; + + if (this.config.plugnplayPersist) { + const {object} = manifests.npm; + + if (typeof object.installConfig !== 'object') { + object.installConfig = {}; + } + + if (this.config.plugnplayEnabled && object.installConfig.pnp !== true) { + object.installConfig.pnp = true; + hasChanged = true; + } else if (!this.config.plugnplayEnabled && typeof object.installConfig.pnp !== 'undefined') { + delete object.installConfig.pnp; + hasChanged = true; + } + + if (Object.keys(object.installConfig).length === 0) { + delete object.installConfig; + } + } + + return Promise.resolve(hasChanged); + } + /** * Check if we should run the cleaning step. 
*/ @@ -796,13 +877,15 @@ export class Install { } // write integrity hash - await this.integrityChecker.save( - patterns, - lockfileBasedOnResolver, - this.flags, - workspaceLayout, - this.scripts.getArtifacts(), - ); + if (!this.config.plugnplayEnabled) { + await this.integrityChecker.save( + patterns, + lockfileBasedOnResolver, + this.flags, + workspaceLayout, + this.scripts.getArtifacts(), + ); + } // --no-lockfile or --pure-lockfile or --frozen-lockfile if (this.flags.lockfile === false || this.flags.pureLockfile || this.flags.frozenLockfile) { diff --git a/src/cli/commands/node.js b/src/cli/commands/node.js index 2f994e5787..a8c752c1a1 100644 --- a/src/cli/commands/node.js +++ b/src/cli/commands/node.js @@ -3,19 +3,32 @@ import type Config from '../../config.js'; import type {Reporter} from '../../reporters/index.js'; import * as child from '../../util/child.js'; -import {NODE_BIN_PATH} from '../../constants'; +import * as fs from '../../util/fs.js'; +import {NODE_BIN_PATH, PNP_FILENAME} from '../../constants'; -export function setFlags(commander: Object) {} +export function setFlags(commander: Object) { + commander.description( + 'Runs Node with the same version that the one used by Yarn itself, and by default from the project root', + ); + commander.usage('node [--into PATH] [... 
args]'); + commander.option('--into ', 'Sets the cwd to the specified location'); +} export function hasWrapper(commander: Object, args: Array): boolean { return true; } export async function run(config: Config, reporter: Reporter, flags: Object, args: Array): Promise { + const pnpPath = `${config.lockfileFolder}/${PNP_FILENAME}`; + + if (await fs.exists(pnpPath)) { + args = ['-r', pnpPath, ...args]; + } + try { await child.spawn(NODE_BIN_PATH, args, { stdio: 'inherit', - cwd: config.cwd, + cwd: flags.into || config.cwd, }); } catch (err) { throw err; diff --git a/src/cli/commands/run.js b/src/cli/commands/run.js index 30b62d61b5..ccc7915df1 100644 --- a/src/cli/commands/run.js +++ b/src/cli/commands/run.js @@ -3,15 +3,63 @@ import type {Reporter} from '../../reporters/index.js'; import type Config from '../../config.js'; import {execCommand, makeEnv} from '../../util/execute-lifecycle-script.js'; +import {dynamicRequire} from '../../util/dynamic-require.js'; import {MessageError} from '../../errors.js'; import {registries} from '../../resolvers/index.js'; import * as fs from '../../util/fs.js'; -import map from '../../util/map.js'; +import * as constants from '../../constants.js'; +const invariant = require('invariant'); const leven = require('leven'); const path = require('path'); const {quoteForShell, sh, unquoted} = require('puka'); +function toObject(input: Map): Object { + const output = Object.create(null); + + for (const [key, val] of input.entries()) { + output[key] = val; + } + + return output; +} + +export async function getBinEntries(config: Config): Promise> { + const binFolders = new Set(); + const binEntries = new Map(); + + // Setup the node_modules/.bin folders for analysis + for (const registry of Object.keys(registries)) { + binFolders.add(path.join(config.cwd, config.registries[registry].folder, '.bin')); + } + + // Same thing, but for the pnp dependencies, located inside the cache + if (await 
fs.exists(`${config.lockfileFolder}/${constants.PNP_FILENAME}`)) { + const pnpApi = dynamicRequire(`${config.lockfileFolder}/${constants.PNP_FILENAME}`); + const topLevelInformation = pnpApi.getPackageInformation({name: null, reference: null}); + + for (const [name, reference] of topLevelInformation.packageDependencies.entries()) { + const dependencyInformation = pnpApi.getPackageInformation({name, reference}); + + if (dependencyInformation.packageLocation) { + const fullPath = path.resolve(config.lockfileFolder, dependencyInformation.packageLocation); + binFolders.add(`${fullPath}/.bin`); + } + } + } + + // Build up a list of possible scripts by exploring the folders marked for analysis + for (const binFolder of binFolders) { + if (await fs.exists(binFolder)) { + for (const name of await fs.readdir(binFolder)) { + binEntries.set(name, path.join(binFolder, name)); + } + } + } + + return binEntries; +} + export function setFlags(commander: Object) { commander.description('Runs a defined package script.'); } @@ -21,36 +69,25 @@ export function hasWrapper(commander: Object, args: Array): boolean { } export async function run(config: Config, reporter: Reporter, flags: Object, args: Array): Promise { - // build up a list of possible scripts const pkg = await config.readManifest(config.cwd); - const scripts = map(); - const binCommands = []; - const visitedBinFolders = new Set(); - let pkgCommands = []; - for (const registry of Object.keys(registries)) { - const binFolder = path.join(config.cwd, config.registries[registry].folder, '.bin'); - if (!visitedBinFolders.has(binFolder)) { - if (await fs.exists(binFolder)) { - for (const name of await fs.readdir(binFolder)) { - binCommands.push(name); - scripts[name] = quoteForShell(path.join(binFolder, name)); - } - } - visitedBinFolders.add(binFolder); - } + + const binCommands = new Set(); + const pkgCommands = new Set(); + + const scripts: Map = new Map(); + + for (const [name, loc] of await getBinEntries(config)) { + 
scripts.set(name, quoteForShell(loc)); + binCommands.add(name); } + const pkgScripts = pkg.scripts; - const cmdHints = {}; - if (pkgScripts) { - // inherit `scripts` from manifest - pkgCommands = Object.keys(pkgScripts).sort(); - // add command hints (what the actual yarn command will do) - for (const cmd of pkgCommands) { - cmdHints[cmd] = pkgScripts[cmd] || ''; + if (pkgScripts) { + for (const name of Object.keys(pkgScripts).sort()) { + scripts.set(name, pkgScripts[name] || ''); + pkgCommands.add(name); } - - Object.assign(scripts, pkgScripts); } async function runCommand(args): Promise { @@ -65,14 +102,18 @@ export async function run(config: Config, reporter: Reporter, flags: Object, arg cmds.push([preAction, pkgScripts[preAction]]); } - cmds.push([action, scripts[action]]); + const script = scripts.get(action); + invariant(script, 'Script must exist'); + cmds.push([action, script]); const postAction = `post${action}`; if (postAction in pkgScripts) { cmds.push([postAction, pkgScripts[postAction]]); } - } else if (scripts[action]) { - cmds.push([action, scripts[action]]); + } else if (scripts.has(action)) { + const script = scripts.get(action); + invariant(script, 'Script must exist'); + cmds.push([action, script]); } if (cmds.length) { @@ -86,7 +127,7 @@ export async function run(config: Config, reporter: Reporter, flags: Object, arg stage, config, cmd: cmdWithArgs, - cwd: config.cwd, + cwd: flags.into || config.cwd, isInteractive: true, customShell: customShell ? 
String(customShell) : undefined, }); @@ -113,15 +154,23 @@ export async function run(config: Config, reporter: Reporter, flags: Object, arg // list possible scripts if none specified if (args.length === 0) { - if (binCommands.length) { - reporter.info(`${reporter.lang('binCommands') + binCommands.join(', ')}`); + if (binCommands.size > 0) { + reporter.info(`${reporter.lang('binCommands') + Array.from(binCommands).join(', ')}`); } else { reporter.error(reporter.lang('noBinAvailable')); } - if (pkgCommands.length) { + const printedCommands: Map = new Map(); + + for (const pkgCommand of pkgCommands) { + const action = scripts.get(pkgCommand); + invariant(action, 'Action must exists'); + printedCommands.set(pkgCommand, action); + } + + if (pkgCommands.size > 0) { reporter.info(`${reporter.lang('possibleCommands')}`); - reporter.list('possibleCommands', pkgCommands, cmdHints); + reporter.list('possibleCommands', Array.from(pkgCommands), toObject(printedCommands)); if (!flags.nonInteractive) { await reporter .question(reporter.lang('commandQuestion')) diff --git a/src/cli/commands/unplug.js b/src/cli/commands/unplug.js new file mode 100644 index 0000000000..34381a9526 --- /dev/null +++ b/src/cli/commands/unplug.js @@ -0,0 +1,80 @@ +/* @flow */ + +import type {Reporter} from '../../reporters/index.js'; +import type Config from '../../config.js'; +import Lockfile from '../../lockfile'; +import {wrapLifecycle, Install} from './install.js'; +import {MessageError} from '../../errors.js'; +import * as fs from '../../util/fs.js'; + +const path = require('path'); + +export function hasWrapper(commander: Object): boolean { + return true; +} + +export function setFlags(commander: Object) { + commander.description( + 'Temporarily copies a package (with an optional @range suffix) outside of the global cache for debugging purposes', + ); + commander.usage('unplug [packages ...] 
[flags]'); + commander.option('--clear', 'Delete the selected packages'); + commander.option('--clear-all', 'Delete all unplugged packages'); +} + +export async function run(config: Config, reporter: Reporter, flags: Object, args: Array): Promise { + if (!config.plugnplayEnabled) { + throw new MessageError(reporter.lang('unplugDisabled')); + } + if (!args.length && flags.clear) { + throw new MessageError(reporter.lang('tooFewArguments', 1)); + } + if (args.length && flags.clearAll) { + throw new MessageError(reporter.lang('noArguments')); + } + + if (flags.clearAll) { + await clearAll(config); + } else if (flags.clear) { + await clearSome(config, new Set(args)); + } else if (args.length > 0) { + const lockfile = await Lockfile.fromDirectory(config.lockfileFolder, reporter); + await wrapLifecycle(config, flags, async () => { + const install = new Install(flags, config, reporter, lockfile); + install.linker.unplugged = args; + await install.init(); + }); + } + + const unpluggedPackageFolders = await config.listUnpluggedPackageFolders(); + + for (const target of unpluggedPackageFolders.values()) { + reporter.log(target, {force: true}); + } +} + +export async function clearSome(config: Config, filters: Set): Promise { + const unpluggedPackageFolders = await config.listUnpluggedPackageFolders(); + const removeList = []; + + for (const [unpluggedName, target] of unpluggedPackageFolders.entries()) { + const {name} = await fs.readJson(path.join(target, 'package.json')); + const toBeRemoved = filters.has(name); + + if (toBeRemoved) { + removeList.push(path.join(config.getUnpluggedPath(), unpluggedName)); + } + } + + if (removeList.length === unpluggedPackageFolders.size) { + await fs.unlink(config.getUnpluggedPath()); + } else { + for (const unpluggedPackagePath of removeList) { + await fs.unlink(unpluggedPackagePath); + } + } +} + +export async function clearAll(config: Config): Promise { + await fs.unlink(config.getUnpluggedPath()); +} diff --git a/src/cli/index.js 
b/src/cli/index.js index e1c1bb7e64..5b2ea0e050 100644 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -71,6 +71,8 @@ export async function main({ commander.option('--verbose', 'output verbose messages on internal operations'); commander.option('--offline', 'trigger an error if any required dependencies are not available in local cache'); commander.option('--prefer-offline', 'use network only if dependencies are not available in local cache'); + commander.option('--enable-pnp, --pnp', "enable the Plug'n'Play installation"); + commander.option('--disable-pnp', "disable the Plug'n'Play installation"); commander.option('--strict-semver'); commander.option('--json', 'format Yarn log messages as lines of JSON (see jsonlines.org)'); commander.option('--ignore-scripts', "don't run lifecycle scripts"); @@ -186,13 +188,18 @@ export async function main({ const PROXY_COMMANDS = new Set([`run`, `create`, `node`]); if (PROXY_COMMANDS.has(commandName)) { if (endArgs.length === 0) { + let preservedArgs = 0; // the "run" and "create" command take one argument that we want to parse as usual (the // script/package name), hence the splice(1) if (command === commands.run || command === commands.create) { - endArgs = ['--', ...args.splice(1)]; - } else { - endArgs = ['--', ...args]; + preservedArgs += 1; + } + // If the --into option immediately follows the command (or the script name in the "run/create" + // case), we parse them as regular options so that we can cd into them + if (args[preservedArgs] === `--into`) { + preservedArgs += 2; } + endArgs = ['--', ...args.splice(preservedArgs)]; } else { warnAboutRunDashDash = true; } @@ -494,6 +501,8 @@ export async function main({ cwd, commandName, + enablePnp: commander.pnp, + disablePnp: commander.disablePnp, enableDefaultRc: commander.defaultRc, extraneousYarnrcFiles: commander.useYarnrc, binLinks: commander.binLinks, @@ -516,7 +525,6 @@ export async function main({ networkConcurrency: commander.networkConcurrency, networkTimeout: 
commander.networkTimeout, nonInteractive: commander.nonInteractive, - scriptsPrependNodePath: commander.scriptsPrependNodePath, updateChecksums: commander.updateChecksums, focus: commander.focus, }) diff --git a/src/config.js b/src/config.js index 953ae2c702..4374375b9b 100644 --- a/src/config.js +++ b/src/config.js @@ -47,7 +47,10 @@ export type ConfigOptions = { childConcurrency?: number, networkTimeout?: number, nonInteractive?: boolean, + enablePnp?: boolean, + disablePnp?: boolean, scriptsPrependNodePath?: boolean, + offlineCacheFolder?: string, enableDefaultRc?: boolean, extraneousYarnrcFiles?: Array, @@ -74,7 +77,7 @@ type PackageMetadata = { package: Manifest, }; -type RootManifests = { +export type RootManifests = { [registryName: RegistryNames]: { loc: string, indent: ?string, @@ -167,11 +170,20 @@ export default class Config { nonInteractive: boolean; + plugnplayPersist: boolean; + plugnplayEnabled: boolean; + plugnplayShebang: ?string; + plugnplayBlacklist: ?string; + plugnplayUnplugged: Array; + plugnplayPurgeUnpluggedPackages: boolean; + scriptsPrependNodePath: boolean; workspacesEnabled: boolean; workspacesNohoistEnabled: boolean; + offlineCacheFolder: ?string; + // cwd: string; workspaceRootFolder: ?string; @@ -365,16 +377,47 @@ export default class Config { } else { this._cacheRootFolder = String(cacheRootFolder); } + + const manifest = await this.maybeReadManifest(this.cwd); + + const plugnplayByEnv = this.getOption('plugnplay-override'); + if (plugnplayByEnv != null) { + this.plugnplayEnabled = plugnplayByEnv !== 'false' && plugnplayByEnv !== '0'; + this.plugnplayPersist = false; + } else if (opts.enablePnp || opts.disablePnp) { + this.plugnplayEnabled = !!opts.enablePnp; + this.plugnplayPersist = true; + } else if (manifest && manifest.installConfig && manifest.installConfig.pnp) { + this.plugnplayEnabled = !!manifest.installConfig.pnp; + this.plugnplayPersist = false; + } else { + this.plugnplayEnabled = false; + this.plugnplayPersist = false; 
+ } + + if (process.platform === 'win32') { + if (this.plugnplayEnabled) { + this.reporter.warn(this.reporter.lang('plugnplayWindowsSupport')); + } + this.plugnplayEnabled = false; + this.plugnplayPersist = false; + } + + this.plugnplayShebang = String(this.getOption('plugnplay-shebang') || '') || '/usr/bin/env node'; + this.plugnplayBlacklist = String(this.getOption('plugnplay-blacklist') || '') || null; + this.workspacesEnabled = this.getOption('workspaces-experimental') !== false; this.workspacesNohoistEnabled = this.getOption('workspaces-nohoist-experimental') !== false; + this.offlineCacheFolder = String(this.getOption('offline-cache-folder') || '') || null; + this.pruneOfflineMirror = Boolean(this.getOption('yarn-offline-mirror-pruning')); this.enableMetaFolder = Boolean(this.getOption('enable-meta-folder')); this.enableLockfileVersions = Boolean(this.getOption('yarn-enable-lockfile-versions')); this.linkFileDependencies = Boolean(this.getOption('yarn-link-file-dependencies')); this.packBuiltPackages = Boolean(this.getOption('experimental-pack-script-packages-in-mirror')); - this.autoAddIntegrity = !Boolean(this.getOption('unsafe-disable-integrity-migration')); + this.autoAddIntegrity = !this.getOption('unsafe-disable-integrity-migration'); //init & create cacheFolder, tempFolder this.cacheFolder = path.join(this._cacheRootFolder, 'v' + String(constants.CACHE_VERSION)); @@ -422,6 +465,8 @@ export default class Config { this.offline = !!opts.offline; this.binLinks = !!opts.binLinks; this.updateChecksums = !!opts.updateChecksums; + this.plugnplayUnplugged = []; + this.plugnplayPurgeUnpluggedPackages = false; this.ignorePlatform = !!opts.ignorePlatform; this.ignoreScripts = !!opts.ignoreScripts; @@ -446,6 +491,37 @@ export default class Config { this.focusedWorkspaceName = ''; } + /** + * Generate a name suitable as unique filesystem identifier for the specified package. 
+ */ + + generateUniquePackageSlug(pkg: PackageReference): string { + let slug = pkg.name; + + slug = slug.replace(/[^@a-z0-9]+/g, '-'); + slug = slug.replace(/^-+|-+$/g, ''); + + if (pkg.registry) { + slug = `${pkg.registry}-${slug}`; + } else { + slug = `unknown-${slug}`; + } + + const {hash} = pkg.remote; + + if (pkg.version) { + slug += `-${pkg.version}`; + } + + if (pkg.uid && pkg.version !== pkg.uid) { + slug += `-${pkg.uid}`; + } else if (hash) { + slug += `-${hash}`; + } + + return slug; + } + /** * Generate an absolute module path. */ @@ -454,21 +530,45 @@ export default class Config { invariant(this.cacheFolder, 'No package root'); invariant(pkg, 'Undefined package'); - let name = pkg.name; - let uid = pkg.uid; - if (pkg.registry) { - name = `${pkg.registry}-${name}`; + const slug = this.generateUniquePackageSlug(pkg); + return path.join(this.cacheFolder, slug, 'node_modules', pkg.name); + } + + /** + */ + + getUnpluggedPath(): string { + return path.join(this.lockfileFolder, '.pnp', 'unplugged'); + } + + /** + */ + + generatePackageUnpluggedPath(pkg: PackageReference): string { + const slug = this.generateUniquePackageSlug(pkg); + return path.join(this.getUnpluggedPath(), slug, 'node_modules', pkg.name); + } + + /** + */ + + async listUnpluggedPackageFolders(): Promise> { + const unpluggedPackages = new Map(); + const unpluggedPath = this.getUnpluggedPath(); + + if (!await fs.exists(unpluggedPath)) { + return unpluggedPackages; } - const {hash} = pkg.remote; + for (const unpluggedName of await fs.readdir(unpluggedPath)) { + const nmListing = await fs.readdir(path.join(unpluggedPath, unpluggedName, 'node_modules')); + invariant(nmListing.length === 1, 'A single folder should be in the unplugged directory'); - if (pkg.version && pkg.version !== pkg.uid) { - uid = `${pkg.version}-${uid}`; - } else if (hash) { - uid += `-${hash}`; + const target = path.join(unpluggedPath, unpluggedName, `node_modules`, nmListing[0]); + unpluggedPackages.set(unpluggedName, 
target); } - return path.join(this.cacheFolder, `${name}-${uid}`); + return unpluggedPackages; } /** diff --git a/src/constants.js b/src/constants.js index 969b23207f..e93a098c59 100644 --- a/src/constants.js +++ b/src/constants.js @@ -11,6 +11,8 @@ type Env = { }; export const DEPENDENCY_TYPES = ['devDependencies', 'dependencies', 'optionalDependencies', 'peerDependencies']; +export const OWNED_DEPENDENCY_TYPES = ['devDependencies', 'dependencies', 'optionalDependencies']; + export const RESOLUTIONS = 'resolutions'; export const MANIFEST_FIELDS = [RESOLUTIONS, ...DEPENDENCY_TYPES]; @@ -26,7 +28,7 @@ export const YARN_INSTALLER_MSI = 'https://yarnpkg.com/latest.msi'; export const SELF_UPDATE_VERSION_URL = 'https://yarnpkg.com/latest-version'; // cache version, bump whenever we make backwards incompatible changes -export const CACHE_VERSION = 2; +export const CACHE_VERSION = 3; // lockfile version, bump whenever we make backwards incompatible changes export const LOCKFILE_VERSION = 1; @@ -76,6 +78,8 @@ function getYarnBinPath(): string { export const NODE_MODULES_FOLDER = 'node_modules'; export const NODE_PACKAGE_JSON = 'package.json'; +export const PNP_FILENAME = '.pnp.js'; + export const POSIX_GLOBAL_PREFIX = `${process.env.DESTDIR || ''}/usr/local`; export const FALLBACK_GLOBAL_PREFIX = path.join(userHome, '.yarn'); diff --git a/src/fetchers/base-fetcher.js b/src/fetchers/base-fetcher.js index 7ddbfcd492..94b43fbea4 100644 --- a/src/fetchers/base-fetcher.js +++ b/src/fetchers/base-fetcher.js @@ -6,6 +6,7 @@ import type {PackageRemote, FetchedMetadata, FetchedOverride} from '../types.js' import type {RegistryNames} from '../registries/index.js'; import type Config from '../config.js'; import normalizeManifest from '../util/normalize-manifest/index.js'; +import {makePortableProxyScript} from '../util/portable-script.js'; import * as constants from '../constants.js'; import * as fs from '../util/fs.js'; @@ -62,6 +63,24 @@ export default class BaseFetcher { } })(); + 
if (pkg.bin) { + for (const binName of Object.keys(pkg.bin)) { + const binDest = `${this.dest}/.bin`; + + // Using any sort of absolute path here would prevent makePortableProxyScript from preserving symlinks when + // calling the binary + const src = path.resolve(this.dest, pkg.bin[binName]); + + if (await fs.exists(src)) { + // We ensure that the target is executable + await fs.chmod(src, 0o755); + } + + await fs.mkdirp(binDest); + await fs.symlink(src, `${binDest}/${binName}`); + } + } + await fs.writeFile( path.join(this.dest, constants.METADATA_FILENAME), JSON.stringify( diff --git a/src/integrity-checker.js b/src/integrity-checker.js index 122146b995..3f4ab4dbf1 100644 --- a/src/integrity-checker.js +++ b/src/integrity-checker.js @@ -248,6 +248,10 @@ export default class InstallationIntegrityChecker { result.flags.push('production'); } + if (this.config.plugnplayEnabled) { + result.flags.push('plugnplay'); + } + const linkedModules = this.config.linkedModules; if (linkedModules.length) { diff --git a/src/package-hoister.js b/src/package-hoister.js index eb2ba8f8a8..7df285936f 100644 --- a/src/package-hoister.js +++ b/src/package-hoister.js @@ -945,7 +945,7 @@ export class NohoistResolver { * algorithm: a nohoist package should never be hoisted beyond the top of its branch, i.e. * the first element of its parts. Therefore the highest possible hoisting index is 1, * unless the package has only 1 part (itself), in such case returns null just like any hoisted package - * + * */ highestHoistingPoint = (info: HoistManifest): ?number => { @@ -954,13 +954,16 @@ export class NohoistResolver { // private functions _isNohoist = (info: HoistManifest): boolean => { - if (!info.nohoistList || info.nohoistList.length <= 0) { + if (this._isTopPackage(info)) { return false; } - const path = this._originalPath(info); - - // top package can not be marked 'nohoist' because it is already at the top (hoisted). 
- return !this._isTopPackage(info) && mm.any(path, info.nohoistList); + if (info.nohoistList && info.nohoistList.length > 0 && mm.any(this._originalPath(info), info.nohoistList)) { + return true; + } + if (this._config.plugnplayEnabled) { + return true; + } + return false; }; _isRootPackage = (pkg: Manifest): boolean => { return pkg.name === this._wsRootPackageName; @@ -970,7 +973,8 @@ export class NohoistResolver { }; _makePath(...args: Array): string { const parts = args.map(s => (s === this._wsRootPackageName ? WS_ROOT_ALIAS : s)); - return parts.join('/'); + const result = parts.join('/'); + return result[0] === '/' ? result : '/' + result; } _isTopPackage = (info: HoistManifest): boolean => { const parentParts = info.parts.slice(0, -1); diff --git a/src/package-linker.js b/src/package-linker.js index 5e7bda54c6..c0f69d921a 100644 --- a/src/package-linker.js +++ b/src/package-linker.js @@ -1,6 +1,7 @@ /* @flow */ import type {Manifest} from './types.js'; +import type PackageReference from './package-reference.js'; import type PackageResolver from './package-resolver.js'; import type {Reporter} from './reporters/index.js'; import type Config from './config.js'; @@ -10,6 +11,7 @@ import type {InstallArtifacts} from './package-install-scripts.js'; import PackageHoister from './package-hoister.js'; import * as constants from './constants.js'; import * as promise from './util/promise.js'; +import {normalizePattern} from './util/normalize-pattern.js'; import {entries} from './util/misc.js'; import * as fs from './util/fs.js'; import lockMutex from './util/mutex.js'; @@ -19,6 +21,7 @@ import WorkspaceLayout from './workspace-layout.js'; const invariant = require('invariant'); const cmdShim = require('@zkochan/cmd-shim'); const path = require('path'); +const semver = require('semver'); // Concurrency for creating bin links disabled because of the issue #1961 const linkBinConcurrency = 1; @@ -49,6 +52,7 @@ export default class PackageLinker { this.config = config; 
this.artifacts = {}; this.topLevelBinLinking = true; + this.unplugged = []; } artifacts: InstallArtifacts; @@ -56,6 +60,7 @@ export default class PackageLinker { resolver: PackageResolver; config: Config; topLevelBinLinking: boolean; + unplugged: Array; _treeHash: ?Map; setArtifacts(artifacts: InstallArtifacts) { @@ -141,6 +146,7 @@ export default class PackageLinker { // write the executables for (const {dep, loc} of deps) { if (dep._reference && dep._reference.locations.length) { + invariant(!dep._reference.isPlugnplay, "Plug'n'play packages should not be referenced here"); await this.linkSelfDependencies(dep, loc, dir); } } @@ -270,6 +276,22 @@ export default class PackageLinker { } } + if (this.config.plugnplayEnabled) { + ref.isPlugnplay = true; + if (await this._isUnplugged(pkg, ref)) { + dest = this.config.generatePackageUnpluggedPath(ref); + + // We don't skip the copy if the unplugged package isn't materialized yet + if (await fs.exists(dest)) { + ref.addLocation(dest); + continue; + } + } else { + ref.addLocation(src); + continue; + } + } + ref.addLocation(dest); const integrityArtifacts = this.artifacts[`${pkg.name}@${pkg.version}`]; @@ -434,6 +456,7 @@ export default class PackageLinker { } }, }); + await fs.hardlinkBulk(Array.from(hardlinkQueue.values()), this.reporter, { possibleExtraneous, artifactFiles, @@ -472,7 +495,7 @@ export default class PackageLinker { await promise.queue( flatTree, async ([dest, {pkg, isNohoist, parts}]) => { - if (pkg._reference && pkg._reference.locations.length) { + if (pkg._reference && pkg._reference.locations.length && !pkg._reference.isPlugnplay) { const binLoc = path.join(dest, this.config.getFolder(pkg)); await this.linkBinDependencies(pkg, binLoc); if (isNohoist) { @@ -482,6 +505,7 @@ export default class PackageLinker { } tickBin(); } + tickBin(); }, linkBinConcurrency, ); @@ -490,7 +514,13 @@ export default class PackageLinker { await promise.queue( topLevelDependencies, async ([dest, {pkg}]) => { - if 
(pkg._reference && pkg._reference.locations.length && pkg.bin && Object.keys(pkg.bin).length) { + if ( + pkg._reference && + pkg._reference.locations.length && + !pkg._reference.isPlugnplay && + pkg.bin && + Object.keys(pkg.bin).length + ) { let binLoc; if (this.config.modulesFolder) { binLoc = path.join(this.config.modulesFolder); @@ -498,8 +528,8 @@ export default class PackageLinker { binLoc = path.join(this.config.lockfileFolder, this.config.getFolder(pkg)); } await this.linkSelfDependencies(pkg, dest, binLoc); - tickBin(); } + tickBin(); }, linkBinConcurrency, ); @@ -653,6 +683,25 @@ export default class PackageLinker { } } + async _isUnplugged(pkg: Manifest, ref: PackageReference): Promise { + // If an unplugged folder exists for the specified package, we simply use it + if (await fs.exists(this.config.generatePackageUnpluggedPath(ref))) { + return true; + } + + // If the package has a postinstall script, we also unplug it (otherwise they would run into the cache) + if (pkg.scripts && (pkg.scripts.preinstall || pkg.scripts.install || pkg.scripts.postinstall)) { + return true; + } + + // Check whether the user explicitly requested for the package to be unplugged + return this.unplugged.some(patternToUnplug => { + const {name, range, hasVersion} = normalizePattern(patternToUnplug); + const satisfiesSemver = hasVersion ? 
semver.satisfies(ref.version, range) : true; + return name === ref.name && satisfiesSemver; + }); + } + async init( patterns: Array, workspaceLayout?: WorkspaceLayout, @@ -660,5 +709,9 @@ export default class PackageLinker { ): Promise { this.resolvePeerModules(); await this.copyModules(patterns, workspaceLayout, {linkDuplicates, ignoreOptional}); + + if (!this.config.plugnplayEnabled) { + await fs.unlink(`${this.config.lockfileFolder}/${constants.PNP_FILENAME}`); + } } } diff --git a/src/package-reference.js b/src/package-reference.js index 560168c41c..42de0226d2 100644 --- a/src/package-reference.js +++ b/src/package-reference.js @@ -17,6 +17,8 @@ export default class PackageReference { this.config = request.config; this.hint = request.hint; + this.isPlugnplay = false; + this.registry = remote.registry; this.version = info.version; this.name = info.name; @@ -41,6 +43,7 @@ export default class PackageReference { lockfile: Lockfile; config: Config; + isPlugnplay: boolean; level: number; name: string; version: string; @@ -63,7 +66,9 @@ export default class PackageReference { } addLocation(loc: string) { - this.locations.push(loc); + if (this.locations.indexOf(loc) === -1) { + this.locations.push(loc); + } } addRequest(request: PackageRequest) { diff --git a/src/rc.js b/src/rc.js index fe19259d95..98e71a28f3 100644 --- a/src/rc.js +++ b/src/rc.js @@ -9,7 +9,14 @@ import {parse} from './lockfile'; import * as rcUtil from './util/rc.js'; // Keys that will get resolved relative to the path of the rc file they belong to -const PATH_KEYS = new Set(['yarn-path', 'cache-folder', 'global-folder', 'modules-folder', 'cwd']); +const PATH_KEYS = new Set([ + 'yarn-path', + 'cache-folder', + 'global-folder', + 'modules-folder', + 'cwd', + 'offline-cache-folder', +]); // given a cwd, load all .yarnrc files relative to it export function getRcConfigForCwd(cwd: string, args: Array): {[key: string]: string} { diff --git a/src/registries/yarn-registry.js 
b/src/registries/yarn-registry.js index 8c58760296..020638afa5 100644 --- a/src/registries/yarn-registry.js +++ b/src/registries/yarn-registry.js @@ -31,7 +31,7 @@ export const DEFAULTS = { 'user-agent': [`yarn/${version}`, 'npm/?', `node/${process.version}`, process.platform, process.arch].join(' '), }; -const RELATIVE_KEYS = ['yarn-offline-mirror', 'cache-folder']; +const RELATIVE_KEYS = ['yarn-offline-mirror', 'cache-folder', 'offline-cache-folder']; const npmMap = { 'version-git-sign': 'sign-git-tag', diff --git a/src/reporters/lang/en.js b/src/reporters/lang/en.js index 346d002cd4..03b4739491 100644 --- a/src/reporters/lang/en.js +++ b/src/reporters/lang/en.js @@ -355,6 +355,11 @@ const messages = { downloadGitWithoutCommit: 'Downloading the git repo $0 over plain git without a commit hash', downloadHTTPWithoutCommit: 'Downloading the git repo $0 over HTTP without a commit hash', + unplugDisabled: "Packages can only be unplugged when Plug'n'Play is enabled.", + + plugnplayWindowsSupport: + "Plug'n'Play is ignored on Windows for now - contributions welcome! 
https://github.com/yarnpkg/yarn/issues/6402", + packageInstalledWithBinaries: 'Installed $0 with binaries:', packageHasBinaries: '$0 has binaries:', packageHasNoBinaries: '$0 has no binaries', diff --git a/src/resolvers/registries/npm-resolver.js b/src/resolvers/registries/npm-resolver.js index 4d31b09aa5..6d9ae791ee 100644 --- a/src/resolvers/registries/npm-resolver.js +++ b/src/resolvers/registries/npm-resolver.js @@ -1,11 +1,12 @@ /* @flow */ +import {getCachedPackagesDirs} from '../../cli/commands/cache.js'; import type {Manifest} from '../../types.js'; import type Config from '../../config.js'; import type PackageRequest from '../../package-request.js'; import {MessageError} from '../../errors.js'; import RegistryResolver from './registry-resolver.js'; -import NpmRegistry, {SCOPE_SEPARATOR} from '../../registries/npm-registry.js'; +import NpmRegistry from '../../registries/npm-registry.js'; import map from '../../util/map.js'; import * as fs from '../../util/fs.js'; import {YARN_REGISTRY, NPM_REGISTRY_RE} from '../../constants.js'; @@ -13,7 +14,6 @@ import {getPlatformSpecificPackageFilename} from '../../util/package-name-utils. const inquirer = require('inquirer'); const tty = require('tty'); -const invariant = require('invariant'); const path = require('path'); const semver = require('semver'); const ssri = require('ssri'); @@ -101,55 +101,18 @@ export default class NpmResolver extends RegistryResolver { } async resolveRequestOffline(): Promise { - const escapedName = NpmRegistry.escapeName(this.name); - const scope = this.config.registries.npm.getScope(escapedName); - - // find modules of this name - const prefix = scope ? escapedName.split(SCOPE_SEPARATOR)[1] : `${NPM_REGISTRY_ID}-${this.name}-`; - - invariant(this.config.cacheFolder, 'expected packages root'); - const cacheFolder = path.join(this.config.cacheFolder, scope ? 
`${NPM_REGISTRY_ID}-${scope}` : ''); - - const files = await this.config.getCache('cachedPackages', async (): Promise> => { - // Try to read the folder. - let files = []; - try { - files = await fs.readdir(cacheFolder); - } catch (err) { - if (err.code === 'ENOENT') { - return []; - } - throw err; - } - - const validFiles = []; - - for (const name of files) { - // no hidden files - if (name[0] === '.') { - continue; - } - - // ensure valid module cache - const dir = path.join(cacheFolder, name); - if (await this.config.isValidModuleDest(dir)) { - validFiles.push(name); - } - } - - return validFiles; + const packageDirs = await this.config.getCache('cachedPackages', (): Promise> => { + return getCachedPackagesDirs(this.config, this.config.cacheFolder); }); const versions = map(); - for (const name of files) { - // check if folder starts with our prefix - if (name.indexOf(prefix) !== 0) { + for (const dir of packageDirs) { + // check if folder contains the registry prefix + if (dir.indexOf(`${NPM_REGISTRY_ID}-`) === -1) { continue; } - const dir = path.join(cacheFolder, name); - // read manifest and validate correct name const pkg = await this.config.readManifest(dir, NPM_REGISTRY_ID); if (pkg.name !== this.name) { diff --git a/src/types.js b/src/types.js index 843324d914..092e9b20cd 100644 --- a/src/types.js +++ b/src/types.js @@ -135,6 +135,10 @@ export type Manifest = { bundleDependencies?: Array, bundledDependencies?: Array, + installConfig?: { + pnp?: boolean, + }, + deprecated?: string, files?: Array, main?: string, diff --git a/src/util/dynamic-require.js b/src/util/dynamic-require.js new file mode 100644 index 0000000000..0fef3c653d --- /dev/null +++ b/src/util/dynamic-require.js @@ -0,0 +1,4 @@ +/* @flow */ + +// $FlowFixMe We want this require to be dynamic +exports.dynamicRequire = typeof __webpack_require__ !== 'undefined' ? 
__non_webpack_require__ : require; // eslint-disable-line diff --git a/src/util/execute-lifecycle-script.js b/src/util/execute-lifecycle-script.js index b2e9594dff..31d7dd3be4 100644 --- a/src/util/execute-lifecycle-script.js +++ b/src/util/execute-lifecycle-script.js @@ -4,7 +4,9 @@ import type Config from '../config.js'; import {MessageError, ProcessTermError} from '../errors.js'; import * as constants from '../constants.js'; import * as child from './child.js'; -import {exists} from './fs.js'; +import * as fs from './fs.js'; +import {dynamicRequire} from './dynamic-require.js'; +import {makePortableProxyScript} from './portable-script.js'; import {registries} from '../resolvers/index.js'; import {fixCmdWinSlashes} from './fix-cmd-win-slashes.js'; import {getBinFolder as getGlobalBinFolder, run as globalRun} from '../cli/commands/global.js'; @@ -24,6 +26,36 @@ export const IGNORE_MANIFEST_KEYS: Set = new Set(['readme', 'notice', 'l // See https://github.com/yarnpkg/yarn/issues/2286. 
const IGNORE_CONFIG_KEYS = ['lastUpdateCheck']; +async function getPnpParameters(config: Config): Promise> { + if (await fs.exists(`${config.lockfileFolder}/${constants.PNP_FILENAME}`)) { + return ['-r', `${config.lockfileFolder}/${constants.PNP_FILENAME}`]; + } else { + return []; + } +} + +let wrappersFolder = null; + +export async function getWrappersFolder(config: Config): Promise { + if (wrappersFolder) { + return wrappersFolder; + } + + wrappersFolder = await fs.makeTempDir(); + + await makePortableProxyScript(process.execPath, wrappersFolder, { + proxyBasename: 'node', + prependArguments: [...(await getPnpParameters(config))], + }); + + await makePortableProxyScript(process.execPath, wrappersFolder, { + proxyBasename: 'yarn', + prependArguments: [process.argv[1]], + }); + + return wrappersFolder; +} + const INVALID_CHAR_REGEX = /\W/g; export async function makeEnv( @@ -167,7 +199,7 @@ export async function makeEnv( pathParts.unshift(globalBin); } - // add .bin folders to PATH + // Add node_modules .bin folders to the PATH for (const registry of Object.keys(registries)) { const binFolder = path.join(config.registries[registry].folder, '.bin'); if (config.workspacesEnabled && config.workspaceRootFolder) { @@ -180,10 +212,27 @@ export async function makeEnv( } } - if (config.scriptsPrependNodePath) { - pathParts.unshift(path.join(path.dirname(process.execPath))); + // Otherwise, only add the top-level dependencies to the PATH + // Note that this isn't enough when executing scripts from subdependencies, but since dependencies with postinstall + // scripts have other issues that require us to make them fallback to regular node_modules installation (like sharing + // artifacts), we can sit on this one until we fix everything at once. 
+ if (await fs.exists(`${config.lockfileFolder}/${constants.PNP_FILENAME}`)) { + const pnpApi = dynamicRequire(`${config.lockfileFolder}/${constants.PNP_FILENAME}`); + const topLevelInformation = pnpApi.getPackageInformation({name: null, reference: null}); + + for (const [name, reference] of topLevelInformation.packageDependencies.entries()) { + const dependencyInformation = pnpApi.getPackageInformation({name, reference}); + + if (!dependencyInformation || !dependencyInformation.packageLocation) { + continue; + } + + pathParts.unshift(`${dependencyInformation.packageLocation}/.bin`); + } } + pathParts.unshift(await getWrappersFolder(config)); + // join path back together env[constants.ENV_PATH_KEY] = pathParts.join(path.delimiter); @@ -256,7 +305,7 @@ async function _checkForGyp(config: Config, paths: Array): Promise const {reporter} = config; // Check every directory in the PATH - const allChecks = await Promise.all(paths.map(dir => exists(path.join(dir, 'node-gyp')))); + const allChecks = await Promise.all(paths.map(dir => fs.exists(path.join(dir, 'node-gyp')))); if (allChecks.some(Boolean)) { // node-gyp is available somewhere return; diff --git a/src/util/fs.js b/src/util/fs.js index 8222a0cb16..7877d0a63c 100644 --- a/src/util/fs.js +++ b/src/util/fs.js @@ -675,10 +675,18 @@ export async function find(filename: string, dir: string): Promise { + if (process.platform !== 'win32') { + // use relative paths otherwise which will be retained if the directory is moved + src = path.relative(path.dirname(dest), src); + // When path.relative returns an empty string for the current directory, we should instead use + // '.', which is a valid fs.symlink target. 
+ src = src || '.'; + } + try { const stats = await lstat(dest); if (stats.isSymbolicLink()) { - const resolved = await realpath(dest); + const resolved = dest; if (resolved === src) { return; } @@ -688,6 +696,7 @@ export async function symlink(src: string, dest: string): Promise { throw err; } } + // We use rimraf for unlink which never throws an ENOENT on missing target await unlink(dest); @@ -695,11 +704,7 @@ export async function symlink(src: string, dest: string): Promise { // use directory junctions if possible on win32, this requires absolute paths await fsSymlink(src, dest, 'junction'); } else { - // use relative paths otherwise which will be retained if the directory is moved - const relative = path.relative(path.dirname(dest), src); - // When path.relative returns an empty string for the current directory, we should instead use - // '.', which is a valid fs.symlink target. - await fsSymlink(relative || '.', dest); + await fsSymlink(src, dest); } } diff --git a/src/util/generate-pnp-map-api.tpl.js b/src/util/generate-pnp-map-api.tpl.js new file mode 100644 index 0000000000..956b8d5c15 --- /dev/null +++ b/src/util/generate-pnp-map-api.tpl.js @@ -0,0 +1,814 @@ +#!$$SHEBANG + +/* eslint-disable max-len, flowtype/require-valid-file-annotation, flowtype/require-return-type */ +/* global packageInformationStores, $$BLACKLIST, $$SETUP_STATIC_TABLES */ + +// Used for the resolveUnqualified part of the resolution (ie resolving folder/index.js & file extensions) +// Deconstructed so that they aren't affected by any fs monkeypatching occuring later during the execution +const {statSync, lstatSync, readlinkSync, readFileSync, existsSync, realpathSync} = require('fs'); + +const Module = require('module'); +const path = require('path'); +const StringDecoder = require('string_decoder'); + +const ignorePattern = $$BLACKLIST ? 
new RegExp($$BLACKLIST) : null; + +const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding('natives'))); +const patchedModules = new Map(); + +// Splits a require request into its components, or return null if the request is a file path +const pathRegExp = /^(?!\.{0,2}(?:\/|$))((?:@[^\/]+\/)?[^\/]+)\/?(.*|)$/; + +// Matches if the path starts with a valid path qualifier (./, ../, /) +// eslint-disable-next-line no-unused-vars +const isStrictRegExp = /^\.{0,2}\//; + +// Matches if the path must point to a directory (ie ends with /) +const isDirRegExp = /\/$/; + +const topLevelLocator = {name: null, reference: null}; +const blacklistedLocator = {name: NaN, reference: NaN}; + +const pnpModule = module; + +/** + * Used to disable the resolution hooks (for when we want to fallback to the previous resolution - we then need + * a way to "reset" the environment temporarily) + */ + +let enableNativeHooks = true; + +/** + * Simple helper function that assign an error code to an error, so that it can more easily be caught and used + * by third-parties. + */ + +function makeError(code, message, data = {}) { + const error = new Error(message); + return Object.assign(error, {code, data}); +} + +/** + * Ensures that the returned locator isn't a blacklisted one. + * + * Blacklisted packages are packages that cannot be used because their dependencies cannot be deduced. This only + * happens with peer dependencies, which effectively have different sets of dependencies depending on their parents. + * + * In order to deambiguate those different sets of dependencies, the Yarn implementation of PnP will generate a + * symlink for each combination of // it will find, and will + * blacklist the target of those symlinks. By doing this, we ensure that files loaded through a specific path + * will always have the same set of dependencies, provided the symlinks are correctly preserved. 
+ * + * Unfortunately, some tools do not preserve them, and when it happens PnP is no longer able to deduce the set of + * dependencies based on the path of the file that makes the require calls. But since we've blacklisted those paths, + * we're able to print a more helpful error message that points out that a third-party package is doing something + * incompatible! + */ + +// eslint-disable-next-line no-unused-vars +function blacklistCheck(locator) { + if (locator === blacklistedLocator) { + throw makeError( + `BLACKLISTED`, + [ + `A package has been resolved through a blacklisted path - this is usually caused by one of your tools calling`, + `"realpath" on the return value of "require.resolve". Since the returned values use symlinks to disambiguate`, + `peer dependencies, they must be passed untransformed to "require".`, + ].join(` `), + ); + } + + return locator; +} + +$$SETUP_STATIC_TABLES(); + +/** + * Returns the module that should be used to resolve require calls. It's usually the direct parent, except if we're + * inside an eval expression. 
+ */ + +function getIssuerModule(parent) { + let issuer = parent; + + while (issuer && (issuer.id === '[eval]' || issuer.id === '' || !issuer.filename)) { + issuer = issuer.parent; + } + + return issuer; +} + +/** + * Returns information about a package in a safe way (will throw if they cannot be retrieved) + */ + +function getPackageInformationSafe(packageLocator) { + const packageInformation = exports.getPackageInformation(packageLocator); + + if (!packageInformation) { + throw makeError( + `INTERNAL`, + `Couldn't find a matching entry in the dependency tree for the specified parent (this is probably an internal error)`, + ); + } + + return packageInformation; +} + +/** + * Implements the node resolution for folder access and extension selection + */ + +function applyNodeExtensionResolution(unqualifiedPath, {extensions}) { + // We use this "infinite while" so that we can restart the process as long as we hit package folders + while (true) { + let stat; + + try { + stat = statSync(unqualifiedPath); + } catch (error) {} + + // If the file exists and is a file, we can stop right there + + if (stat && !stat.isDirectory()) { + // If the very last component of the resolved path is a symlink to a file, we then resolve it to a file. We only + // do this first the last component, and not the rest of the path! This allows us to support the case of bin + // symlinks, where a symlink in "/xyz/pkg-name/.bin/bin-name" will point somewhere else (like "/xyz/pkg-name/index.js"). + // In such a case, we want relative requires to be resolved relative to "/xyz/pkg-name/" rather than "/xyz/pkg-name/.bin/". + // + // Also note that the reason we must use readlink on the last component (instead of realpath on the whole path) + // is that we must preserve the other symlinks, in particular those used by pnp to deambiguate packages using + // peer dependencies. 
For example, "/xyz/.pnp/local/pnp-01234569/.bin/bin-name" should see its relative requires + // be resolved relative to "/xyz/.pnp/local/pnp-0123456789/" rather than "/xyz/pkg-with-peers/", because otherwise + // we would lose the information that would tell us what are the dependencies of pkg-with-peers relative to its + // ancestors. + + if (lstatSync(unqualifiedPath).isSymbolicLink()) { + unqualifiedPath = path.normalize(path.resolve(path.dirname(unqualifiedPath), readlinkSync(unqualifiedPath))); + } + + return unqualifiedPath; + } + + // If the file is a directory, we must check if it contains a package.json with a "main" entry + + if (stat && stat.isDirectory()) { + let pkgJson; + + try { + pkgJson = JSON.parse(readFileSync(`${unqualifiedPath}/package.json`, 'utf-8')); + } catch (error) {} + + let nextUnqualifiedPath; + + if (pkgJson && pkgJson.main) { + nextUnqualifiedPath = path.resolve(unqualifiedPath, pkgJson.main); + } + + // If the "main" field changed the path, we start again from this new location + + if (nextUnqualifiedPath && nextUnqualifiedPath !== unqualifiedPath) { + unqualifiedPath = nextUnqualifiedPath; + continue; + } + } + + // Otherwise we check if we find a file that match one of the supported extensions + + const qualifiedPath = extensions + .map(extension => { + return `${unqualifiedPath}${extension}`; + }) + .find(candidateFile => { + return existsSync(candidateFile); + }); + + if (qualifiedPath) { + return qualifiedPath; + } + + // Otherwise, we check if the path is a folder - in such a case, we try to use its index + + if (stat && stat.isDirectory()) { + const indexPath = extensions + .map(extension => { + return `${unqualifiedPath}/index${extension}`; + }) + .find(candidateFile => { + return existsSync(candidateFile); + }); + + if (indexPath) { + return indexPath; + } + } + + // Otherwise there's nothing else we can do :( + + return null; + } +} + +/** + * This function creates fake modules that can be used with the _resolveFilename 
function. + * Ideally it would be nice to be able to avoid this, since it causes useless allocations + * and cannot be cached efficiently (we recompute the nodeModulePaths every time). + * + * Fortunately, this should only affect the fallback, and there hopefully shouldn't be a + * lot of them. + */ + +function makeFakeModule(path) { + const fakeModule = new Module(path, false); + fakeModule.filename = path; + fakeModule.paths = Module._nodeModulePaths(path); + return fakeModule; +} + +/** + * Forward the resolution to the next resolver (usually the native one) + */ + +function callNativeResolution(request, issuer) { + if (issuer.endsWith('/')) { + issuer += 'internal.js'; + } + + try { + enableNativeHooks = false; + + // Since we would need to create a fake module anyway (to call _resolveLookupPath that + // would give us the paths to give to _resolveFilename), we can as well not use + // the {paths} option at all, since it internally makes _resolveFilename create another + // fake module anyway. + return Module._resolveFilename(request, makeFakeModule(issuer), false); + } finally { + enableNativeHooks = true; + } +} + +/** + * This key indicates which version of the standard is implemented by this resolver. The `std` key is the + * Plug'n'Play standard, and any other key are third-party extensions. Third-party extensions are not allowed + * to override the standard, and can only offer new methods. + * + * If an new version of the Plug'n'Play standard is released and some extensions conflict with newly added + * functions, they'll just have to fix the conflicts and bump their own version number. + */ + +exports.VERSIONS = {std: 1}; + +/** + * Useful when used together with getPackageInformation to fetch information about the top-level package. + */ + +exports.topLevel = {name: null, reference: null}; + +/** + * Gets the package information for a given locator. Returns null if they cannot be retrieved. 
+ */ + +exports.getPackageInformation = function getPackageInformation({name, reference}) { + const packageInformationStore = packageInformationStores.get(name); + + if (!packageInformationStore) { + return null; + } + + const packageInformation = packageInformationStore.get(reference); + + if (!packageInformation) { + return null; + } + + return packageInformation; +}; + +/** + * Transforms a request (what's typically passed as argument to the require function) into an unqualified path. + * This path is called "unqualified" because it only changes the package name to the package location on the disk, + * which means that the end result still cannot be directly accessed (for example, it doesn't try to resolve the + * file extension, or to resolve directories to their "index.js" content). Use the "resolveUnqualified" function + * to convert them to fully-qualified paths, or just use "resolveRequest" that do both operations in one go. + * + * Note that it is extremely important that the `issuer` path ends with a forward slash if the issuer is to be + * treated as a folder (ie. "/tmp/foo/" rather than "/tmp/foo" if "foo" is a directory). Otherwise relative + * imports won't be computed correctly (they'll get resolved relative to "/tmp/" instead of "/tmp/foo/"). + */ + +exports.resolveToUnqualified = function resolveToUnqualified(request, issuer, {considerBuiltins = true} = {}) { + // Bailout if the request is a native module + + if (considerBuiltins && builtinModules.has(request)) { + return null; + } + + // We allow disabling the pnp resolution for some subpaths. This is because some projects, often legacy, + // contain multiple levels of dependencies (ie. a yarn.lock inside a subfolder of a yarn.lock). This is + // typically solved using workspaces, but not all of them have been converted already. 
+ + if (ignorePattern && ignorePattern.test(issuer)) { + const result = callNativeResolution(request, issuer); + + if (result === false) { + throw makeError( + `BUILTIN_NODE_RESOLUTION_FAIL`, + `The builtin node resolution algorithm was unable to resolve the module referenced by "${request}" and requested from "${issuer}" (it didn't go through the pnp resolver because the issuer was explicitely ignored by the regexp "$$BLACKLIST")`, + { + request, + issuer, + }, + ); + } + + return result; + } + + let unqualifiedPath; + + // If the request is a relative or absolute path, we just return it normalized + + const dependencyNameMatch = request.match(pathRegExp); + + if (!dependencyNameMatch) { + if (path.isAbsolute(request)) { + unqualifiedPath = path.normalize(request); + } else if (issuer.match(isDirRegExp)) { + unqualifiedPath = path.normalize(path.resolve(issuer, request)); + } else { + unqualifiedPath = path.normalize(path.resolve(path.dirname(issuer), request)); + } + } + + // Things are more hairy if it's a package require - we then need to figure out which package is needed, and in + // particular the exact version for the given location on the dependency tree + + if (dependencyNameMatch) { + const [, dependencyName, subPath] = dependencyNameMatch; + + const issuerLocator = exports.findPackageLocator(issuer); + + // If the issuer file doesn't seem to be owned by a package managed through pnp, then we resort to using the next + // resolution algorithm in the chain, usually the native Node resolution one + + if (!issuerLocator) { + const result = callNativeResolution(request, issuer); + + if (result === false) { + throw makeError( + `BUILTIN_NODE_RESOLUTION_FAIL`, + `The builtin node resolution algorithm was unable to resolve the module referenced by "${request}" and requested from "${issuer}" (it didn't go through the pnp resolver because the issuer doesn't seem to be part of the Yarn-managed dependency tree)`, + { + request, + issuer, + }, + ); + } + + return 
result; + } + + const issuerInformation = getPackageInformationSafe(issuerLocator); + + // We obtain the dependency reference in regard to the package that request it + + let dependencyReference = issuerInformation.packageDependencies.get(dependencyName); + + // If we can't find it, we check if we can potentially load it from the top-level packages + // it's a bit of a hack, but it improves compatibility with the existing Node ecosystem. Hopefully we should + // eventually be able to kill it and become stricter once pnp gets enough traction + + if (dependencyReference === undefined) { + const topLevelInformation = getPackageInformationSafe(topLevelLocator); + dependencyReference = topLevelInformation.packageDependencies.get(dependencyName); + } + + // If we can't find the path, and if the package making the request is the top-level, we can offer nicer error messages + + if (!dependencyReference) { + if (dependencyReference === null) { + if (issuerLocator === topLevelLocator) { + throw makeError( + `MISSING_PEER_DEPENDENCY`, + `You seem to be requiring a peer dependency ("${dependencyName}"), but it is not installed (which might be because you're the top-level package)`, + {request, issuer, dependencyName}, + ); + } else { + throw makeError( + `MISSING_PEER_DEPENDENCY`, + `Package "${issuerLocator.name}@${issuerLocator.reference}" is trying to access a peer dependency ("${dependencyName}") that should be provided by its direct ancestor but isn't`, + {request, issuer, issuerLocator: Object.assign({}, issuerLocator), dependencyName}, + ); + } + } else { + if (issuerLocator === topLevelLocator) { + throw makeError( + `UNDECLARED_DEPENDENCY`, + `You cannot require a package ("${dependencyName}") that is not declared in your dependencies (via "${issuer}")`, + {request, issuer, dependencyName}, + ); + } else { + const candidates = Array.from(issuerInformation.packageDependencies.keys()); + throw makeError( + `UNDECLARED_DEPENDENCY`, + `Package 
"${issuerLocator.name}@${issuerLocator.reference}" (via "${issuer}") is trying to require the package "${dependencyName}" (via "${request}") without it being listed in its dependencies (${candidates.join( + `, `, + )})`, + {request, issuer, issuerLocator: Object.assign({}, issuerLocator), dependencyName, candidates}, + ); + } + } + } + + // We need to check that the package exists on the filesystem, because it might not have been installed + + const dependencyLocator = {name: dependencyName, reference: dependencyReference}; + const dependencyInformation = exports.getPackageInformation(dependencyLocator); + const dependencyLocation = path.resolve(__dirname, dependencyInformation.packageLocation); + + if (!dependencyLocation) { + throw makeError( + `MISSING_DEPENDENCY`, + `Package "${dependencyLocator.name}@${dependencyLocator.reference}" is a valid dependency, but hasn't been installed and thus cannot be required (it might be caused if you install a partial tree, such as on production environments)`, + {request, issuer, dependencyLocator: Object.assign({}, dependencyLocator)}, + ); + } + + // Now that we know which package we should resolve to, we only have to find out the file location + + if (subPath) { + unqualifiedPath = path.resolve(dependencyLocation, subPath); + } else { + unqualifiedPath = dependencyLocation; + } + } + + return path.normalize(unqualifiedPath); +}; + +/** + * Transforms an unqualified path into a qualified path by using the Node resolution algorithm (which automatically + * appends ".js" / ".json", and transforms directory accesses into "index.js"). 
+ */ + +exports.resolveUnqualified = function resolveUnqualified( + unqualifiedPath, + {extensions = Object.keys(Module._extensions)} = {}, +) { + const qualifiedPath = applyNodeExtensionResolution(unqualifiedPath, {extensions}); + + if (qualifiedPath) { + return path.normalize(qualifiedPath); + } else { + throw makeError( + `QUALIFIED_PATH_RESOLUTION_FAILED`, + `Couldn't find a suitable Node resolution for unqualified path "${unqualifiedPath}"`, + {unqualifiedPath}, + ); + } +}; + +/** + * Transforms a request into a fully qualified path. + * + * Note that it is extremely important that the `issuer` path ends with a forward slash if the issuer is to be + * treated as a folder (ie. "/tmp/foo/" rather than "/tmp/foo" if "foo" is a directory). Otherwise relative + * imports won't be computed correctly (they'll get resolved relative to "/tmp/" instead of "/tmp/foo/"). + */ + +exports.resolveRequest = function resolveRequest(request, issuer, {considerBuiltins, extensions} = {}) { + let unqualifiedPath; + + try { + unqualifiedPath = exports.resolveToUnqualified(request, issuer, {considerBuiltins}); + } catch (originalError) { + // If we get a BUILTIN_NODE_RESOLUTION_FAIL error there, it means that we've had to use the builtin node + // resolution, which usually shouldn't happen. It might be because the user is trying to require something + // from a path loaded through a symlink (which is not possible, because we need something normalized to + // figure out which package is making the require call), so we try to make the same request using a fully + // resolved issuer and throws a better and more actionable error if it works. 
+ if (originalError.code === `BUILTIN_NODE_RESOLUTION_FAIL`) { + let realIssuer; + + try { + realIssuer = realpathSync(issuer); + } catch (error) {} + + if (realIssuer) { + if (issuer.endsWith(`/`)) { + realIssuer = realIssuer.replace(/\/?$/, `/`); + } + + try { + exports.resolveToUnqualified(request, realIssuer, {extensions}); + } catch (error) { + // If an error was thrown, the problem doesn't seem to come from a path not being normalized, so we + // can just throw the original error which was legit. + throw originalError; + } + + // If we reach this stage, it means that resolveToUnqualified didn't fail when using the fully resolved + // file path, which is very likely caused by a module being invoked through Node with a path not being + // correctly normalized (ie you should use "node $(realpath script.js)" instead of "node script.js"). + throw makeError( + `SYMLINKED_PATH_DETECTED`, + `A pnp module ("${request}") has been required from what seems to be a symlinked path ("${issuer}"). This is not possible, you must ensure that your modules are invoked through their fully resolved path on the filesystem (in this case "${realIssuer}").`, + { + request, + issuer, + realIssuer, + }, + ); + } + } + throw originalError; + } + + if (unqualifiedPath === null) { + return null; + } + + try { + return exports.resolveUnqualified(unqualifiedPath); + } catch (resolutionError) { + if (resolutionError.code === 'QUALIFIED_PATH_RESOLUTION_FAILED') { + Object.assign(resolutionError.data, {request, issuer}); + } + throw resolutionError; + } +}; + +/** + * Setups the hook into the Node environment. + * + * From this point on, any call to `require()` will go through the "resolveRequest" function, and the result will + * be used as path of the file to load. + */ + +exports.setup = function setup() { + // A small note: we don't replace the cache here (and instead use the native one). 
This is an effort to not + // break code similar to "delete require.cache[require.resolve(FOO)]", where FOO is a package located outside + // of the Yarn dependency tree. In this case, we defer the load to the native loader. If we were to replace the + // cache by our own, the native loader would populate its own cache, which wouldn't be exposed anymore, so the + // delete call would be broken. + + const originalModuleLoad = Module._load; + + Module._load = function(request, parent, isMain) { + if (!enableNativeHooks) { + return originalModuleLoad.call(Module, request, parent, isMain); + } + + // Builtins are managed by the regular Node loader + + if (builtinModules.has(request)) { + try { + enableNativeHooks = false; + return originalModuleLoad.call(Module, request, parent, isMain); + } finally { + enableNativeHooks = true; + } + } + + // The 'pnpapi' name is reserved to return the PnP api currently in use by the program + + if (request === `pnpapi`) { + return pnpModule.exports; + } + + // Request `Module._resolveFilename` (ie. 
`resolveRequest`) to tell us which file we should load + + const modulePath = Module._resolveFilename(request, parent, isMain); + + // Check if the module has already been created for the given file + + const cacheEntry = Module._cache[modulePath]; + + if (cacheEntry) { + return cacheEntry.exports; + } + + // Create a new module and store it into the cache + + const module = new Module(modulePath, parent); + Module._cache[modulePath] = module; + + // The main module is exposed as global variable + + if (isMain) { + process.mainModule = module; + module.id = '.'; + } + + // Try to load the module, and remove it from the cache if it fails + + let hasThrown = true; + + try { + module.load(modulePath); + hasThrown = false; + } finally { + if (hasThrown) { + delete Module._cache[modulePath]; + } + } + + // Some modules might have to be patched for compatibility purposes + + if (patchedModules.has(request)) { + module.exports = patchedModules.get(request)(module.exports); + } + + return module.exports; + }; + + const originalModuleResolveFilename = Module._resolveFilename; + + Module._resolveFilename = function(request, parent, isMain, options) { + if (!enableNativeHooks) { + return originalModuleResolveFilename.call(Module, request, parent, isMain, options); + } + + const issuerModule = getIssuerModule(parent); + const issuer = issuerModule ? issuerModule.filename : process.cwd() + '/'; + + const resolution = exports.resolveRequest(request, issuer); + return resolution !== null ? 
resolution : request; + }; + + const originalFindPath = Module._findPath; + + Module._findPath = function(request, paths, isMain) { + if (!enableNativeHooks) { + return originalFindPath.call(Module, request, paths, isMain); + } + + for (const path of paths) { + let resolution; + + try { + resolution = exports.resolveRequest(request, path); + } catch (error) { + continue; + } + + if (resolution) { + return resolution; + } + } + + return false; + }; +}; + +exports.setupCompatibilityLayer = () => { + // see https://github.com/browserify/resolve/blob/master/lib/caller.js + const getCaller = () => { + const origPrepareStackTrace = Error.prepareStackTrace; + + Error.prepareStackTrace = (_, stack) => stack; + const stack = new Error().stack; + Error.prepareStackTrace = origPrepareStackTrace; + + return stack[2].getFileName(); + }; + + // We need to shim the "resolve" module, because Liftoff uses it in order to find the location + // of the module in the dependency tree. And Liftoff is used to power Gulp, which doesn't work + // at all unless modulePath is set, which we cannot configure from any other way than through + // the Liftoff pipeline (the key isn't whitelisted for env or cli options). 
+ + patchedModules.set('resolve', realResolve => { + const mustBeShimmed = caller => { + const callerLocator = exports.findPackageLocator(caller); + + return callerLocator && callerLocator.name === 'liftoff'; + }; + + const attachCallerToOptions = (caller, options) => { + if (!options.basedir) { + options.basedir = path.dirname(caller); + } + }; + + const resolveSyncShim = (request, {basedir}) => { + return exports.resolveRequest(request, basedir, { + considerBuiltins: false, + }); + }; + + const resolveShim = (request, options, callback) => { + setImmediate(() => { + let error; + let result; + + try { + result = resolveSyncShim(request, options); + } catch (thrown) { + error = thrown; + } + + callback(error, result); + }); + }; + + return Object.assign( + (request, options, callback) => { + if (typeof options === 'function') { + callback = options; + options = {}; + } else if (!options) { + options = {}; + } + + const caller = getCaller(); + attachCallerToOptions(caller, options); + + if (mustBeShimmed(caller)) { + return resolveShim(request, options, callback); + } else { + return realResolve.sync(request, options, callback); + } + }, + { + sync: (request, options) => { + if (!options) { + options = {}; + } + + const caller = getCaller(); + attachCallerToOptions(caller, options); + + if (mustBeShimmed(caller)) { + return resolveSyncShim(request, options); + } else { + return realResolve.sync(request, options); + } + }, + isCore: request => { + return realResolve.isCore(request); + }, + }, + ); + }); +}; + +if (module.parent && module.parent.id === 'internal/preload') { + exports.setup(); + exports.setupCompatibilityLayer(); +} + +if (process.mainModule === module) { + const reportError = (code, message, data) => { + process.stdout.write(`${JSON.stringify([{code, message, data}, null])}\n`); + }; + + const reportSuccess = resolution => { + process.stdout.write(`${JSON.stringify([null, resolution])}\n`); + }; + + const processResolution = (request, issuer) => { + 
try { + reportSuccess(exports.resolveRequest(request, issuer)); + } catch (error) { + reportError(error.code, error.message, error.data); + } + }; + + const processRequest = data => { + try { + const [request, issuer] = JSON.parse(data); + processResolution(request, issuer); + } catch (error) { + reportError(`INVALID_JSON`, error.message, error.data); + } + }; + + if (process.argv.length > 2) { + if (process.argv.length !== 4) { + process.stderr.write(`Usage: ${process.argv[0]} ${process.argv[1]} \n`); + process.exitCode = 64; /* EX_USAGE */ + } else { + processResolution(process.argv[2], process.argv[3]); + } + } else { + let buffer = ''; + const decoder = new StringDecoder.StringDecoder(); + + process.stdin.on('data', chunk => { + buffer += decoder.write(chunk); + + do { + const index = buffer.indexOf('\n'); + if (index === -1) { + break; + } + + const line = buffer.slice(0, index); + buffer = buffer.slice(index + 1); + + processRequest(line); + } while (true); + }); + } +} diff --git a/src/util/generate-pnp-map.js b/src/util/generate-pnp-map.js new file mode 100644 index 0000000000..c3d81d7919 --- /dev/null +++ b/src/util/generate-pnp-map.js @@ -0,0 +1,436 @@ +// @flow + +import type Config from '../config.js'; +import type WorkspaceLayout from '../workspace-layout.js'; +import type PackageResolver from '../package-resolver.js'; +import type Reporter from '../reporters/base-reporter.js'; +import pnpApi from './generate-pnp-map-api.tpl.js'; +import * as fs from './fs.js'; + +const crypto = require('crypto'); +const invariant = require('invariant'); +const path = require('path'); + +const OFFLINE_CACHE_EXTENSION = `.zip`; + +type PackageInformation = {| + packageLocation: string, + packageDependencies: Map, +|}; + +type PackageInformationStore = Map; +type PackageInformationStores = Map; + +type GeneratePnpMapOptions = {| + resolver: PackageResolver, + reporter: Reporter, + targetPath: string, + workspaceLayout: ?WorkspaceLayout, +|}; + +function 
generateMaps(packageInformationStores: PackageInformationStores, blacklistedLocations: Set): string { + let code = ``; + + // Bake the information stores into our generated code + code += `let packageInformationStores = new Map([\n`; + for (const [packageName, packageInformationStore] of packageInformationStores) { + code += ` [${JSON.stringify(packageName)}, new Map([\n`; + for (const [packageReference, {packageLocation, packageDependencies}] of packageInformationStore) { + code += ` [${JSON.stringify(packageReference)}, {\n`; + code += ` packageLocation: path.resolve(__dirname, ${JSON.stringify(packageLocation)}),\n`; + code += ` packageDependencies: new Map([\n`; + for (const [dependencyName, dependencyReference] of packageDependencies.entries()) { + code += ` [${JSON.stringify(dependencyName)}, ${JSON.stringify(dependencyReference)}],\n`; + } + code += ` ]),\n`; + code += ` }],\n`; + } + code += ` ])],\n`; + } + code += `]);\n`; + + code += `\n`; + + // Also bake an inverse map that will allow us to find the package information based on the path + code += `let locatorsByLocations = new Map([\n`; + for (const blacklistedLocation of blacklistedLocations) { + code += ` [${JSON.stringify(blacklistedLocation)}, blacklistedLocator],\n`; + } + for (const [packageName, packageInformationStore] of packageInformationStores) { + for (const [packageReference, {packageLocation}] of packageInformationStore) { + if (packageName !== null) { + code += ` [${JSON.stringify(packageLocation)}, ${JSON.stringify({ + name: packageName, + reference: packageReference, + })}],\n`; + } else { + code += ` [${JSON.stringify(packageLocation)}, topLevelLocator],\n`; + } + } + } + code += `]);\n`; + + return code; +} + +function generateFindPackageLocator(packageInformationStores: PackageInformationStores): string { + let code = ``; + + // We get the list of each string length we'll need to check in order to find the current package context + const lengths = new Map(); + + for (const 
packageInformationStore of packageInformationStores.values()) { + for (const {packageLocation} of packageInformationStore.values()) { + if (packageLocation === null) { + continue; + } + + const length = packageLocation.length; + const count = (lengths.get(length) || 0) + 1; + + lengths.set(length, count); + } + } + + // We must try the larger lengths before the smaller ones, because smaller ones might also match the longest ones + // (for instance, /project/path will match /project/path/.pnp/global/node_modules/pnp-cf5f9c17b8f8db) + const sortedLengths = Array.from(lengths.entries()).sort((a, b) => { + return b[0] - a[0]; + }); + + // Generate a function that, given a file path, returns the associated package name + code += `exports.findPackageLocator = function findPackageLocator(location) {\n`; + code += ` let relativeLocation = path.relative(__dirname, location);\n`; + code += `\n`; + code += ` if (!relativeLocation.match(isStrictRegExp))\n`; + code += ` relativeLocation = \`./\${relativeLocation}\`;\n`; + code += `\n`; + code += ` if (location.match(isDirRegExp) && relativeLocation.charAt(relativeLocation.length - 1) !== '/')\n`; + code += ` relativeLocation = \`\${relativeLocation}/\`;\n`; + code += `\n`; + code += ` let match;\n`; + + for (const [length] of sortedLengths) { + code += `\n`; + code += ` if (relativeLocation.length >= ${length} && relativeLocation[${length - 1}] === '/')\n`; + code += ` if (match = locatorsByLocations.get(relativeLocation.substr(0, ${length})))\n`; + code += ` return blacklistCheck(match);\n`; + } + + code += `\n`; + code += ` return null;\n`; + code += `};\n`; + + return code; +} + +async function getPackageInformationStores( + config: Config, + seedPatterns: Array, + {resolver, reporter, targetPath, workspaceLayout}: GeneratePnpMapOptions, +): Promise<[PackageInformationStores, Set]> { + const targetDirectory = path.dirname(targetPath); + const offlineCacheFolder = config.offlineCacheFolder; + + const packageInformationStores: 
PackageInformationStores = new Map(); + const blacklistedLocations: Set = new Set(); + + const getCachePath = (fsPath: string) => { + const cacheRelativePath = path.relative(config.cacheFolder, fsPath); + + // if fsPath is not inside cacheRelativePath, we just skip it + if (cacheRelativePath.match(/^\.\.\//)) { + return null; + } + + return cacheRelativePath; + }; + + const resolveOfflineCacheFolder = (fsPath: string) => { + if (!offlineCacheFolder) { + return fsPath; + } + + const cacheRelativePath = getCachePath(fsPath); + + // if fsPath is not inside the cache, we shouldn't replace it (workspace) + if (!cacheRelativePath) { + return fsPath; + } + + const components = cacheRelativePath.split(/\//g); + const [cacheEntry, ...internalPath] = components; + + return path.resolve(offlineCacheFolder, `${cacheEntry}${OFFLINE_CACHE_EXTENSION}`, internalPath.join('/')); + }; + + const normalizeDirectoryPath = (fsPath: string) => { + let relativePath = path.relative(targetDirectory, resolveOfflineCacheFolder(fsPath)); + + if (!relativePath.match(/^\.{0,2}\//)) { + relativePath = `./${relativePath}`; + } + + return relativePath.replace(/\/?$/, '/'); + }; + + const getHashFrom = (data: Array) => { + const hashGenerator = crypto.createHash('sha1'); + + for (const datum of data) { + hashGenerator.update(datum); + } + + return hashGenerator.digest('hex'); + }; + + const getResolverEntry = pattern => { + const pkg = resolver.getStrictResolvedPattern(pattern); + const ref = pkg._reference; + + if (!ref) { + return null; + } + + invariant(ref.locations.length <= 1, 'Must have at most one location (usually in the cache)'); + const loc = ref.locations[0]; + + if (!loc) { + return null; + } + + return {pkg, ref, loc}; + }; + + const visit = async (seedPatterns: Array, parentData: Array = []) => { + const resolutions = new Map(); + const locations = new Map(); + + // This first pass will compute the package reference of each of the given patterns + // They will usually be the package 
version, but not always. We need to do this in a pre-process pass, because the + // dependencies might depend on one another, so if we need to replace one of them, we need to compute it first + for (const pattern of seedPatterns) { + const entry = getResolverEntry(pattern); + + if (!entry) { + continue; + } + + const {pkg, ref} = entry; + let {loc} = entry; + + const packageName = pkg.name; + let packageReference = pkg.version; + + // If we have peer dependencies, then we generate a new virtual reference based on the parent one + // We cannot generate this reference based on what those peer references resolve to, because they might not have + // been computed yet (for example, consider the case where A has a peer dependency on B, and B a peer dependency + // on A; it's valid, but it prevents us from computing A and B - and it's even worse with 3+ packages involved) + const peerDependencies = new Set(Array.from(Object.keys(pkg.peerDependencies || {}))); + + // As an optimization, we only setup virtual packages if their underlying packages are referenced multiple times + // in the tree. This allow us to avoid having to create symlinks in the majority of cases + if (peerDependencies.size > 0 && ref.requests.length > 1) { + const hash = getHashFrom([...parentData, packageName, packageReference]); + + let symlinkSource; + let symlinkFile; + + switch (ref.remote.type) { + case 'workspace': + { + symlinkSource = loc; + symlinkFile = path.resolve(config.lockfileFolder, '.pnp', 'workspaces', `pnp-${hash}`, packageName); + + loc = symlinkFile; + } + break; + + default: + { + const isFromCache = getCachePath(loc); + + const hashName = + isFromCache && offlineCacheFolder ? 
`pnp-${hash}${OFFLINE_CACHE_EXTENSION}` : `pnp-${hash}`; + const newLoc = path.resolve( + config.lockfileFolder, + '.pnp', + 'externals', + hashName, + 'node_modules', + packageName, + ); + + // The `node_modules/` part is already there when the package comes from the cache + if (isFromCache) { + const getBase = source => path.resolve(source, '../'.repeat(1 + packageName.split('/').length)); + symlinkSource = resolveOfflineCacheFolder(getBase(loc)); + symlinkFile = getBase(newLoc); + } else { + symlinkSource = loc; + symlinkFile = newLoc; + } + + loc = newLoc; + } + break; + } + + await fs.mkdirp(path.dirname(symlinkFile)); + await fs.symlink(symlinkSource, symlinkFile); + + packageReference = `pnp:${hash}`; + + // We blacklist this path so that we can print a nicer error message if someone tries to require it (it usually + // means that they're using realpath on the return value of require.resolve) + blacklistedLocations.add(normalizeDirectoryPath(loc)); + } + + // Now that we have the final reference, we need to store it + resolutions.set(packageName, packageReference); + locations.set(packageName, loc); + } + + // Now that we have the final references, we can start the main loop, which will insert the packages into the store + // if they aren't already there, and recurse over their own children + for (const pattern of seedPatterns) { + const entry = getResolverEntry(pattern); + + if (!entry) { + continue; + } + + const {pkg, ref} = entry; + + const packageName = pkg.name; + + const packageReference = resolutions.get(packageName); + invariant(packageReference, `Package reference should have been computed during the pre-pass`); + + const loc = locations.get(packageName); + invariant(loc, `Package location should have been computed during the pre-pass`); + + // We can early exit if the package is already registered with the exact same name and reference, since even if + // we might get slightly different dependencies (depending on how things were optimized), both 
sets are valid + let packageInformationStore = packageInformationStores.get(packageName); + + if (!packageInformationStore) { + packageInformationStore = new Map(); + packageInformationStores.set(packageName, packageInformationStore); + } + + let packageInformation = packageInformationStore.get(packageReference); + + if (packageInformation) { + continue; + } + + packageInformation = { + packageLocation: normalizeDirectoryPath(loc), + packageDependencies: new Map(), + }; + + // Split the dependencies between direct/peer - we will only recurse on the former + const peerDependencies = new Set(Array.from(Object.keys(pkg.peerDependencies || {}))); + const directDependencies = ref.dependencies.filter(pattern => { + const pkg = resolver.getStrictResolvedPattern(pattern); + return !pkg || !peerDependencies.has(pkg.name); + }); + + // We do this in two steps to prevent cyclic dependencies from looping indefinitely + packageInformationStore.set(packageReference, packageInformation); + packageInformation.packageDependencies = await visit(directDependencies, [packageName, packageReference]); + + // We now have to inject the peer dependencies + for (const dependencyName of peerDependencies) { + const dependencyReference = resolutions.get(dependencyName); + + if (dependencyReference) { + packageInformation.packageDependencies.set(dependencyName, dependencyReference); + } + } + + // Finally, unless a package depends on a previous version of itself (that would be weird but correct...), we + // inject them an implicit dependency to themselves (so that they can require themselves) + if (!packageInformation.packageDependencies.has(packageName)) { + packageInformation.packageDependencies.set(packageName, packageReference); + } + } + + return resolutions; + }; + + // If we have workspaces, we need to iterate over them all in order to add them to the map + // This is because they might not be declared as dependencies of the top-level project (and with reason, since the + // top-level 
package might depend on a different than the one provided in the workspaces - cf Babel, which depends + // on an old version of itself in order to compile itself) + if (workspaceLayout) { + for (const name of Object.keys(workspaceLayout.workspaces)) { + const pkg = workspaceLayout.workspaces[name].manifest; + + // Skip the aggregator, since it's essentially a duplicate of the top-level package that we'll iterate later on + if (pkg.workspaces) { + continue; + } + + const ref = pkg._reference; + invariant(ref, `Workspaces should have a reference`); + + invariant(ref.locations.length === 1, `Workspaces should have exactly one location`); + const loc = ref.locations[0]; + invariant(loc, `Workspaces should have a location`); + + let packageInformationStore = packageInformationStores.get(name); + + if (!packageInformationStore) { + packageInformationStore = new Map(); + packageInformationStores.set(name, packageInformationStore); + } + + packageInformationStore.set(pkg.version, { + packageLocation: normalizeDirectoryPath(loc), + packageDependencies: await visit(ref.dependencies, [name, pkg.version]), + }); + } + } + + // Register the top-level package in our map + // This will recurse on each of its dependencies as well. 
+ packageInformationStores.set( + null, + new Map([ + [ + null, + { + packageLocation: normalizeDirectoryPath(config.lockfileFolder), + packageDependencies: await visit(seedPatterns), + }, + ], + ]), + ); + + return [packageInformationStores, blacklistedLocations]; +} + +export async function generatePnpMap( + config: Config, + seedPatterns: Array, + {resolver, reporter, workspaceLayout, targetPath}: GeneratePnpMapOptions, +): Promise { + const [packageInformationStores, blacklistedLocations] = await getPackageInformationStores(config, seedPatterns, { + resolver, + reporter, + targetPath, + workspaceLayout, + }); + + const setupStaticTables = [ + generateMaps(packageInformationStores, blacklistedLocations), + generateFindPackageLocator(packageInformationStores), + ].join(``); + + return pnpApi + .replace(/\$\$SHEBANG/g, config.plugnplayShebang) + .replace(/\$\$BLACKLIST/g, JSON.stringify(config.plugnplayBlacklist)) + .replace(/\$\$SETUP_STATIC_TABLES\(\);/g, setupStaticTables); +} diff --git a/src/util/hooks.js b/src/util/hooks.js index d9f00e0592..4417b37f5c 100644 --- a/src/util/hooks.js +++ b/src/util/hooks.js @@ -1,6 +1,6 @@ /* @flow */ -export type YarnHook = 'resolveStep' | 'fetchStep' | 'linkStep' | 'buildStep'; +export type YarnHook = 'resolveStep' | 'fetchStep' | 'linkStep' | 'buildStep' | 'pnpStep'; const YARN_HOOKS_KEY = 'experimentalYarnHooks'; diff --git a/src/util/portable-script.js b/src/util/portable-script.js new file mode 100644 index 0000000000..be901cff00 --- /dev/null +++ b/src/util/portable-script.js @@ -0,0 +1,53 @@ +/* @flow */ + +import * as fs from './fs.js'; + +const path = require('path'); + +export type PortableProxyOptions = {| + proxyBasename?: string, + extraEnvironment?: Map, + prependArguments?: Array, + appendArguments?: Array, + pnpPackageName?: string, +|}; + +async function makePortableProxyScriptUnix( + source: string, + destination: string, + options: PortableProxyOptions, +): Promise { + const environment = 
options.extraEnvironment + ? Array.from(options.extraEnvironment.entries()).map(([key, value]) => `${key}="${value}"`).join(' ') + ' ' + : ''; + + const prependedArguments = options.prependArguments + ? ' ' + options.prependArguments.map(arg => `"${arg}"`).join(' ') + : ''; + const appendedArguments = options.appendArguments + ? ' ' + options.appendArguments.map(arg => `"${arg}"`).join(' ') + : ''; + + const filePath = `${destination}/${options.proxyBasename || path.basename(source)}`; + + // Unless impossible we want to preserve any symlinks used to call us when forwarding the call to the binary (so we + // cannot use realpath or transform relative paths into absolute ones), but we also need to tell the sh interpreter + // that the symlink should be resolved relative to the script directory (hence dirname "$0" at runtime). + const sourcePath = path.isAbsolute(source) ? source : `$(dirname "$0")/../${source}`; + + await fs.mkdirp(destination); + await fs.writeFile( + filePath, + `#!/bin/sh\n\n${environment}"${sourcePath}"${prependedArguments} "$@"${appendedArguments}\n`, + ); + await fs.chmod(filePath, 0o755); +} + +export function makePortableProxyScript( + source: string, + destination: string, + // $FlowFixMe Flow doesn't support exact types with empty default values + options: PortableProxyOptions = {}, +): Promise { + return makePortableProxyScriptUnix(source, destination, options); +} diff --git a/yarn.lock b/yarn.lock index f4e32618f6..7673f5fd4d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -748,6 +748,13 @@ babel-plugin-check-es2015-constants@^6.22.0: dependencies: babel-runtime "^6.22.0" +babel-plugin-inline-import@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/babel-plugin-inline-import/-/babel-plugin-inline-import-2.0.6.tgz#8a3c179561b503bf4af319f3cad435e6b7b2863c" + integrity sha1-ijwXlWG1A79K8xnzytQ15reyhjw= + dependencies: + require-resolve "0.0.2" + babel-plugin-istanbul@^4.1.5: version "4.1.6" resolved 
"https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz#36c59b2192efce81c5b378321b74175add1c9a45" @@ -5750,6 +5757,11 @@ path-exists@^3.0.0: resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= +path-extra@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/path-extra/-/path-extra-1.0.3.tgz#7c112189a6e50d595790e7ad2037e44e410c1166" + integrity sha1-fBEhiablDVlXkOetIDfkTkEMEWY= + path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" @@ -6322,6 +6334,13 @@ require-main-filename@^1.0.1: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= +require-resolve@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/require-resolve/-/require-resolve-0.0.2.tgz#bab410ab1aee2f3f55b79317451dd3428764e6f3" + integrity sha1-urQQqxruLz9Vt5MXRR3TQodk5vM= + dependencies: + x-path "^0.0.2" + require-uncached@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" @@ -7382,9 +7401,10 @@ v8flags@^3.0.1: dependencies: homedir-polyfill "^1.0.1" -validate-npm-package-license@^3.0.4: +validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== dependencies: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" @@ -7642,6 +7662,13 @@ ws@^5.2.0: dependencies: async-limiter "~1.0.0" 
+x-path@^0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/x-path/-/x-path-0.0.2.tgz#294d076bb97a7706cc070bbb2a6fd8c54df67b12" + integrity sha1-KU0Ha7l6dwbMBwu7Km/YxU32exI= + dependencies: + path-extra "^1.0.2" + xml-name-validator@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"